2009-06-05 14:56:22 +04:00
|
|
|
import os, sys, math, urllib, re
|
2009-05-02 07:05:30 +04:00
|
|
|
import toposort
|
2009-04-24 02:26:10 +04:00
|
|
|
from dulwich.repo import Repo
|
|
|
|
from dulwich.client import SimpleFetchGraphWalker
|
|
|
|
from hgext import bookmarks
|
|
|
|
from mercurial.i18n import _
|
2009-06-19 19:53:39 +04:00
|
|
|
from mercurial.node import hex, bin, nullid
|
2009-06-05 14:56:22 +04:00
|
|
|
from mercurial import context
|
2009-06-03 20:55:45 +04:00
|
|
|
from dulwich.misc import make_sha
|
2009-04-29 01:28:27 +04:00
|
|
|
from dulwich.objects import (
|
|
|
|
Blob,
|
|
|
|
Commit,
|
|
|
|
Tag,
|
|
|
|
Tree,
|
2009-05-15 07:20:48 +04:00
|
|
|
format_timezone,
|
2009-04-29 01:28:27 +04:00
|
|
|
)
|
2009-04-29 06:33:03 +04:00
|
|
|
|
2009-04-28 03:15:48 +04:00
|
|
|
|
2009-04-24 02:26:10 +04:00
|
|
|
class GitHandler(object):
|
2009-04-27 05:27:47 +04:00
|
|
|
|
2009-04-24 02:26:10 +04:00
|
|
|
def __init__(self, dest_repo, ui):
    """Bind the handler to a Mercurial repository and its ui object."""
    self.repo = dest_repo
    self.ui = ui
    self.mapfile = 'git-mapfile'
    self.tagsfile = 'git-tags'

    # the Git store lives either in the working dir (.git) or inside .hg/git
    if ui.config('git', 'intree'):
        self.gitdir = self.repo.wjoin('.git')
    else:
        self.gitdir = self.repo.join('git')

    self.paths = ui.configitems('paths')

    self.init_if_missing()
    self.load_git()
    self.load_map()
    self.load_tags()
2009-04-27 05:27:47 +04:00
|
|
|
|
|
|
|
# make the git data directory
|
2009-04-27 04:23:06 +04:00
|
|
|
def init_if_missing(self):
    """Create the git data directory and a bare repo on first use."""
    if not os.path.exists(self.gitdir):
        os.mkdir(self.gitdir)
        Repo.init_bare(self.gitdir)
2009-04-27 05:27:47 +04:00
|
|
|
|
2009-04-25 01:05:50 +04:00
|
|
|
def load_git(self):
    """Open the Git object store backing this handler."""
    self.git = Repo(self.gitdir)
|
2009-04-25 01:05:50 +04:00
|
|
|
|
2009-04-27 03:25:04 +04:00
|
|
|
## FILE LOAD AND SAVE METHODS
|
|
|
|
|
2009-04-27 23:26:44 +04:00
|
|
|
def map_set(self, gitsha, hgsha):
    """Record a git<->hg changeset correspondence in both directions."""
    self._map_git[gitsha] = hgsha
    self._map_hg[hgsha] = gitsha
|
|
|
|
|
|
|
|
def map_hg_get(self, gitsha):
    """Return the hg sha mapped to *gitsha*, or None if unknown."""
    return self._map_git.get(gitsha)
|
2009-04-27 23:26:44 +04:00
|
|
|
|
|
|
|
def map_git_get(self, hgsha):
    """Return the git sha mapped to *hgsha*, or None if unknown."""
    return self._map_hg.get(hgsha)
|
2009-04-29 06:33:03 +04:00
|
|
|
|
2009-04-25 01:05:50 +04:00
|
|
|
def load_map(self):
    """Populate the git<->hg sha maps from the on-disk mapfile.

    Each line of the mapfile is '<gitsha> <hgsha>'.
    """
    self._map_git = {}
    self._map_hg = {}
    if os.path.exists(self.repo.join(self.mapfile)):
        for line in self.repo.opener(self.mapfile):
            gitsha, hgsha = line.strip().split(' ', 1)
            # keep both directions in sync through the single setter,
            # instead of duplicating map_set's logic inline
            self.map_set(gitsha, hgsha)
|
2009-04-27 05:27:47 +04:00
|
|
|
|
2009-04-25 01:05:50 +04:00
|
|
|
def save_map(self):
    """Atomically rewrite the mapfile from the in-memory git map."""
    # 'fp' instead of 'file' so the builtin isn't shadowed
    fp = self.repo.opener(self.mapfile, 'w+', atomictemp=True)
    for gitsha, hgsha in sorted(self._map_git.iteritems()):
        fp.write("%s %s\n" % (gitsha, hgsha))
    fp.rename()
|
2009-04-27 05:27:47 +04:00
|
|
|
|
2009-06-19 01:38:09 +04:00
|
|
|
|
|
|
|
def load_tags(self):
    """Read the git-tags file into self.tags as {name: sha}."""
    self.tags = {}
    if os.path.exists(self.repo.join(self.tagsfile)):
        for line in self.repo.opener(self.tagsfile):
            # each line is '<sha> <name>'
            sha, name = line.strip().split(' ', 1)
            self.tags[name] = sha
|
|
|
|
|
|
|
|
def save_tags(self):
    """Atomically rewrite the tags file ('<sha> <name>' per line)."""
    # 'fp' instead of 'file' so the builtin isn't shadowed
    fp = self.repo.opener(self.tagsfile, 'w+', atomictemp=True)
    for name, sha in sorted(self.tags.iteritems()):
        fp.write("%s %s\n" % (sha, name))
    fp.rename()
|
|
|
|
|
2009-04-27 03:25:04 +04:00
|
|
|
## END FILE LOAD AND SAVE METHODS
|
2009-04-25 01:05:50 +04:00
|
|
|
|
2009-06-16 16:39:11 +04:00
|
|
|
## COMMANDS METHODS
|
|
|
|
|
2009-05-15 02:48:24 +04:00
|
|
|
def import_commits(self, remote_name):
    """Convert Git objects into hg changesets, then persist the sha map."""
    self.import_git_objects(remote_name)
    self.save_map()
|
|
|
|
|
2009-06-16 17:44:19 +04:00
|
|
|
def fetch(self, remote):
    """Fetch a pack from *remote* and convert everything new into hg."""
    self.ui.status(_("fetching from : %s\n") % remote)
    # convert local hg changesets first so the graph walker knows what we have
    self.export_git_objects()
    refs = self.fetch_pack(remote)
    remote_name = self.remote_name(remote)

    if refs:
        self.import_git_objects(remote_name, refs)
        self.import_tags(refs)
        self.update_hg_bookmarks(refs)
        if remote_name:
            self.update_remote_branches(remote_name, refs)
        elif not self.paths:
            # initial cloning
            self.update_remote_branches('default', refs)
    else:
        self.ui.status(_("nothing new on the server\n"))

    self.save_map()
|
|
|
|
|
2009-06-03 20:55:45 +04:00
|
|
|
def export_commits(self, export_objects=True):
    """Convert hg changesets to Git and refresh tags and references.

    Pass export_objects=False when conversion was already done
    (e.g. by a preceding fetch).
    """
    if export_objects:
        self.export_git_objects()
    self.export_hg_tags()
    self.update_references()
    self.save_map()
|
2009-04-27 05:27:47 +04:00
|
|
|
|
2009-06-23 22:22:49 +04:00
|
|
|
def push(self, remote):
    """Push converted hg changesets up to a Git remote."""
    # get and convert objects if they already exist on the server
    self.fetch(remote)
    self.ui.status(_("pushing to : %s\n") % remote)
    self.export_commits(False)
    changed_refs = self.upload_pack(remote)
    remote_name = self.remote_name(remote)

    if remote_name and changed_refs:
        for ref, sha in changed_refs.iteritems():
            self.ui.status(" " + remote_name + "::" + ref + " => GIT:" + sha[0:8] + "\n")
        self.update_remote_branches(remote_name, changed_refs)
|
|
|
|
|
2009-05-10 17:14:36 +04:00
|
|
|
|
2009-06-16 16:39:11 +04:00
|
|
|
def clear(self):
    """Remove the git data directory and the sha mapfile, if present."""
    import shutil
    mapfile = self.repo.join(self.mapfile)
    if os.path.exists(self.gitdir):
        # shutil.rmtree replaces the hand-rolled bottom-up os.walk removal
        shutil.rmtree(self.gitdir)
    if os.path.exists(mapfile):
        os.remove(mapfile)
|
2009-05-18 03:42:34 +04:00
|
|
|
|
2009-06-16 16:39:11 +04:00
|
|
|
## CHANGESET CONVERSION METHODS
|
2009-05-18 02:29:18 +04:00
|
|
|
|
2009-04-27 23:26:44 +04:00
|
|
|
def export_git_objects(self):
    """Convert every not-yet-exported hg changeset into Git objects."""
    self.previous_entries = {}
    self.written_trees = {}
    self.ui.status(_("importing Hg objects into Git\n"))

    nodes = [self.repo.lookup(n) for n in self.repo]
    export = [node for node in nodes if not hex(node) in self._map_hg]
    total = len(export)
    # width for the progress counter
    if total:
        magnitude = int(math.log(total, 10)) + 1
    else:
        magnitude = 1

    for i, rev in enumerate(export):
        if i % 100 == 0:
            self.ui.status(_("at: %*d/%d\n") % (magnitude, i, total))

        ctx = self.repo.changectx(rev)
        state = ctx.extra().get('hg-git', None)
        if state == 'octopus':
            # synthetic merge parts get folded into the final octopus commit
            self.ui.debug("revision %d is a part of octopus explosion\n" % ctx.rev())
            continue
        self.export_hg_commit(rev)
        self.save_map()
|
2009-04-29 06:33:03 +04:00
|
|
|
|
2009-04-28 03:15:48 +04:00
|
|
|
# convert this commit into git objects
|
|
|
|
# go through the manifest, convert all blobs/trees we don't have
|
|
|
|
# write the commit object (with metadata info)
|
2009-04-27 23:26:44 +04:00
|
|
|
def export_hg_commit(self, rev):
    """Convert hg changeset *rev* into a Git commit; return its git sha.

    Converts parents first (recursively), writes the tree, then writes
    the commit with hg metadata preserved in a '--HG--' message trailer.
    """
    def is_octopus_part(ctx):
        # parts of an exploded octopus merge carry an 'hg-git' extra marker
        return ctx.extra().get('hg-git', None) in set(['octopus', 'octopus-done'])

    self.ui.note(_("converting revision %s\n") % rev)

    oldenc = self.swap_out_encoding()

    ctx = self.repo.changectx(rev)
    extra = ctx.extra()

    parents = []
    if extra.get('hg-git', None) == 'octopus-done':
        # implode octopus parents: walk the chain of synthetic 2-parent
        # merges back into one flat parent list
        part = ctx
        while is_octopus_part(part):
            (p1, p2) = part.parents()
            assert not is_octopus_part(p1)
            parents.append(p1)
            part = p2
        parents.append(p2)
    else:
        parents = ctx.parents()

    # make sure parents are converted first
    for parent in parents:
        p_node = parent.node()
        if p_node != nullid and not hex(p_node) in self._map_hg:
            # BUG FIX: previously called self.export_hg_commit(p_rev),
            # but p_rev was never defined (NameError); recurse on p_node
            self.export_hg_commit(p_node)

    tree_sha, renames = self.write_git_tree(ctx)

    commit = {}
    commit['tree'] = tree_sha
    (time, timezone) = ctx.date()

    if 'author' in extra:
        # round-tripped git author preserved verbatim
        author = extra['author']
    else:
        # hg authors might not have emails
        author = ctx.user()

    # check for git author pattern compliance
    regex = re.compile('^(.*?) \<(.*?)\>(.*)$')
    a = regex.match(author)

    if a:
        name = a.group(1)
        email = a.group(2)
        if len(a.group(3)) > 0:
            # stash any trailing junk so it survives a round trip
            name += ' ext:(' + urllib.quote(a.group(3)) + ')'
        author = name + ' <' + email + '>'
    else:
        author = author + ' <none@none>'

    commit['author'] = author + ' ' + str(int(time)) + ' ' + format_timezone(-timezone)

    if 'message' in extra:
        # original (possibly non-utf8) message bytes preserved on import
        commit['message'] = extra['message']
    else:
        commit['message'] = ctx.description() + "\n"

    if 'committer' in extra:
        # fixup timezone
        (name_timestamp, timezone) = extra['committer'].rsplit(' ', 1)
        try:
            timezone = format_timezone(-int(timezone))
            commit['committer'] = '%s %s' % (name_timestamp, timezone)
        except ValueError:
            self.ui.warn(_("Ignoring committer in extra, invalid timezone in r%d: '%s'.\n") % (ctx, timezone))

    if 'encoding' in extra:
        commit['encoding'] = extra['encoding']

    # HG EXTRA INFORMATION: anything git can't represent natively goes
    # into a '--HG--' trailer appended to the commit message
    add_extras = False
    extra_message = ''
    if not ctx.branch() == 'default':
        add_extras = True
        extra_message += "branch : " + ctx.branch() + "\n"

    if renames:
        add_extras = True
        for oldfile, newfile in renames:
            extra_message += "rename : " + oldfile + " => " + newfile + "\n"

    for key, value in extra.iteritems():
        # these keys are already encoded elsewhere in the git commit
        if key in ('author', 'committer', 'encoding', 'message', 'branch', 'hg-git'):
            continue
        else:
            add_extras = True
            extra_message += "extra : " + key + " : " + urllib.quote(value) + "\n"

    if add_extras:
        commit['message'] += "\n--HG--\n" + extra_message

    commit['parents'] = []
    for parent in parents:
        hgsha = hex(parent.node())
        git_sha = self.map_git_get(hgsha)
        if git_sha:
            commit['parents'].append(git_sha)

    commit_sha = self.git.write_commit_hash(commit)  # writing new blobs to git
    self.map_set(commit_sha, ctx.hex())

    self.swap_out_encoding(oldenc)

    return commit_sha
|
2009-04-29 06:33:03 +04:00
|
|
|
|
2009-04-28 01:50:54 +04:00
|
|
|
def write_git_tree(self, ctx):
    """Write Git blobs and trees for *ctx*'s manifest.

    Returns (root_tree_sha, renames) where renames is a list of
    (old_path, new_path) pairs detected in the changeset.
    """
    trees = {}
    man = ctx.manifest()
    ctx_id = hex(ctx.node())

    renames = []
    for filenm, nodesha in man.iteritems():
        file_id = hex(nodesha)
        # remember this ctx's entries so children can skip unchanged files
        if ctx_id not in self.previous_entries:
            self.previous_entries[ctx_id] = {}
        self.previous_entries[ctx_id][filenm] = file_id

        # write blob if not in our git database
        fctx = ctx.filectx(filenm)

        # skip rename detection when the file is identical in a parent
        same_as_last = False
        for par in ctx.parents():
            par_id = hex(par.node())
            if par_id in self.previous_entries:
                if filenm in self.previous_entries[par_id]:
                    if self.previous_entries[par_id][filenm] == file_id:
                        same_as_last = True
        if not same_as_last:
            rename = fctx.renamed()
            if rename:
                filerename, sha = rename
                renames.append((filerename, filenm))
        is_exec = 'x' in fctx.flags()
        is_link = 'l' in fctx.flags()
        blob_sha = self.map_git_get(file_id)
        if not blob_sha:
            blob_sha = self.git.write_blob(fctx.data())  # writing new blobs to git
            self.map_set(blob_sha, file_id)

        parts = filenm.split('/')
        if len(parts) > 1:
            # get filename and path for leading subdir
            filepath = parts[-1:][0]
            dirpath = "/".join([v for v in parts[0:-1]]) + '/'

            # get subdir name and path for parent dir,
            # creating a tree entry for every intermediate directory
            parpath = '/'
            nparpath = '/'
            for part in parts[0:-1]:
                if nparpath == '/':
                    nparpath = part + '/'
                else:
                    nparpath += part + '/'

                treeentry = ['tree', part + '/', nparpath]

                if parpath not in trees:
                    trees[parpath] = []
                if treeentry not in trees[parpath]:
                    trees[parpath].append( treeentry )

                parpath = nparpath

            # set file entry
            fileentry = ['blob', filepath, blob_sha, is_exec, is_link]
            if dirpath not in trees:
                trees[dirpath] = []
            trees[dirpath].append(fileentry)
        else:
            # top-level file goes straight into the root tree
            fileentry = ['blob', parts[0], blob_sha, is_exec, is_link]
            if '/' not in trees:
                trees['/'] = []
            trees['/'].append(fileentry)

    dirs = trees.keys()
    if dirs:
        # sort by tree depth, so we write the deepest trees first
        dirs.sort(lambda a, b: len(b.split('/'))-len(a.split('/')))
        dirs.remove('/')
        dirs.append('/')
    else:
        # manifest is empty => make empty root tree
        trees['/'] = []
        dirs = ['/']

    # write all the trees
    tree_sha = None
    tree_shas = {}
    for dirnm in dirs:
        tree_data = []

        # calculating a sha for the tree, so we don't write it twice
        # (the unused 'sha_group' local from the original was removed)
        listsha = make_sha()
        for entry in trees[dirnm]:
            # replace tree path with tree SHA
            if entry[0] == 'tree':
                sha = tree_shas[entry[2]]
                entry[2] = sha
            listsha.update(entry[1])
            listsha.update(entry[2])
            tree_data.append(entry)
        listsha = listsha.hexdigest()

        if listsha in self.written_trees:
            # identical tree already written this run; reuse its sha
            tree_sha = self.written_trees[listsha]
            tree_shas[dirnm] = tree_sha
        else:
            tree_sha = self.git.write_tree_array(tree_data)  # writing new trees to git
            tree_shas[dirnm] = tree_sha
            self.written_trees[listsha] = tree_sha

    return (tree_sha, renames)  # should be the last root tree sha
|
2009-04-29 06:33:03 +04:00
|
|
|
|
2009-06-16 16:39:11 +04:00
|
|
|
def import_git_objects(self, remote_name=None, refs=None):
    """Walk Git history from the given refs (or heads) and convert
    every commit not yet in the sha map into an hg changeset."""
    self.ui.status(_("importing Git objects into Hg\n"))
    # import heads and fetched tags as remote references
    todo = []
    done = set()
    convert_list = {}

    # get a list of all the head shas
    if refs:
        for head, sha in refs.iteritems():
            todo.append(sha)
    elif remote_name:
        todo = self.git.remote_refs(remote_name).values()[:]
    else:
        todo = self.git.heads().values()[:]

    # traverse the heads getting a list of all the unique commits
    while todo:
        sha = todo.pop()
        assert isinstance(sha, str)
        if sha in done:
            continue
        done.add(sha)
        obj = self.git.get_object(sha)
        if isinstance(obj, Commit):
            convert_list[sha] = obj
            todo.extend([p for p in obj.parents if p not in done])
        if isinstance(obj, Tag):
            # peel annotated tags down to the commit they point at
            (obj_type, obj_sha) = obj.get_object()
            obj = self.git.get_object(obj_sha)
            if isinstance(obj, Commit):
                convert_list[sha] = obj
                todo.extend([p for p in obj.parents if p not in done])

    # sort the commits topologically and drop already-converted ones
    commits = toposort.TopoSort(convert_list).items()
    commits = [commit for commit in commits if not commit in self._map_git]

    # import each of the commits, oldest first
    total = len(commits)
    magnitude = int(math.log(total, 10)) + 1 if total else 1
    for i, csha in enumerate(commits):
        if i % 100 == 0:
            self.ui.status(_("at: %*d/%d\n") % (magnitude, i, total))
        commit = convert_list[csha]
        self.import_git_commit(commit)
|
2009-06-16 16:39:11 +04:00
|
|
|
|
|
|
|
def import_git_commit(self, commit):
    """Create an hg changeset from a dulwich Commit object and record
    the mapping between the two shas."""
    self.ui.debug(_("importing: %s\n") % commit.id)
    # TODO : Do something less coarse-grained than try/except on the
    # get_file call for removed files

    (strip_message, hg_renames, hg_branch, extra) = self.extract_hg_metadata(commit.message)

    # get a list of the changed, added, removed files
    files = self.git.get_files_changed(commit)

    date = (commit.author_time, -commit.author_timezone)
    text = strip_message

    try:
        text.decode('utf-8')
    except UnicodeDecodeError:
        # keep the original bytes in extra so a round trip is lossless
        extra['message'] = text
        text = self.decode_guess(text, commit._encoding)

    author = commit.author

    # convert extra data back to the end
    if ' ext:' in commit.author:
        regex = re.compile('^(.*?)\ ext:\((.*)\) <(.*)\>$')
        m = regex.match(commit.author)
        if m:
            name = m.group(1)
            ex = urllib.unquote(m.group(2))
            email = m.group(3)
            author = name + ' <' + email + '>' + ex

    if ' <none@none>' in commit.author:
        # strip the placeholder email added on export
        author = commit.author[:-12]

    try:
        author.decode('utf-8')
    except UnicodeDecodeError:
        extra['author'] = author
        author = self.decode_guess(author, commit._encoding)

    oldenc = self.swap_out_encoding()

    def getfilectx(repo, memctx, f):
        try:
            (mode, sha, data) = self.git.get_file(commit, f)
            e = self.convert_git_int_mode(mode)
        except TypeError:
            # file was removed in this commit
            raise IOError()
        if f in hg_renames:
            copied_path = hg_renames[f]
        else:
            copied_path = None
        return context.memfilectx(f, data, 'l' in e, 'x' in e, copied_path)

    gparents = map(self.map_hg_get, commit.parents)
    p1, p2 = (nullid, nullid)
    octopus = False

    if len(gparents) > 1:
        # merge, possibly octopus
        def commit_octopus(p1, p2):
            ctx = context.memctx(self.repo, (p1, p2), text, files, getfilectx,
                                 author, date, {'hg-git': 'octopus'})
            return hex(self.repo.commitctx(ctx))

        octopus = len(gparents) > 2
        # explode an N-parent git merge into a chain of hg 2-parent merges
        p2 = gparents.pop()
        p1 = gparents.pop()
        while len(gparents) > 0:
            p2 = commit_octopus(p1, p2)
            p1 = gparents.pop()
    else:
        if gparents:
            p1 = gparents.pop()

    files = list(set(files))

    pa = None
    if not (p2 == nullid):
        node1 = self.repo.changectx(p1)
        node2 = self.repo.changectx(p2)
        pa = node1.ancestor(node2)

    # if named branch, add to extra
    if hg_branch:
        extra['branch'] = hg_branch

    # if committer is different than author, add it to extra
    if not commit._author_raw == commit._committer_raw:
        extra['committer'] = "%s %d %d" % (commit.committer, commit.commit_time, -commit.commit_timezone)

    if commit._encoding:
        extra['encoding'] = commit._encoding

    # (a second, identical "if hg_branch" assignment that appeared here
    # in the original was removed — extra['branch'] is already set above)

    if octopus:
        extra['hg-git'] = 'octopus-done'

    ctx = context.memctx(self.repo, (p1, p2), text, files, getfilectx,
                         author, date, extra)

    node = self.repo.commit_import_ctx(ctx, pa)

    self.swap_out_encoding(oldenc)

    # save changeset to mapping file
    cs = hex(node)
    self.map_set(commit.id, cs)
|
|
|
|
|
|
|
|
## PACK UPLOADING AND FETCHING
|
2009-04-27 05:27:47 +04:00
|
|
|
|
2009-06-16 17:44:19 +04:00
|
|
|
def upload_pack(self, remote):
    """Send missing objects to a Git remote.

    Returns the dict of refs we asked the server to update, so the
    caller (push) can record them — the original never returned it,
    leaving push's changed_refs always None.
    """
    client, path = self.get_transport_and_path(remote)
    changed = self.get_changed_refs
    genpack = self.generate_pack_contents
    self.ui.status(_("creating and sending data\n"))
    # the previous try/except here only re-raised (its own TODO asked
    # for its removal), so it was dropped
    changed_refs = client.send_pack(path, changed, genpack)
    # BUG FIX: return the changed refs to the caller
    return changed_refs
|
2009-04-28 23:46:51 +04:00
|
|
|
|
2009-04-29 04:28:04 +04:00
|
|
|
# TODO : for now, we'll just push all heads that match remote heads
|
2009-04-28 23:46:51 +04:00
|
|
|
# * we should have specified push, tracking branches and --all
|
2009-04-29 06:33:03 +04:00
|
|
|
# takes a dict of refs:shas from the server and returns what should be
|
2009-04-28 23:46:51 +04:00
|
|
|
# pushed up
|
|
|
|
def get_changed_refs(self, refs):
    """Given the server's {ref: sha} map, return the refs we should push.

    Only fast-forwardable heads, missing tags, and local branches the
    server lacks are included. Returns None when the server sent no refs.
    """
    # RepoError moved between mercurial versions; import whichever exists
    # (the original caught RepoError without it ever being imported)
    try:
        from mercurial.repo import RepoError
    except ImportError:
        from mercurial.error import RepoError

    keys = refs.keys()

    changed = {}
    if not keys:
        return None

    # TODO : this is a huge hack
    if keys[0] == 'capabilities^{}':
        # nothing on the server yet - first push
        if not 'master' in self.repo.tags():
            tip = self.repo.lookup('tip')
            changed['refs/heads/master'] = self.map_git_get(hex(tip))

    # push any of our tags the server doesn't have yet
    for tag, sha in self.tags.iteritems():
        tag_name = 'refs/tags/' + tag
        if tag_name not in refs:
            changed[tag_name] = self.map_git_get(sha)

    for ref_name in keys:
        parts = ref_name.split('/')
        if parts[0] == 'refs' and parts[1] == 'heads':
            # strip off 'refs/heads'
            head = "/".join([v for v in parts[2:]])
            try:
                local_ref = self.repo.lookup(head)
                remote_ref = self.map_hg_get(refs[ref_name])
                if remote_ref:
                    remotectx = self.repo[remote_ref]
                    localctx = self.repo[local_ref]
                    if remotectx.ancestor(localctx) == remotectx:
                        # fast forward push
                        changed[ref_name] = self.map_git_get(hex(local_ref))
                    else:
                        # XXX: maybe abort completely
                        # BUG FIX: was a bare 'ui.warn' (undefined name)
                        self.ui.warn('not pushing branch %s, please merge' % head)
            except RepoError:
                # remote_ref is not here
                pass

    # Also push any local branches not on the server yet
    for head in self.local_heads():
        ref = 'refs/heads/' + head
        if not ref in refs:
            node = self.repo.lookup(head)
            changed[ref] = self.map_git_get(hex(node))

    return changed
|
2009-04-29 06:33:03 +04:00
|
|
|
|
2009-04-28 23:46:51 +04:00
|
|
|
# takes a list of shas the server wants and shas the server has
|
|
|
|
# and generates a list of commit shas we need to push up
|
|
|
|
def generate_pack_contents(self, want, have):
    """Collect (object, path) pairs for every object the server lacks.

    *want* is the shas the server asked for, *have* the shas it
    already has; the graph walk yields the commits in between.
    """
    graph_walker = SimpleFetchGraphWalker(want, self.git.get_parents)
    # 'node_sha' instead of 'next' so the builtin isn't shadowed
    node_sha = graph_walker.next()
    shas = set()
    while node_sha:
        if node_sha in have:
            graph_walker.ack(node_sha)
        else:
            shas.add(node_sha)
        node_sha = graph_walker.next()

    # a set gives O(1) membership tests (was a list: O(n) per object)
    seen = set()

    # so now i have the shas, need to turn them into a list of
    # tuples (sha, path) for ALL the objects i'm sending
    # TODO : don't send blobs or trees they already have
    def get_objects(tree, path):
        changes = list()
        changes.append((tree, path))
        for (mode, name, sha) in tree.entries():
            if mode == 0o160000:  # TODO : properly handle submodules and document what 57344 means
                continue
            if sha in seen:
                continue

            obj = self.git.get_object(sha)
            seen.add(sha)
            if isinstance(obj, Blob):
                changes.append((obj, path + name))
            elif isinstance(obj, Tree):
                changes.extend(get_objects(obj, path + name + '/'))
        return changes

    objects = []
    for commit_sha in shas:
        commit = self.git.commit(commit_sha)
        objects.append((commit, 'commit'))
        tree = self.git.get_object(commit.tree)
        objects.extend(get_objects(tree, '/'))

    return objects
|
2009-04-29 06:33:03 +04:00
|
|
|
|
2009-04-27 01:49:38 +04:00
|
|
|
def fetch_pack(self, remote_name):
    """Download a pack from the remote into our object store.

    Returns the refs dict reported by the server.
    """
    client, path = self.get_transport_and_path(remote_name)
    graphwalker = SimpleFetchGraphWalker(self.git.heads().values(), self.git.get_parents)
    f, commit = self.git.object_store.add_pack()
    try:
        determine_wants = self.git.object_store.determine_wants_all
        refs = client.fetch_pack(path, determine_wants, graphwalker, f.write, sys.stdout.write)
        f.close()
        commit()
        return refs
    finally:
        # double close is harmless; guarantees the temp pack file is
        # released even when fetch_pack raises
        f.close()
|
|
|
|
|
2009-06-16 16:39:11 +04:00
|
|
|
## REFERENCES HANDLING
|
|
|
|
|
|
|
|
def update_references(self):
    """Mirror each Mercurial bookmark as a local Git branch ref."""
    heads = self.local_heads()

    # Create a local Git branch name for each
    # Mercurial bookmark.
    for key in heads:
        self.git.set_ref('refs/heads/' + key, heads[key])
|
2009-06-16 16:39:11 +04:00
|
|
|
|
|
|
|
def export_hg_tags(self):
    """Write git-typed hg tags back out as Git tag refs."""
    for tag, sha in self.repo.tags().iteritems():
        # only tags that originated from git are exported back
        if self.repo.tagtype(tag) == 'git':
            self.git.set_ref('refs/tags/' + tag, self.map_git_get(hex(sha)))
|
2009-06-16 16:39:11 +04:00
|
|
|
|
|
|
|
def local_heads(self):
    """Return {bookmark: git sha} for all local bookmarks.

    Returns {} when the bookmarks extension is not available
    (bookmarks.parse raises AttributeError in that case).
    """
    try:
        bms = bookmarks.parse(self.repo)
        return dict([(bm, self.map_git_get(hex(bms[bm]))) for bm in bms])
    except AttributeError:
        return {}
|
2009-06-16 16:39:11 +04:00
|
|
|
|
2009-06-19 01:38:09 +04:00
|
|
|
def import_tags(self, refs):
    """Record hg-side tags for every refs/tags/* entry we fetched."""
    keys = refs.keys()
    if not keys:
        return
    for k in keys[:]:
        ref_name = k
        parts = k.split('/')
        if parts[0] == 'refs' and parts[1] == 'tags':
            ref_name = "/".join([v for v in parts[2:]])
            # git advertises both 'name' and the peeled 'name^{}'; fold them
            if ref_name[-3:] == '^{}':
                ref_name = ref_name[:-3]
            if not ref_name in self.repo.tags():
                obj = self.git.get_object(refs[k])
                sha = None
                if isinstance(obj, Commit):  # lightweight
                    sha = self.map_hg_get(refs[k])
                    self.tags[ref_name] = sha
                elif isinstance(obj, Tag):  # annotated
                    (obj_type, obj_sha) = obj.get_object()
                    obj = self.git.get_object(obj_sha)
                    if isinstance(obj, Commit):
                        sha = self.map_hg_get(obj_sha)
                        # TODO: better handling for annotated tags
                        self.tags[ref_name] = sha
    self.save_tags()
|
2009-06-05 14:56:22 +04:00
|
|
|
|
2009-06-16 17:44:19 +04:00
|
|
|
def update_hg_bookmarks(self, refs):
    """Sync hg bookmarks from fetched git branch heads.

    A bookmark is created for a new branch, and moved only on a
    fast-forward; diverged bookmarks are left untouched. Warns if the
    bookmarks extension isn't usable.
    """
    try:
        marks = bookmarks.parse(self.repo)
        prefix = 'refs/heads/'
        branch_heads = {}
        for ref in refs:
            if ref.startswith(prefix):
                branch_heads[ref[len(prefix):]] = refs[ref]

        for name, git_sha in branch_heads.iteritems():
            hg_sha = bin(self.map_hg_get(git_sha))
            if name in marks:
                current = self.repo[marks[name]]
                # move the bookmark only on a fast-forward
                if current.ancestor(self.repo[hg_sha]) == current:
                    marks[name] = hg_sha
            else:
                # brand-new branch: create the bookmark
                marks[name] = hg_sha
        if branch_heads:
            bookmarks.write(self.repo, marks)
    except AttributeError:
        self.ui.warn(_('creating bookmarks failed, do you have'
                       ' bookmarks enabled?\n'))
|
2009-04-30 03:18:37 +04:00
|
|
|
|
2009-06-16 17:44:19 +04:00
|
|
|
def update_remote_branches(self, remote_name, refs):
    """Track fetched branch heads from a named remote.

    Each 'refs/heads/<branch>' entry becomes a local hg tag named
    '<remote_name>/<branch>', and the full ref set is stored under
    the remote in the git data.
    """
    prefix = 'refs/heads/'
    for ref in refs:
        if not ref.startswith(prefix):
            continue
        branch = ref[len(prefix):]
        hg_sha = bin(self.map_hg_get(refs[ref]))
        # local tag (True), no message/user/date
        self.repo.tag('%s/%s' % (remote_name, branch),
                      hg_sha, '', True, None, None)

    self.git.set_remote_refs(refs, remote_name)
|
|
|
|
|
|
|
|
|
2009-06-16 16:39:11 +04:00
|
|
|
## UTILITY FUNCTIONS
|
|
|
|
|
2009-04-29 22:50:56 +04:00
|
|
|
def convert_git_int_mode(self, mode):
    """Map a git integer file mode to the mercurial flag string.

    Returns 'x' for an executable file, 'l' for a symlink, and ''
    for a regular file or any unrecognized mode.
    """
    # TODO: make these into constants.
    # Decimal values are used because 0NNN octal literals are not
    # valid Python 3 syntax; the octal forms are noted alongside.
    convert = {
        33188: '',   # 0o100644 regular file
        33261: 'x',  # 0o100755 executable
        40960: 'l',  # 0o120000 symlink
    }
    # dict.get replaces the manual membership test + lookup
    return convert.get(mode, '')
|
2009-04-30 03:18:37 +04:00
|
|
|
|
2009-05-01 00:54:33 +04:00
|
|
|
def extract_hg_metadata(self, message):
    """Split the '--HG--' trailer off a git commit message.

    Returns (message, renames, branch, extra): `renames` maps
    destination path -> source path, `branch` is the hg branch name
    or False when absent, and `extra` holds url-unquoted extra
    fields. Messages without a trailer come back unchanged.
    """
    split = message.split("\n\n--HG--\n", 1)
    renames = {}
    extra = {}
    branch = False
    if len(split) == 2:
        message, meta = split
        for line in meta.split("\n"):
            if not line:
                continue
            command, data = line.split(" : ", 1)
            if command == 'rename':
                src, dst = data.split(" => ", 1)
                renames[dst] = src
            elif command == 'branch':
                branch = data
            elif command == 'extra':
                key, value = data.split(" : ", 1)
                extra[key] = urllib.unquote(value)
    return (message, renames, branch, extra)
|
2009-06-05 14:56:22 +04:00
|
|
|
|
2009-06-16 17:44:19 +04:00
|
|
|
def remote_name(self, remote):
    """Return the first configured hg path alias for `remote`.

    Implicitly returns None when no [paths] entry matches the url.
    """
    for alias, url in self.paths:
        if url == remote:
            return alias
|
|
|
|
|
2009-06-18 19:49:13 +04:00
|
|
|
# Stolen from hgsubversion
def swap_out_encoding(self, new_encoding='UTF-8'):
    """Force Mercurial's global text encoding to `new_encoding`.

    Returns the previous encoding so the caller can restore it.
    Newer Mercurial exposes this via the `encoding` module; older
    versions kept it on `util._encoding`, hence the ImportError
    fallback.
    """
    try:
        from mercurial import encoding
        old = encoding.encoding
        encoding.encoding = new_encoding
    except ImportError:
        # NOTE(review): `hgutil` is not among the imports visible at
        # the top of this file — confirm it is imported elsewhere,
        # otherwise this fallback raises NameError on old Mercurial.
        old = hgutil._encoding
        hgutil._encoding = new_encoding
    return old
|
|
|
|
|
|
|
|
def decode_guess(self, string, encoding):
    """Best-effort transcode of `string` to utf-8 bytes.

    Tries the supplied encoding first (when given), then latin-1,
    and finally falls back to ascii with replacement characters.
    """
    attempts = []
    if encoding:
        attempts.append(encoding)
    attempts.append('latin-1')
    for candidate in attempts:
        try:
            return string.decode(candidate).encode('utf-8')
        except UnicodeDecodeError:
            continue
    # latin-1 accepts every byte, so this is a belt-and-braces path
    return string.decode('ascii', 'replace').encode('utf-8')
|
|
|
|
|
2009-04-24 02:26:10 +04:00
|
|
|
def check_bookmarks(self):
    """Nag the user about bookmarks-extension configuration.

    NOTE(review): the condition looks inverted relative to the
    warning text — ui.config returns None when the key is absent, so
    `is not None` fires when hgext.bookmarks IS configured, yet the
    message says to set bookmarks up. Confirm intent before relying
    on this; it may have been meant to be `is None`.
    """
    if self.ui.config('extensions', 'hgext.bookmarks') is not None:
        self.ui.warn("YOU NEED TO SETUP BOOKMARKS\n")
|
2009-04-24 02:26:10 +04:00
|
|
|
|
|
|
|
def get_transport_and_path(self, uri):
    """Choose a dulwich client for `uri`.

    Returns (client, path): TCP for git://, SSH for git@ (scp-style)
    and git+ssh:// urls, and a subprocess client for anything else
    (assumed to be a local path).
    """
    from dulwich.client import TCPGitClient, SSHGitClient, SubprocessGitClient

    schemes = (
        ("git://", TCPGitClient),
        ("git@", SSHGitClient),
        ("git+ssh://", SSHGitClient),
    )
    for prefix, client_class in schemes:
        if not uri.startswith(prefix):
            continue
        rest = uri[len(prefix):]
        if prefix == 'git@':
            # scp-style syntax: git@host:path
            host, path = rest.split(":", 1)
            host = 'git@' + host
        else:
            host, path = rest.split("/", 1)
        return client_class(host), '/' + path
    # if its not git or git+ssh, try a local url..
    return SubprocessGitClient(), uri
|