2009-11-29 18:29:18 +03:00
|
|
|
import os, math, urllib, re
|
2009-05-02 07:05:30 +04:00
|
|
|
import toposort
|
2009-07-03 00:58:51 +04:00
|
|
|
|
2009-07-31 01:09:53 +04:00
|
|
|
from dulwich.errors import HangupException
|
2009-07-03 00:58:51 +04:00
|
|
|
from dulwich.index import commit_tree
|
2009-08-04 15:32:01 +04:00
|
|
|
from dulwich.objects import Blob, Commit, Tag, Tree, parse_timezone
|
2009-07-25 00:23:35 +04:00
|
|
|
from dulwich.pack import create_delta, apply_delta
|
2009-04-24 02:26:10 +04:00
|
|
|
from dulwich.repo import Repo
|
2009-07-03 00:58:51 +04:00
|
|
|
|
2009-04-24 02:26:10 +04:00
|
|
|
from hgext import bookmarks
|
|
|
|
from mercurial.i18n import _
|
2009-06-19 19:53:39 +04:00
|
|
|
from mercurial.node import hex, bin, nullid
|
2009-07-23 12:28:37 +04:00
|
|
|
from mercurial import context, util as hgutil
|
|
|
|
|
2009-04-28 03:15:48 +04:00
|
|
|
|
2009-04-24 02:26:10 +04:00
|
|
|
class GitHandler(object):
|
2009-04-27 05:27:47 +04:00
|
|
|
|
2009-04-24 02:26:10 +04:00
|
|
|
    def __init__(self, dest_repo, ui):
        """Bind this handler to a Mercurial repository and ui.

        Decides where the backing Git repository lives ('.git' inside
        the working dir when the git.intree config is set, otherwise
        '.hg/git') and loads the persisted sha map and tag cache.
        """
        self.repo = dest_repo
        self.ui = ui
        # names of the per-repo state files, stored via repo.opener
        self.mapfile = 'git-mapfile'
        self.tagsfile = 'git-tags'

        if ui.config('git', 'intree'):
            self.gitdir = self.repo.wjoin('.git')
        else:
            self.gitdir = self.repo.join('git')

        # configured [paths] entries, used to derive remote names
        self.paths = ui.configitems('paths')

        self.load_map()
        self.load_tags()
|
2009-04-27 05:27:47 +04:00
|
|
|
|
|
|
|
# make the git data directory
|
2009-04-27 04:23:06 +04:00
|
|
|
def init_if_missing(self):
|
2009-10-16 00:45:15 +04:00
|
|
|
if os.path.exists(self.gitdir):
|
|
|
|
self.git = Repo(self.gitdir)
|
|
|
|
else:
|
2009-05-10 21:42:44 +04:00
|
|
|
os.mkdir(self.gitdir)
|
2009-10-16 00:45:15 +04:00
|
|
|
self.git = Repo.init_bare(self.gitdir)
|
2009-04-25 01:05:50 +04:00
|
|
|
|
2009-04-27 03:25:04 +04:00
|
|
|
## FILE LOAD AND SAVE METHODS
|
|
|
|
|
2009-04-27 23:26:44 +04:00
|
|
|
def map_set(self, gitsha, hgsha):
|
|
|
|
self._map_git[gitsha] = hgsha
|
|
|
|
self._map_hg[hgsha] = gitsha
|
|
|
|
|
|
|
|
def map_hg_get(self, gitsha):
|
2009-07-07 20:05:43 +04:00
|
|
|
return self._map_git.get(gitsha)
|
2009-04-27 23:26:44 +04:00
|
|
|
|
|
|
|
def map_git_get(self, hgsha):
|
2009-07-07 20:05:43 +04:00
|
|
|
return self._map_hg.get(hgsha)
|
2009-04-29 06:33:03 +04:00
|
|
|
|
2009-04-25 01:05:50 +04:00
|
|
|
def load_map(self):
|
2009-04-27 23:26:44 +04:00
|
|
|
self._map_git = {}
|
|
|
|
self._map_hg = {}
|
2009-05-10 21:37:23 +04:00
|
|
|
if os.path.exists(self.repo.join(self.mapfile)):
|
|
|
|
for line in self.repo.opener(self.mapfile):
|
2009-04-25 01:05:50 +04:00
|
|
|
gitsha, hgsha = line.strip().split(' ', 1)
|
2009-04-27 23:26:44 +04:00
|
|
|
self._map_git[gitsha] = hgsha
|
|
|
|
self._map_hg[hgsha] = gitsha
|
2009-04-27 05:27:47 +04:00
|
|
|
|
2009-04-25 01:05:50 +04:00
|
|
|
    def save_map(self):
        """Atomically persist the git<->hg sha map, sorted by hg sha."""
        file = self.repo.opener(self.mapfile, 'w+', atomictemp=True)
        for hgsha, gitsha in sorted(self._map_hg.iteritems()):
            file.write("%s %s\n" % (gitsha, hgsha))
        # atomictemp files are committed into place by rename()
        file.rename()
|
2009-04-27 05:27:47 +04:00
|
|
|
|
2009-06-19 01:38:09 +04:00
|
|
|
|
|
|
|
def load_tags(self):
|
|
|
|
self.tags = {}
|
|
|
|
if os.path.exists(self.repo.join(self.tagsfile)):
|
|
|
|
for line in self.repo.opener(self.tagsfile):
|
|
|
|
sha, name = line.strip().split(' ', 1)
|
|
|
|
self.tags[name] = sha
|
|
|
|
|
|
|
|
    def save_tags(self):
        """Atomically persist self.tags, skipping hg 'global' tags.

        Global tags are already stored in .hgtags; only git-derived
        tags need the side file.
        """
        file = self.repo.opener(self.tagsfile, 'w+', atomictemp=True)
        for name, sha in sorted(self.tags.iteritems()):
            if not self.repo.tagtype(name) == 'global':
                file.write("%s %s\n" % (sha, name))
        # atomictemp files are committed into place by rename()
        file.rename()
|
|
|
|
|
2009-04-27 03:25:04 +04:00
|
|
|
## END FILE LOAD AND SAVE METHODS
|
2009-04-25 01:05:50 +04:00
|
|
|
|
2009-06-16 16:39:11 +04:00
|
|
|
## COMMANDS METHODS
|
|
|
|
|
2009-05-15 02:48:24 +04:00
|
|
|
    def import_commits(self, remote_name):
        """Convert new git objects to hg changesets and persist the sha map."""
        self.import_git_objects(remote_name)
        self.save_map()
|
|
|
|
|
2009-08-01 20:55:54 +04:00
|
|
|
    def fetch(self, remote, heads):
        """Pull git objects from *remote* and convert them to hg.

        Local changesets are exported first so the graph walker can
        report accurate 'have's to the server.  *heads* restricts which
        remote refs are wanted (empty means everything).
        """
        self.export_commits()
        refs = self.fetch_pack(remote, heads)
        remote_name = self.remote_name(remote)

        if refs:
            self.import_git_objects(remote_name, refs)
            self.import_tags(refs)
            self.update_hg_bookmarks(refs)
            if remote_name:
                self.update_remote_branches(remote_name, refs)
            elif not self.paths:
                # initial clone: no [paths] configured yet, so track
                # the remote under the name 'default'
                self.update_remote_branches('default', refs)
        else:
            self.ui.status(_("nothing new on the server\n"))

        self.save_map()
|
|
|
|
|
2009-08-04 15:30:16 +04:00
|
|
|
    def export_commits(self):
        """Export hg changesets, tags and bookmark refs into the git repo.

        The sha map is saved even if the export fails part-way, so work
        already done is not lost.
        """
        try:
            self.export_git_objects()
            self.export_hg_tags()
            self.update_references()
        finally:
            self.save_map()
|
2009-04-27 05:27:47 +04:00
|
|
|
|
2009-07-31 21:15:02 +04:00
|
|
|
    def get_refs(self, remote):
        """Compare local refs against *remote* without pushing anything.

        Abuses send_pack's ref-discovery callback purely to learn the
        remote's refs: the callback records old/new ref state and then
        returns {} so nothing is actually sent.  Returns (old, new) in
        mercurial's findoutgoing-style shape: a dict of remote hg nodes
        that would move, and a list of the local hg nodes to push.
        """
        self.export_commits()
        client, path = self.get_transport_and_path(remote)
        old_refs = {}
        new_refs = {}
        def changed(refs):
            # side effect: capture the server's refs and what we would
            # change, then refuse to push anything
            old_refs.update(refs)
            to_push = set(self.local_heads().values() + self.tags.values())
            new_refs.update(self.get_changed_refs(refs, to_push, True))
            # don't push anything
            return {}

        try:
            client.send_pack(path, changed, None)

            # refs whose target would actually move
            changed_refs = [ref for ref, sha in new_refs.iteritems()
                            if sha != old_refs.get(ref)]
            new = [bin(self.map_hg_get(new_refs[ref])) for ref in changed_refs]
            old = dict( (bin(self.map_hg_get(old_refs[r])), 1)
                       for r in changed_refs if r in old_refs)

            return old, new
        except HangupException:
            raise hgutil.Abort("the remote end hung up unexpectedly")
|
|
|
|
|
2009-07-31 01:09:53 +04:00
|
|
|
    def push(self, remote, revs, force):
        """Push hg changesets (converted to git objects) to *remote*.

        Reports each updated ref on the ui and records the new remote
        branch positions locally.
        """
        self.export_commits()
        changed_refs = self.upload_pack(remote, revs, force)
        remote_name = self.remote_name(remote)

        if remote_name and changed_refs:
            for ref, sha in changed_refs.iteritems():
                self.ui.status(" "+ remote_name + "::" + ref + " => GIT:" + sha[0:8] + "\n")

            self.update_remote_branches(remote_name, changed_refs)
|
|
|
|
|
2009-06-16 16:39:11 +04:00
|
|
|
def clear(self):
|
|
|
|
mapfile = self.repo.join(self.mapfile)
|
|
|
|
if os.path.exists(self.gitdir):
|
|
|
|
for root, dirs, files in os.walk(self.gitdir, topdown=False):
|
|
|
|
for name in files:
|
|
|
|
os.remove(os.path.join(root, name))
|
|
|
|
for name in dirs:
|
|
|
|
os.rmdir(os.path.join(root, name))
|
|
|
|
os.rmdir(self.gitdir)
|
|
|
|
if os.path.exists(mapfile):
|
|
|
|
os.remove(mapfile)
|
2009-05-18 03:42:34 +04:00
|
|
|
|
2009-06-16 16:39:11 +04:00
|
|
|
## CHANGESET CONVERSION METHODS
|
2009-05-18 02:29:18 +04:00
|
|
|
|
2009-04-27 23:26:44 +04:00
|
|
|
    def export_git_objects(self):
        """Convert every hg changeset not yet in the sha map to git."""
        self.ui.status(_("importing Hg objects into Git\n"))
        self.init_if_missing()

        nodes = [self.repo.lookup(n) for n in self.repo]
        # only changesets we have never converted before
        export = [node for node in nodes if not hex(node) in self._map_hg]
        total = len(export)
        if total:
            # field width for the right-aligned progress counter
            magnitude = int(math.log(total, 10)) + 1
        else:
            magnitude = 1
        for i, rev in enumerate(export):
            if i%100 == 0:
                self.ui.status(_("at: %*d/%d\n") % (magnitude, i, total))

            ctx = self.repo.changectx(rev)
            state = ctx.extra().get('hg-git', None)
            if state == 'octopus':
                # intermediate changeset of an exploded octopus merge;
                # it is folded back into one git commit via the
                # 'octopus-done' changeset, so skip it here
                self.ui.debug("revision %d is a part of octopus explosion\n" % ctx.rev())
                continue
            self.export_hg_commit(rev)
|
2009-04-29 06:33:03 +04:00
|
|
|
|
2009-04-28 03:15:48 +04:00
|
|
|
# convert this commit into git objects
|
|
|
|
# go through the manifest, convert all blobs/trees we don't have
|
|
|
|
# write the commit object (with metadata info)
|
2009-04-27 23:26:44 +04:00
|
|
|
    def export_hg_commit(self, rev):
        """Convert hg changeset *rev* into a git Commit object.

        Writes the commit (with its tree and blobs, via iterblobs) into
        the git object store, records the sha pair in the map and
        returns the new git commit sha.
        """
        self.ui.note(_("converting revision %s\n") % rev)

        # force UTF-8 while talking to dulwich; restored at the end
        oldenc = self.swap_out_encoding()

        ctx = self.repo.changectx(rev)
        extra = ctx.extra()

        commit = Commit()

        (time, timezone) = ctx.date()
        commit.author = self.get_git_author(ctx)
        commit.author_time = int(time)
        # hg stores seconds west of UTC, git seconds east -- negate
        commit.author_timezone = -timezone

        if 'committer' in extra:
            # fixup timezone
            (name, timestamp, timezone) = extra['committer'].rsplit(' ', 2)
            commit.committer = name
            # NOTE(review): timestamp stays a str here while the else
            # branch assigns an int -- confirm dulwich accepts both
            commit.commit_time = timestamp

            # work around a timezone format change
            if int(timezone) % 60 != 0: #pragma: no cover
                timezone = parse_timezone(timezone)
            else:
                timezone = -int(timezone)
            commit.commit_timezone = timezone
        else:
            # no separate committer recorded: committer == author
            commit.committer = commit.author
            commit.commit_time = commit.author_time
            commit.commit_timezone = commit.author_timezone

        commit.parents = []
        for parent in self.get_git_parents(ctx):
            hgsha = hex(parent.node())
            git_sha = self.map_git_get(hgsha)
            if git_sha:
                commit.parents.append(git_sha)

        commit.message = self.get_git_message(ctx)

        if 'encoding' in extra:
            commit.encoding = extra['encoding']

        tree_sha = commit_tree(self.git.object_store, self.iterblobs(ctx))
        commit.tree = tree_sha

        self.git.object_store.add_object(commit)
        self.map_set(commit.id, ctx.hex())

        self.swap_out_encoding(oldenc)
        return commit.id
|
2009-04-30 23:46:54 +04:00
|
|
|
|
2009-08-04 16:19:30 +04:00
|
|
|
def get_git_author(self, ctx):
|
2009-07-25 00:23:35 +04:00
|
|
|
# hg authors might not have emails
|
|
|
|
author = ctx.user()
|
|
|
|
|
|
|
|
# check for git author pattern compliance
|
|
|
|
regex = re.compile('^(.*?) \<(.*?)\>(.*)$')
|
|
|
|
a = regex.match(author)
|
|
|
|
|
|
|
|
if a:
|
|
|
|
name = a.group(1)
|
|
|
|
email = a.group(2)
|
|
|
|
if len(a.group(3)) > 0:
|
|
|
|
name += ' ext:(' + urllib.quote(a.group(3)) + ')'
|
|
|
|
author = name + ' <' + email + '>'
|
2009-06-03 06:33:48 +04:00
|
|
|
else:
|
2009-07-25 00:23:35 +04:00
|
|
|
author = author + ' <none@none>'
|
|
|
|
|
2009-08-04 16:19:30 +04:00
|
|
|
if 'author' in ctx.extra():
|
|
|
|
author = apply_delta(author, ctx.extra()['author'])
|
2009-06-18 19:49:13 +04:00
|
|
|
|
2009-08-04 16:19:30 +04:00
|
|
|
return author
|
2009-08-02 22:53:08 +04:00
|
|
|
|
2009-08-04 16:19:30 +04:00
|
|
|
    def get_git_parents(self, ctx):
        """Return *ctx*'s parents, imploding exploded octopus merges.

        Hg supports at most two parents, so an N-parent git merge is
        imported as a chain of 2-parent changesets marked 'octopus',
        terminated by an 'octopus-done' changeset.  Starting from the
        terminator, this walks the chain back into one parent list.
        """
        def is_octopus_part(ctx):
            return ctx.extra().get('hg-git', None) in ('octopus', 'octopus-done')

        parents = []
        if ctx.extra().get('hg-git', None) == 'octopus-done':
            # implode octopus parents
            part = ctx
            while is_octopus_part(part):
                (p1, p2) = part.parents()
                # first parent of each chain link is a real parent
                assert not is_octopus_part(p1)
                parents.append(p1)
                part = p2
            # p2 left the loop as the first non-octopus ancestor
            parents.append(p2)
        else:
            parents = ctx.parents()

        return parents
|
2009-08-04 15:32:01 +04:00
|
|
|
|
2009-08-04 16:19:30 +04:00
|
|
|
    def get_git_message(self, ctx):
        """Render *ctx*'s description as a git commit message.

        Hg-only metadata that git cannot represent natively (named
        branch, file renames, misc changeset extras) is appended in a
        parseable '--HG--' trailer; extract_hg_metadata is the inverse.
        """
        extra = ctx.extra()

        message = ctx.description() + "\n"
        if 'message' in extra:
            # restore the byte-exact original message from the delta
            message = apply_delta(message, extra['message'])

        # HG EXTRA INFORMATION
        add_extras = False
        extra_message = ''
        if not ctx.branch() == 'default':
            add_extras = True
            extra_message += "branch : " + ctx.branch() + "\n"

        renames = []
        for f in ctx.files():
            if f not in ctx.manifest():
                # removed file: no rename info to record
                continue
            rename = ctx.filectx(f).renamed()
            if rename:
                renames.append((rename[0], f))

        if renames:
            add_extras = True
            for oldfile, newfile in renames:
                extra_message += "rename : " + oldfile + " => " + newfile + "\n"

        for key, value in extra.iteritems():
            if key in ('author', 'committer', 'encoding', 'message', 'branch', 'hg-git'):
                # these are carried through dedicated channels
                continue
            else:
                add_extras = True
                extra_message += "extra : " + key + " : " + urllib.quote(value) + "\n"

        if add_extras:
            message += "\n--HG--\n" + extra_message

        return message
|
2009-07-03 00:58:51 +04:00
|
|
|
|
|
|
|
    def iterblobs(self, ctx):
        """Yield (path, blob sha, git mode) for every file in *ctx*.

        Blobs not yet in the git object store are created and recorded
        in the sha map on the fly.
        """
        for f in ctx:
            fctx = ctx[f]
            blobid = self.map_git_get(hex(fctx.filenode()))

            if not blobid:
                blob = Blob.from_string(fctx.data())
                self.git.object_store.add_object(blob)
                self.map_set(blob.id, hex(fctx.filenode()))
                blobid = blob.id

            if 'l' in ctx.flags(f):
                mode = 0120000   # symlink
            elif 'x' in ctx.flags(f):
                mode = 0100755   # executable file
            else:
                mode = 0100644   # regular file

            yield f, blobid, mode
|
2009-04-29 06:33:03 +04:00
|
|
|
|
2009-06-16 16:39:11 +04:00
|
|
|
    def import_git_objects(self, remote_name=None, refs=None):
        """Walk git history from the *refs* heads and import new commits.

        Commits are converted oldest-first (topologically sorted) so a
        changeset's parents always exist before it is committed.
        """
        self.ui.status(_("importing Git objects into Hg\n"))
        self.init_if_missing()

        # import heads and fetched tags as remote references
        todo = []
        done = set()
        convert_list = {}

        # get a list of all the head shas
        seenheads = set()
        if refs is None:
            refs = self.git.refs
        if refs:
            for sha in refs.itervalues():
                # refs contains all the refs in the server, not just the ones
                # we are pulling
                if sha in self.git.object_store:
                    obj = self.git.get_object(sha)
                    while isinstance(obj, Tag):
                        # peel annotated tags down to the commit
                        obj_type, sha = obj.get_object()
                        obj = self.git.get_object(sha)
                    if isinstance (obj, Commit) and sha not in seenheads:
                        seenheads.add(sha)
                        todo.append(sha)

        # traverse the heads getting a list of all the unique commits
        while todo:
            sha = todo.pop()
            assert isinstance(sha, str)
            if sha in done:
                continue
            done.add(sha)
            obj = self.git.get_object(sha)
            assert isinstance(obj, Commit)
            convert_list[sha] = obj
            todo.extend([p for p in obj.parents if p not in done])

        # sort the commits
        commits = toposort.TopoSort(convert_list).items()

        # drop commits that were already converted on a previous run
        commits = [commit for commit in commits if not commit in self._map_git]
        # import each of the commits, oldest first
        total = len(commits)
        if total:
            # field width for the right-aligned progress counter
            magnitude = int(math.log(total, 10)) + 1
        else:
            magnitude = 1
        for i, csha in enumerate(commits):
            if i%100 == 0:
                self.ui.status(_("at: %*d/%d\n") % (magnitude, i, total))
            commit = convert_list[csha]
            self.import_git_commit(commit)
|
2009-06-16 16:39:11 +04:00
|
|
|
|
|
|
|
    def import_git_commit(self, commit):
        """Convert a git *commit* into an hg changeset and map the pair.

        Restores hg metadata from the '--HG--' message trailer, rebuilds
        the author string, replays file changes through a memctx, and
        explodes >2-parent merges into an octopus chain.
        """
        self.ui.debug(_("importing: %s\n") % commit.id)

        # pull hg-specific metadata back out of the message trailer
        (strip_message, hg_renames, hg_branch, extra) = self.extract_hg_metadata(commit.message)

        # get a list of the changed, added, removed files
        files = self.get_files_changed(commit)

        # git stores seconds east of UTC, hg seconds west -- negate
        date = (commit.author_time, -commit.author_timezone)
        text = strip_message

        origtext = text
        try:
            text.decode('utf-8')
        except UnicodeDecodeError:
            text = self.decode_guess(text, commit.encoding)

        # hg normalizes message whitespace; keep a delta so the exact
        # git text can be reproduced on export
        text = '\n'.join([l.rstrip() for l in text.splitlines()]).strip('\n')
        if text + '\n' != origtext:
            extra['message'] = create_delta(text +'\n', origtext)

        author = commit.author

        # convert extra data back to the end
        if ' ext:' in commit.author:
            regex = re.compile('^(.*?)\ ext:\((.*)\) <(.*)\>$')
            m = regex.match(commit.author)
            if m:
                name = m.group(1)
                ex = urllib.unquote(m.group(2))
                email = m.group(3)
                author = name + ' <' + email + '>' + ex

        # drop the fake email added by get_git_author on export
        if ' <none@none>' in commit.author:
            author = commit.author[:-12]

        try:
            author.decode('utf-8')
        except UnicodeDecodeError:
            origauthor = author
            author = self.decode_guess(author, commit.encoding)
            extra['author'] = create_delta(author, origauthor)

        # force UTF-8 while committing; restored at the end
        oldenc = self.swap_out_encoding()

        def getfilectx(repo, memctx, f):
            delete, mode, sha = files[f]
            if delete:
                # memctx interprets IOError as 'file removed'
                raise IOError

            data = self.git[sha].data
            copied_path = hg_renames.get(f)
            e = self.convert_git_int_mode(mode)

            return context.memfilectx(f, data, 'l' in e, 'x' in e, copied_path)

        gparents = map(self.map_hg_get, commit.parents)
        p1, p2 = (nullid, nullid)
        octopus = False

        if len(gparents) > 1:
            # merge, possibly octopus
            def commit_octopus(p1, p2):
                ctx = context.memctx(self.repo, (p1, p2), text, list(files), getfilectx,
                                     author, date, {'hg-git': 'octopus'})
                return hex(self.repo.commitctx(ctx))

            octopus = len(gparents) > 2
            p2 = gparents.pop()
            p1 = gparents.pop()
            # fold remaining parents in pairwise, building the chain of
            # intermediate 'octopus' changesets
            while len(gparents) > 0:
                p2 = commit_octopus(p1, p2)
                p1 = gparents.pop()
        else:
            if gparents:
                p1 = gparents.pop()

        pa = None
        if not (p2 == nullid):
            node1 = self.repo.changectx(p1)
            node2 = self.repo.changectx(p2)
            pa = node1.ancestor(node2)

        # if named branch, add to extra
        if hg_branch:
            extra['branch'] = hg_branch

        # if committer is different than author, add it to extra
        if commit.author != commit.committer \
               or commit.author_time != commit.commit_time \
               or commit.author_timezone != commit.commit_timezone:
            extra['committer'] = "%s %d %d" % (commit.committer, commit.commit_time, -commit.commit_timezone)

        if commit.encoding:
            extra['encoding'] = commit.encoding

        if hg_branch:
            extra['branch'] = hg_branch

        # mark the terminator so export can re-implode the chain
        if octopus:
            extra['hg-git'] ='octopus-done'

        ctx = context.memctx(self.repo, (p1, p2), text, list(files), getfilectx,
                             author, date, extra)

        node = self.repo.commitctx(ctx)

        self.swap_out_encoding(oldenc)

        # save changeset to mapping file
        cs = hex(node)
        self.map_set(commit.id, cs)
|
|
|
|
|
|
|
|
## PACK UPLOADING AND FETCHING
|
2009-04-27 05:27:47 +04:00
|
|
|
|
2009-07-31 01:09:53 +04:00
|
|
|
    def upload_pack(self, remote, revs, force):
        """Send missing git objects to *remote*; return the refs changed."""
        client, path = self.get_transport_and_path(remote)
        def changed(refs):
            # push the requested revs, or every bookmark head and tag
            to_push = revs or set(self.local_heads().values() + self.tags.values())
            return self.get_changed_refs(refs, to_push, force)

        genpack = self.git.object_store.generate_pack_contents
        try:
            self.ui.status(_("creating and sending data\n"))
            changed_refs = client.send_pack(path, changed, genpack)
            return changed_refs
        except HangupException:
            raise hgutil.Abort("the remote end hung up unexpectedly")
|
|
|
|
|
|
|
|
    def get_changed_refs(self, refs, revs, force):
        """Compute the ref updates needed to push *revs*.

        *refs* is the remote's current ref dict; returns a copy with
        the refs we intend to move.  Aborts on a rev with no bookmark
        or tag, and on non-fast-forward updates unless *force* is set.
        """
        new_refs = refs.copy()

        #The remote repo is empty and the local one doesn't have bookmarks/tags
        # NOTE(review): assumes refs is non-empty; an empty dict would
        # raise IndexError here -- presumably dulwich always reports at
        # least 'capabilities^{}' for an empty remote; confirm
        if refs.keys()[0] == 'capabilities^{}':
            del new_refs['capabilities^{}']
            if not self.local_heads():
                # bootstrap a 'master' bookmark at tip so there is
                # something to push
                tip = hex(self.repo.lookup('tip'))
                bookmarks.bookmark(self.ui, self.repo, 'master', tip, force=True)
                bookmarks.setcurrent(self.repo, 'master')
                new_refs['refs/heads/master'] = self.map_git_get(tip)

        for rev in revs:
            ctx = self.repo[rev]
            heads = [t for t in ctx.tags() if t in self.local_heads()]
            tags = [t for t in ctx.tags() if t in self.tags]

            if not (heads or tags):
                raise hgutil.Abort("revision %s cannot be pushed since"
                                   " it doesn't have a ref" % ctx)

            for r in heads + tags:
                if r in heads:
                    ref = 'refs/heads/'+r
                else:
                    ref = 'refs/tags/'+r

                if ref not in refs:
                    # brand new ref on the remote
                    new_refs[ref] = self.map_git_get(ctx.hex())
                elif new_refs[ref] in self._map_git:
                    # remote commit is known locally: allow only
                    # fast-forward moves unless forced
                    rctx = self.repo[self.map_hg_get(new_refs[ref])]
                    if rctx.ancestor(ctx) == rctx or force:
                        new_refs[ref] = self.map_git_get(ctx.hex())
                    else:
                        raise hgutil.Abort("pushing %s overwrites %s"
                                           % (ref, ctx))
                else:
                    # remote ref points at a commit we have never seen
                    raise hgutil.Abort("%s changed on the server, please pull "
                                       "and merge before pushing" % ref)

        return new_refs
|
2009-04-29 06:33:03 +04:00
|
|
|
|
|
|
|
|
2009-08-01 20:55:54 +04:00
|
|
|
def fetch_pack(self, remote_name, heads):
|
2009-06-08 22:15:58 +04:00
|
|
|
client, path = self.get_transport_and_path(remote_name)
|
2009-07-03 00:58:51 +04:00
|
|
|
graphwalker = self.git.get_graph_walker()
|
2009-08-01 20:55:54 +04:00
|
|
|
def determine_wants(refs):
|
|
|
|
if heads:
|
|
|
|
want = []
|
|
|
|
for h in heads:
|
|
|
|
r = [ref for ref in refs if ref.endswith('/'+h)]
|
|
|
|
if not r:
|
|
|
|
raise hgutil.Abort("ref %s not found on remote server")
|
|
|
|
elif len(r) == 1:
|
|
|
|
want.append(refs[r[0]])
|
|
|
|
else:
|
|
|
|
raise hgutil.Abort("ambiguous reference %s: %r"%(h, r))
|
|
|
|
else:
|
|
|
|
want = [sha for ref, sha in refs.iteritems()
|
|
|
|
if not ref.endswith('^{}')]
|
|
|
|
return want
|
2009-04-24 02:26:10 +04:00
|
|
|
f, commit = self.git.object_store.add_pack()
|
|
|
|
try:
|
2009-11-29 18:24:34 +03:00
|
|
|
try:
|
|
|
|
return client.fetch_pack(path, determine_wants, graphwalker, f.write, self.ui.status)
|
|
|
|
except HangupException:
|
|
|
|
raise hgutil.Abort("the remote end hung up unexpectedly")
|
2009-06-05 14:56:22 +04:00
|
|
|
finally:
|
2009-07-03 00:58:51 +04:00
|
|
|
commit()
|
2009-04-24 02:26:10 +04:00
|
|
|
|
2009-06-16 16:39:11 +04:00
|
|
|
## REFERENCES HANDLING
|
|
|
|
|
|
|
|
def update_references(self):
|
2009-06-23 22:20:15 +04:00
|
|
|
heads = self.local_heads()
|
2009-06-16 16:39:11 +04:00
|
|
|
|
2009-06-23 22:20:15 +04:00
|
|
|
# Create a local Git branch name for each
|
|
|
|
# Mercurial bookmark.
|
|
|
|
for key in heads:
|
2009-07-31 01:09:53 +04:00
|
|
|
self.git.refs['refs/heads/' + key] = self.map_git_get(heads[key])
|
2009-06-16 16:39:11 +04:00
|
|
|
|
|
|
|
    def export_hg_tags(self):
        """Publish hg tags as git tag refs and cache them in self.tags."""
        for tag, sha in self.repo.tags().iteritems():
            # only global (.hgtags) and git-derived tags; skip local ones
            if self.repo.tagtype(tag) in ('global', 'git'):
                self.git.refs['refs/tags/' + tag] = self.map_git_get(hex(sha))
                self.tags[tag] = hex(sha)
|
2009-06-16 16:39:11 +04:00
|
|
|
|
|
|
|
    def local_heads(self):
        """Return {bookmark name: hex hg sha} for all local bookmarks.

        Supports both the old (bookmarks.parse) and new
        (repo._bookmarks) mercurial bookmark APIs; when the bookmarks
        extension is unavailable, returns {}.
        """
        try:
            if getattr(bookmarks, 'parse', None):
                bms = bookmarks.parse(self.repo)
            else:
                bms = self.repo._bookmarks
            return dict([(bm, hex(bms[bm])) for bm in bms])
        except AttributeError: #pragma: no cover
            return {}
|
2009-06-16 16:39:11 +04:00
|
|
|
|
2009-06-19 01:38:09 +04:00
|
|
|
    def import_tags(self, refs):
        """Record incoming git tag refs in self.tags and persist them.

        Handles both lightweight tags (pointing at a commit) and
        annotated tags (a Tag object, possibly with a peeled '^{}'
        companion ref).
        """
        keys = refs.keys()
        if not keys:
            return
        for k in keys[:]:
            ref_name = k
            parts = k.split('/')
            if parts[0] == 'refs' and parts[1] == 'tags':
                ref_name = "/".join([v for v in parts[2:]])
                # refs contains all the refs in the server, not just
                # the ones we are pulling
                if refs[k] not in self.git.object_store:
                    continue
                if ref_name[-3:] == '^{}':
                    # peeled (dereferenced) form of an annotated tag
                    ref_name = ref_name[:-3]
                if not ref_name in self.repo.tags():
                    obj = self.git.get_object(refs[k])
                    sha = None
                    if isinstance (obj, Commit): # lightweight
                        sha = self.map_hg_get(refs[k])
                        self.tags[ref_name] = sha
                    elif isinstance (obj, Tag): # annotated
                        (obj_type, obj_sha) = obj.get_object()
                        obj = self.git.get_object(obj_sha)
                        if isinstance (obj, Commit):
                            sha = self.map_hg_get(obj_sha)
                            # TODO: better handling for annotated tags
                            self.tags[ref_name] = sha
        self.save_tags()
|
2009-06-05 14:56:22 +04:00
|
|
|
|
2009-06-16 17:44:19 +04:00
|
|
|
    def update_hg_bookmarks(self, refs):
        """Create or fast-forward hg bookmarks to match git branch refs."""
        try:
            # mercurial's bookmark API changed; support both vintages
            oldbm = getattr(bookmarks, 'parse', None)
            if oldbm:
                bms = bookmarks.parse(self.repo)
            else:
                bms = self.repo._bookmarks
            # strip the 'refs/heads/' prefix (11 chars)
            heads = dict([(ref[11:],refs[ref]) for ref in refs
                          if ref.startswith('refs/heads/')])

            for head, sha in heads.iteritems():
                # refs contains all the refs in the server, not just
                # the ones we are pulling
                if sha not in self.git.object_store:
                    continue
                hgsha = bin(self.map_hg_get(sha))
                if not head in bms:
                    # new branch
                    bms[head] = hgsha
                else:
                    bm = self.repo[bms[head]]
                    if bm.ancestor(self.repo[hgsha]) == bm:
                        # fast forward
                        bms[head] = hgsha
            if heads:
                if oldbm:
                    bookmarks.write(self.repo, bms)
                else:
                    self.repo._bookmarks = bms
                    bookmarks.write(self.repo)

        except AttributeError:
            self.ui.warn(_('creating bookmarks failed, do you have'
                           ' bookmarks enabled?\n'))
|
2009-04-30 03:18:37 +04:00
|
|
|
|
2009-06-16 17:44:19 +04:00
|
|
|
    def update_remote_branches(self, remote_name, refs):
        """Record remote branch positions as hg local tags and git refs."""
        # strip the 'refs/heads/' prefix (11 chars)
        heads = dict([(ref[11:],refs[ref]) for ref in refs
                      if ref.startswith('refs/heads/')])

        for head, sha in heads.iteritems():
            # refs contains all the refs in the server, not just the ones
            # we are pulling
            if sha not in self.git.object_store:
                continue
            hgsha = bin(self.map_hg_get(sha))
            tag = '%s/%s' % (remote_name, head)
            # local=True tag, e.g. 'default/master'
            self.repo.tag(tag, hgsha, '', True, None, None)

        # mirror the remote's refs under refs/remotes/<name>/ and copy
        # its tag refs verbatim
        for ref_name in refs:
            if ref_name.startswith('refs/heads'):
                new_ref = 'refs/remotes/%s/%s' % (remote_name, ref_name[10:])
                self.git.refs[new_ref] = refs[ref_name]
            elif ref_name.startswith('refs/tags'):
                self.git.refs[ref_name] = refs[ref_name]
|
2009-06-23 22:22:49 +04:00
|
|
|
|
|
|
|
|
2009-06-16 16:39:11 +04:00
|
|
|
## UTILITY FUNCTIONS
|
|
|
|
|
2009-04-29 22:50:56 +04:00
|
|
|
def convert_git_int_mode(self, mode):
|
2009-07-07 20:05:43 +04:00
|
|
|
# TODO: make these into constants
|
2009-04-29 22:50:56 +04:00
|
|
|
convert = {
|
2009-05-19 16:58:06 +04:00
|
|
|
0100644: '',
|
|
|
|
0100755: 'x',
|
|
|
|
0120000: 'l'}
|
2009-04-29 22:50:56 +04:00
|
|
|
if mode in convert:
|
|
|
|
return convert[mode]
|
|
|
|
return ''
|
2009-04-30 03:18:37 +04:00
|
|
|
|
2009-05-01 00:54:33 +04:00
|
|
|
def extract_hg_metadata(self, message):
|
2009-07-29 02:07:37 +04:00
|
|
|
split = message.split("\n--HG--\n", 1)
|
2009-05-01 00:54:33 +04:00
|
|
|
renames = {}
|
2009-05-30 00:10:14 +04:00
|
|
|
extra = {}
|
2009-05-08 02:07:18 +04:00
|
|
|
branch = False
|
2009-05-01 00:54:33 +04:00
|
|
|
if len(split) == 2:
|
|
|
|
message, meta = split
|
|
|
|
lines = meta.split("\n")
|
|
|
|
for line in lines:
|
|
|
|
if line == '':
|
2009-06-05 14:56:22 +04:00
|
|
|
continue
|
|
|
|
|
2009-05-01 00:54:33 +04:00
|
|
|
command, data = line.split(" : ", 1)
|
2009-06-05 14:56:22 +04:00
|
|
|
|
2009-05-01 00:54:33 +04:00
|
|
|
if command == 'rename':
|
|
|
|
before, after = data.split(" => ", 1)
|
|
|
|
renames[after] = before
|
|
|
|
if command == 'branch':
|
2009-05-08 02:07:18 +04:00
|
|
|
branch = data
|
2009-05-30 00:10:14 +04:00
|
|
|
if command == 'extra':
|
|
|
|
before, after = data.split(" : ", 1)
|
|
|
|
extra[before] = urllib.unquote(after)
|
2009-06-30 02:20:37 +04:00
|
|
|
return (message, renames, branch, extra)
|
2009-06-05 14:56:22 +04:00
|
|
|
|
2009-07-03 00:58:51 +04:00
|
|
|
def get_file(self, commit, f):
|
|
|
|
otree = self.git.tree(commit.tree)
|
|
|
|
parts = f.split('/')
|
|
|
|
for part in parts:
|
|
|
|
(mode, sha) = otree[part]
|
|
|
|
obj = self.git.get_object(sha)
|
|
|
|
if isinstance (obj, Blob):
|
|
|
|
return (mode, sha, obj._text)
|
|
|
|
elif isinstance(obj, Tree):
|
|
|
|
otree = obj
|
|
|
|
|
|
|
|
def get_files_changed(self, commit):
|
2009-12-25 10:56:20 +03:00
|
|
|
tree = commit.tree
|
|
|
|
btree = None
|
|
|
|
|
|
|
|
if commit.parents:
|
|
|
|
btree = self.git[commit.parents[0]].tree
|
|
|
|
|
|
|
|
changes = self.git.object_store.tree_changes(btree, tree)
|
|
|
|
files = {}
|
|
|
|
for (oldfile, newfile), (oldmode, newmode), (oldsha, newsha) in changes:
|
|
|
|
if newfile is None:
|
|
|
|
file = oldfile
|
|
|
|
delete = True
|
|
|
|
else:
|
|
|
|
file = newfile
|
|
|
|
delete = False
|
|
|
|
|
|
|
|
files[file] = (delete, newmode, newsha)
|
|
|
|
|
|
|
|
return files
|
2009-07-03 00:58:51 +04:00
|
|
|
|
2009-06-16 17:44:19 +04:00
|
|
|
def remote_name(self, remote):
|
|
|
|
names = [name for name, path in self.paths if path == remote]
|
|
|
|
if names:
|
|
|
|
return names[0]
|
|
|
|
|
2009-06-18 19:49:13 +04:00
|
|
|
# Stolen from hgsubversion
|
|
|
|
def swap_out_encoding(self, new_encoding='UTF-8'):
|
|
|
|
try:
|
|
|
|
from mercurial import encoding
|
|
|
|
old = encoding.encoding
|
|
|
|
encoding.encoding = new_encoding
|
|
|
|
except ImportError:
|
|
|
|
old = hgutil._encoding
|
|
|
|
hgutil._encoding = new_encoding
|
|
|
|
return old
|
|
|
|
|
|
|
|
def decode_guess(self, string, encoding):
|
|
|
|
# text is not valid utf-8, try to make sense of it
|
|
|
|
if encoding:
|
|
|
|
try:
|
|
|
|
return string.decode(encoding).encode('utf-8')
|
|
|
|
except UnicodeDecodeError:
|
|
|
|
pass
|
|
|
|
|
|
|
|
try:
|
|
|
|
return string.decode('latin-1').encode('utf-8')
|
|
|
|
except UnicodeDecodeError:
|
|
|
|
return string.decode('ascii', 'replace').encode('utf-8')
|
|
|
|
|
2009-04-24 02:26:10 +04:00
|
|
|
def get_transport_and_path(self, uri):
|
|
|
|
from dulwich.client import TCPGitClient, SSHGitClient, SubprocessGitClient
|
2009-04-28 22:08:54 +04:00
|
|
|
for handler, transport in (("git://", TCPGitClient), ("git@", SSHGitClient), ("git+ssh://", SSHGitClient)):
|
2009-04-24 02:26:10 +04:00
|
|
|
if uri.startswith(handler):
|
2009-10-19 19:48:07 +04:00
|
|
|
# We need to split around : or /, whatever comes first
|
|
|
|
hostpath = uri[len(handler):]
|
|
|
|
if (hostpath.find(':') > 0 and hostpath.find('/') > 0):
|
|
|
|
# we have both, whatever is first wins.
|
|
|
|
if hostpath.find(':') < hostpath.find('/'):
|
|
|
|
hostpath_seper = ':'
|
|
|
|
else:
|
|
|
|
hostpath_seper = '/'
|
|
|
|
elif hostpath.find(':') > 0:
|
|
|
|
hostpath_seper = ':'
|
|
|
|
else:
|
|
|
|
hostpath_seper = '/'
|
|
|
|
|
|
|
|
host, path = hostpath.split(hostpath_seper, 1)
|
|
|
|
if hostpath_seper == '/':
|
|
|
|
transportpath = '/' + path
|
|
|
|
else:
|
|
|
|
transportpath = path
|
|
|
|
return transport(host, thin_packs=False), transportpath
|
2009-04-24 02:26:10 +04:00
|
|
|
# if its not git or git+ssh, try a local url..
|
2009-08-01 21:33:58 +04:00
|
|
|
return SubprocessGitClient(thin_packs=False), uri
|