2015-04-15 23:10:15 +03:00
|
|
|
import collections
|
|
|
|
import itertools
|
|
|
|
import os
|
|
|
|
import re
|
2018-12-07 03:07:27 +03:00
|
|
|
import shutil
|
2015-04-15 23:10:15 +03:00
|
|
|
import StringIO
|
|
|
|
|
2018-07-06 03:45:27 +03:00
|
|
|
import _ssh
|
|
|
|
import compat
|
|
|
|
import git2hg
|
|
|
|
import hg2git
|
|
|
|
import util
|
|
|
|
from dulwich import client, config as dul_config, diff_tree
|
|
|
|
from dulwich.errors import GitProtocolError, HangupException
|
2015-04-15 23:10:15 +03:00
|
|
|
from dulwich.objects import Blob, Commit, Tag, Tree, parse_timezone
|
2018-07-06 03:45:27 +03:00
|
|
|
from dulwich.pack import apply_delta, create_delta
|
2013-08-12 18:20:41 +04:00
|
|
|
from dulwich.repo import Repo, check_ref_format
|
2019-01-30 03:25:33 +03:00
|
|
|
from edenscm.mercurial import (
|
2015-12-31 23:25:00 +03:00
|
|
|
bookmarks,
|
|
|
|
commands,
|
|
|
|
context,
|
2016-01-01 02:49:17 +03:00
|
|
|
encoding,
|
2018-01-09 16:53:20 +03:00
|
|
|
error,
|
2015-05-16 01:02:24 +03:00
|
|
|
phases,
|
2018-03-21 23:45:33 +03:00
|
|
|
progress,
|
2018-01-09 17:08:01 +03:00
|
|
|
pycompat,
|
2015-12-31 23:25:00 +03:00
|
|
|
util as hgutil,
|
2017-10-26 19:20:00 +03:00
|
|
|
vfs as vfsmod,
|
2015-12-31 23:25:00 +03:00
|
|
|
)
|
2019-01-30 03:25:33 +03:00
|
|
|
from edenscm.mercurial.i18n import _
|
|
|
|
from edenscm.mercurial.node import bin, hex, nullid, nullrev
|
2019-02-02 04:48:29 +03:00
|
|
|
from edenscm.mercurial.rust.bindings import nodemap
|
2011-05-24 22:16:45 +04:00
|
|
|
from overlay import overlayrepo
|
2009-04-28 03:15:48 +04:00
|
|
|
|
2018-07-06 03:45:27 +03:00
|
|
|
|
2018-05-30 12:16:33 +03:00
|
|
|
# Git author line: "Name <email>", with optional trailing text after the
# closing ">".  Raw strings are used throughout this constants block: the
# original patterns were plain strings containing invalid escape sequences
# such as "\<" and "\s", which Python only tolerates with a warning (and
# rejects outright in newer versions).  The compiled patterns are identical.
RE_GIT_AUTHOR = re.compile(r"^(.*?) ?\<(.*?)(?:\>(.*))?$")

# Characters that must never appear in a git author/committer name.
RE_GIT_SANITIZE_AUTHOR = re.compile(r"[<>\n]")

# Author carrying hg-git round-trip metadata: "name ext:(...) <email>".
RE_GIT_AUTHOR_EXTRA = re.compile(r"^(.*?)\ ext:\((.*)\) <(.*)\>$")

# Extra keys of the form "GIT<n>-<key>" used to round-trip git commit extras.
RE_GIT_EXTRA_KEY = re.compile(r"GIT([0-9]*)-(.*)")

# Test for git:// and git+ssh:// URI.
# Support several URL forms, including separating the
# host and path with either a / or : (sepr)
RE_GIT_URI = re.compile(
    r"^(?P<scheme>git([+]ssh)?://)(?P<host>.*?)(:(?P<port>\d+))?"
    r"(?P<sepr>[:/])(?P<path>.*)$"
)

# Line terminators emitted by the git server-side progress stream.
RE_NEWLINES = re.compile(r"[\r\n]")
# "(<pos>/<total>)" counters inside git progress messages.
RE_GIT_PROGRESS = re.compile(r"\((\d+)/(\d+)\)")

# Separator used in the git.authors mapping file ("hg author = git author").
RE_AUTHOR_FILE = re.compile(r"\s*=\s*")

CALLBACK_BUFFER = ""
|
2012-09-22 06:43:50 +04:00
|
|
|
|
2016-01-12 03:28:28 +03:00
|
|
|
|
2011-06-16 10:40:12 +04:00
|
|
|
class GitProgress(object):
    """Translate git server-side progress strings into mercurial progress.

    git emits lines such as ``Counting objects: 33640, done.`` and
    ``Compressing objects: 0% (1/9955)   \r``; ``progress`` parses them and
    drives the supplied progress bar, echoing anything that does not look
    like a counter through ``ui.note``.
    """

    def __init__(self, ui, prog):
        self.ui = ui
        self.prog = prog
        # topic of the progress bar currently being driven
        self.lasttopic = None
        # partial line carried over between progress() calls
        self.msgbuf = ""

    def progress(self, msg):
        # Split on CR/LF.  The final element is an incomplete line; buffer
        # it until the next call delivers the rest.
        lines = RE_NEWLINES.split(self.msgbuf + msg)
        self.msgbuf = lines.pop()

        for line in lines:
            parts = line.split(":", 1)
            payload = parts.pop()
            if not parts:
                # No "topic:" prefix -- just echo the text verbatim.
                self.ui.note(payload + "\n")
                continue
            topic = parts[0]

            counter = RE_GIT_PROGRESS.search(payload)
            if counter is None:
                self.ui.note(line + "\n")
            else:
                pos, total = [int(g) for g in counter.group(1, 2)]
                if topic != self.lasttopic:
                    # New phase: restart the bar with the new total.
                    self.prog.reset(topic, total=total)
                    self.lasttopic = topic
                self.prog.value = pos
|
|
|
|
|
2011-06-16 10:40:12 +04:00
|
|
|
|
2009-04-24 02:26:10 +04:00
|
|
|
class GitHandler(object):
    """Bidirectional converter between a mercurial repo and its git mirror."""

    # file (in the shared vfs) recording the git SHA <-> hg SHA map
    map_file = "git-mapfile"
    # file caching the refs last seen on each configured remote
    remote_refs_file = "git-remote-refs"
    # file caching git tags mirrored into mercurial
    tags_file = "git-tags"
|
2009-04-27 05:27:47 +04:00
|
|
|
|
2009-04-24 02:26:10 +04:00
|
|
|
def __init__(self, dest_repo, ui):
    """Bind the handler to *dest_repo* and read the relevant config."""
    self.repo = dest_repo
    self.ui = ui
    # conversion state lives in the shared vfs (repo may be a share)
    self.vfs = self.repo.sharedvfs

    if compat.config(ui, "bool", "git", "intree"):
        # in-tree layout: the git repo sits beside the working copy
        self.gitdir = self.repo.wvfs.join(".git")
    else:
        self.gitdir = self.vfs.join("git")

    self.init_author_file()

    self.paths = ui.configitems("paths")

    self.branch_bookmark_suffix = compat.config(
        ui, "string", "git", "branch_bookmark_suffix"
    )

    # git<->hg SHA map, loaded lazily via the _map property
    self._map_real = None
    # hg nodes added to the map since the last save_map() call
    self._map_hg_modifications = set()
    self.load_tags()
    # remote refs cache, loaded lazily via the remote_refs property
    self._remote_refs = None
|
2009-04-27 05:27:47 +04:00
|
|
|
|
2012-10-17 19:50:55 +04:00
|
|
|
@property
def _map(self):
    """The git<->hg SHA map, read from disk on first access."""
    if self._map_real is not None:
        return self._map_real
    self.load_map()
    return self._map_real
|
2012-10-17 19:50:55 +04:00
|
|
|
|
2014-09-03 21:46:42 +04:00
|
|
|
@property
def remote_refs(self):
    """The remote-ref cache, read from disk on first access."""
    if self._remote_refs is not None:
        return self._remote_refs
    self.load_remote_refs()
    return self._remote_refs
|
|
|
|
|
2014-02-26 07:51:02 +04:00
|
|
|
@hgutil.propertycache
def git(self):
    """The dulwich ``Repo`` backing this handler, created on demand."""
    # Dulwich is going to try and join unicode ref names against the
    # repository path to read unpacked refs, which does not match hg's
    # bytes-only view of filesystems.  As a workaround, decode our
    # (bytes) path in hg's active encoding and hope for the best.
    gitpath = self.gitdir.decode(encoding.encoding, encoding.encodingmode)

    # make the git data directory if it is not already there
    if not os.path.exists(self.gitdir):
        # disallowinitbare prevents hggit from creating a .hg/git
        # directory -- useful when .hg/git is managed externally.
        if self.ui.configbool("hggit", "disallowinitbare"):
            raise error.Abort(_("missing .hg/git repo"))
        os.mkdir(self.gitdir)
        return Repo.init_bare(gitpath)

    return Repo(gitpath)
|
2014-02-26 07:51:02 +04:00
|
|
|
|
2012-02-23 22:49:07 +04:00
|
|
|
def init_author_file(self):
    """Populate ``self.author_map`` from the file named by config git.authors."""
    self.author_map = {}
    authors_path = compat.config(self.ui, "string", "git", "authors")
    if not authors_path:
        return
    with open(self.repo.wvfs.join(authors_path)) as f:
        for raw in f:
            raw = raw.strip()
            # skip blank lines and comments
            if not raw or raw.startswith("#"):
                continue
            # NOTE(review): maxsplit=2 means a line containing two "="
            # separators raises ValueError on unpacking -- presumably
            # author entries never contain a second "="; confirm before
            # changing.
            from_, to = RE_AUTHOR_FILE.split(raw, 2)
            self.author_map[from_] = to
|
|
|
|
|
2015-04-16 01:31:06 +03:00
|
|
|
# FILE LOAD AND SAVE METHODS
|
2009-04-27 03:25:04 +04:00
|
|
|
|
2009-04-27 23:26:44 +04:00
|
|
|
def map_set(self, gitsha, hgsha):
    """Record the pairing of hex *gitsha* with hex *hgsha* in the map."""
    hgnode = bin(hgsha)
    self._map.add(bin(gitsha), hgnode)
    # remember what changed so save_map() can persist only new entries
    self._map_hg_modifications.add(hgnode)
|
2009-04-27 23:26:44 +04:00
|
|
|
|
|
|
|
def map_hg_get(self, gitsha):
    """Return the hex hg SHA mapped to hex *gitsha*, or None if unmapped."""
    hgnode = self._map.lookupbyfirst(bin(gitsha))
    return None if hgnode is None else hex(hgnode)
|
2009-04-27 23:26:44 +04:00
|
|
|
|
|
|
|
def map_git_get(self, hgsha):
    """Return the hex git SHA mapped to hex *hgsha*, or None if unmapped."""
    gitnode = self._map.lookupbysecond(bin(hgsha))
    return None if gitnode is None else hex(gitnode)
|
2009-04-29 06:33:03 +04:00
|
|
|
|
2009-04-25 01:05:50 +04:00
|
|
|
def load_map(self):
    """Load the git<->hg SHA map into ``self._map_real``.

    Two backends exist: an indexedlog-based nodemap (directory named
    "<map_file>-log", selected by config hggit.indexedlognodemap) and the
    legacy flat-file GitMap.  On first use of the indexedlog backend the
    legacy file, if present, is migrated into it.
    """
    if self.ui.configbool("hggit", "indexedlognodemap", False):
        dir = self.vfs.join(self.map_file + "-log")

        # Check for log existence before we instantiate it (instantiation
        # creates the directory).
        logexists = self.vfs.exists(dir)

        self._map_real = nodemap.nodemap(dir)

        # If the indexedlog map doesn't exist, populate it from the
        # legacy flat-file map.
        if not logexists and self.vfs.exists(self.map_file):
            oldmap = GitMap(self.vfs(self.map_file))
            for gitsha, hgsha in oldmap.items():
                self._map_real.add(gitsha, hgsha)
            self._map_real.flush()
    else:
        # Legacy path: feed the flat file (or nothing) to GitMap.
        content = []
        if os.path.exists(self.vfs.join(self.map_file)):
            content = self.vfs(self.map_file)

        self._map_real = GitMap(content)
|
2009-04-27 05:27:47 +04:00
|
|
|
|
2015-04-10 06:24:10 +03:00
|
|
|
def save_map(self, map_file):
    """Append entries added since the last save to *map_file*.

    Runs under the repo wlock; the flat file is written through an
    atomictemp file so readers never see a partial write.  Raises
    KeyError if a modified hg node has no git counterpart in the map.
    """
    wlock = self.repo.wlock()
    try:
        map = self._map

        if self.ui.configbool("hggit", "indexedlognodemap", False):
            # If using index log, also write the flat map, so we can roll
            # back easily.
            self._map_real.flush()

        file = self.vfs(map_file, "a+", atomictemp=True)
        # accumulate in memory first, then write in one call
        buf = hgutil.stringio()
        bwrite = buf.write
        # Append new entries to the end of the file so we can search
        # backwards from the end for recently added entries.
        for hgnode in self._map_hg_modifications:
            gitnode = map.lookupbysecond(hgnode)
            if gitnode is None:
                raise KeyError(hex(hgnode))
            bwrite("%s %s\n" % (hex(gitnode), hex(hgnode)))
        self._map_hg_modifications.clear()
        file.write(buf.getvalue())
        buf.close()
        # If this complains, atomictempfile no longer has close
        file.close()
    finally:
        wlock.release()
|
2009-04-27 05:27:47 +04:00
|
|
|
|
2009-06-19 01:38:09 +04:00
|
|
|
def load_tags(self):
    """Read the cached git tags file into ``self.tags`` ({name: sha})."""
    self.tags = {}
    if not os.path.exists(self.vfs.join(self.tags_file)):
        return
    for line in self.vfs(self.tags_file):
        # each line is "<sha> <tag name>"; tag names may contain spaces
        sha, name = line.strip().split(" ", 1)
        self.tags[name] = sha
|
|
|
|
|
|
|
|
def save_tags(self):
    """Write ``self.tags`` to the tags file, skipping global hg tags."""
    fobj = self.vfs(self.tags_file, "w+", atomictemp=True)
    for name, sha in sorted(self.tags.iteritems()):
        # tags that are already global in hg need no mirroring
        if self.repo.tagtype(name) != "global":
            fobj.write("%s %s\n" % (sha, name))
    # If this complains, atomictempfile no longer has close
    fobj.close()
|
2009-06-19 01:38:09 +04:00
|
|
|
|
2014-09-03 21:46:42 +04:00
|
|
|
def load_remote_refs(self):
    """Populate ``self._remote_refs`` from git's refs/remotes directory.

    Maps "remotename/refname" -> hg node for each configured path (or
    "default" when no paths are configured).  Unreadable or unmapped
    refs are silently skipped.
    """
    self._remote_refs = {}
    refdir = os.path.join(self.git.path, "refs", "remotes")

    paths = self.paths
    # if no paths are configured, still check 'default'
    if not paths:
        paths = [("default", None)]

    # we avoid using dulwich's refs method because it is incredibly slow;
    # on a repo with a few hundred branches and a few thousand tags,
    # dulwich took about 200ms
    for p in paths:
        remotedir = os.path.join(refdir, p[0])
        for root, dirs, files in os.walk(remotedir):
            for f in files:
                try:
                    ref = root.replace(refdir + pycompat.ossep, "") + "/"
                    node = open(os.path.join(root, f)).read().strip()
                    hgsha = self._map.lookupbyfirst(bin(node))
                    if hgsha is None:
                        # NOTE(review): node is already a hex string here,
                        # so hex(node) re-hexlifies it -- the message is
                        # only used as an exception payload that is
                        # swallowed below, but confirm the intent.
                        raise KeyError(hex(node))
                    self._remote_refs[ref + f] = hgsha
                except (KeyError, IOError):
                    # best effort: skip refs we cannot read or map
                    pass
|
2014-09-03 21:55:28 +04:00
|
|
|
|
2015-04-16 01:31:06 +03:00
|
|
|
# END FILE LOAD AND SAVE METHODS
|
2009-04-25 01:05:50 +04:00
|
|
|
|
2015-04-16 01:31:06 +03:00
|
|
|
# COMMANDS METHODS
|
2009-06-16 16:39:11 +04:00
|
|
|
|
2009-05-15 02:48:24 +04:00
|
|
|
def import_commits(self, remote_name):
    """Import everything reachable from the local git repo's refs."""
    refs = self.git.refs.as_dict()
    self.import_git_objects(remote_name, self.filter_min_date(refs))
    self.update_hg_bookmarks(refs)
    # persist the git<->hg map after the conversion
    self.save_map(self.map_file)
|
2009-05-15 02:48:24 +04:00
|
|
|
|
2009-08-01 20:55:54 +04:00
|
|
|
def fetch(self, remote, heads):
    """Fetch *heads* from the git *remote* and convert them into hg.

    Returns 0 when nothing was imported, otherwise (heads delta + 1) or
    (heads delta - 1) following the localrepo.addchangegroup convention.
    """
    refs = self.fetch_pack(remote, heads)
    remote_name = self.remote_name(remote)

    # if remote returns a symref for HEAD, then let's store that
    rhead = None
    rnode = None
    oldheads = self.repo.changelog.heads()
    imported = 0
    if refs:
        filteredrefs = self.filter_min_date(self.filter_refs(refs, heads))
        imported = self.import_git_objects(remote_name, filteredrefs)
        self.import_tags(refs)
        self.update_hg_bookmarks(refs)

        try:
            symref = refs["HEAD"]
            if symref.startswith("refs/heads"):
                rhead = symref.replace("refs/heads/", "")

                # resolve the remote HEAD branch to a local hg node
                rnode = refs["refs/heads/%s" % rhead]
                hgrnode = self._map.lookupbyfirst(bin(rnode))
                if hgrnode is None:
                    raise KeyError(rnode)
                rnode = self.repo[hgrnode].node()
        except KeyError:
            # if there is any error make sure to clear the variables
            rhead = None
            rnode = None

        if remote_name:
            self.update_remote_branches(remote_name, refs)
        elif not self.paths:
            # intial cloning
            self.update_remote_branches("default", refs)

            # "Activate" a tipmost bookmark.
            bms = self.repo["tip"].bookmarks()

            # override the 'tipmost' behavior if we know the remote HEAD
            if rnode:
                # make sure the bookmark exists; at the point the remote
                # branches has already been set up
                suffix = self.branch_bookmark_suffix or ""
                changes = [(rhead + suffix, rnode)]
                util.updatebookmarks(self.repo, changes)
                bms = [rhead + suffix]

            if bms:
                try:
                    bookmarks.activate(self.repo, bms[0])
                except AttributeError:
                    # hg < 3.5
                    bookmarks.setcurrent(self.repo, bms[0])

    self.save_map(self.map_file)

    # also mark public any branches the user specified
    blist = [
        self.repo[branch].node() for branch in self.ui.configlist("git", "public")
    ]
    if rnode and self.ui.configbool("hggit", "usephases"):
        blist.append(rnode)

    if blist:
        lock = self.repo.lock()
        try:
            tr = self.repo.transaction("phase")
            try:
                phases.advanceboundary(self.repo, tr, phases.public, blist)
            except TypeError:
                # hg < 3.2
                phases.advanceboundary(self.repo, phases.public, blist)
            tr.close()
        finally:
            if tr is not None:
                tr.release()
            lock.release()

    if imported == 0:
        return 0

    # code taken from localrepo.py:addchangegroup
    dh = 0
    if oldheads:
        heads = self.repo.changelog.heads()
        dh = len(heads) - len(oldheads)
        for h in heads:
            if h not in oldheads and self.repo[h].closesbranch():
                dh -= 1

    if dh < 0:
        return dh - 1
    else:
        return dh + 1
|
2011-05-19 02:12:32 +04:00
|
|
|
|
2009-08-04 15:30:16 +04:00
|
|
|
def export_commits(self):
    """Export hg changesets, tags and references to the git repo.

    The map is saved even when export fails part-way, so already
    converted changesets are not converted again next time.
    """
    try:
        self.export_git_objects()
        self.export_hg_tags()
        self.update_references()
    finally:
        self.save_map(self.map_file)
|
2009-04-27 05:27:47 +04:00
|
|
|
|
2009-07-31 21:15:02 +04:00
|
|
|
def get_refs(self, remote):
    """Return (old, new) heads for *remote* without actually pushing.

    ``old`` is a dict of known remote hg nodes, ``new`` the list of hg
    nodes whose refs would change.  Uses a no-op send_pack to learn the
    remote's current refs.
    """
    self.export_commits()
    client, path = self.get_transport_and_path(remote)
    old_refs = {}
    new_refs = {}

    def changed(refs):
        # capture the remote's refs and compute what we would update
        old_refs.update(refs)
        exportable = self.get_exportable()
        new_refs.update(self.get_changed_refs(refs, exportable, True))
        return refs  # always return the same refs to make the send a no-op

    try:
        client.send_pack(path, changed, lambda have, want: [])

        changed_refs = [
            ref for ref, sha in new_refs.iteritems() if sha != old_refs.get(ref)
        ]
        new = [bin(self.map_hg_get(new_refs[ref])) for ref in changed_refs]
        old = {}
        for r in old_refs:
            old_ref = self.map_hg_get(old_refs[r])
            if old_ref:
                old[bin(old_ref)] = 1

        return old, new
    except (HangupException, GitProtocolError) as e:
        raise error.Abort(_("git remote error: ") + str(e))
|
2009-07-31 21:15:02 +04:00
|
|
|
|
2009-07-31 01:09:53 +04:00
|
|
|
def push(self, remote, revs, force):
    """Push *revs* to the git *remote*, reporting each ref change.

    Returns None when nothing changed, otherwise a signed count derived
    from the difference in ref counts (mirroring hg push conventions).
    """
    self.export_commits()
    old_refs, new_refs = self.upload_pack(remote, revs, force)
    remote_name = self.remote_name(remote)

    if remote_name and new_refs:
        for ref, new_sha in sorted(new_refs.iteritems()):
            old_sha = old_refs.get(ref)
            if old_sha is None:
                # ref did not exist on the remote before
                if self.ui.verbose:
                    self.ui.note(
                        _("adding reference %s::%s => GIT:%s\n")
                        % (remote_name, ref, new_sha[0:8])
                    )
                else:
                    self.ui.status(_("adding reference %s\n") % ref)
            elif new_sha != old_sha:
                if self.ui.verbose:
                    self.ui.note(
                        _("updating reference %s::%s => GIT:%s\n")
                        % (remote_name, ref, new_sha[0:8])
                    )
                else:
                    self.ui.status(_("updating reference %s\n") % ref)
            else:
                self.ui.debug(
                    "unchanged reference %s::%s => GIT:%s\n"
                    % (remote_name, ref, new_sha[0:8])
                )

        self.update_remote_branches(remote_name, new_refs)
    if old_refs == new_refs:
        self.ui.status(_("no changes found\n"))
        ret = None
    elif len(new_refs) > len(old_refs):
        ret = 1 + (len(new_refs) - len(old_refs))
    elif len(old_refs) > len(new_refs):
        ret = -1 - (len(new_refs) - len(old_refs))
    else:
        ret = 1
    return ret
|
2009-06-23 22:22:49 +04:00
|
|
|
|
2009-06-16 16:39:11 +04:00
|
|
|
def clear(self):
    """Remove all git<->hg conversion state.

    Deletes the git directory, the flat SHA map and (when the
    hggit.indexedlognodemap backend is enabled) the indexedlog map.
    Safe to call when parts of that state are already missing: the
    original code called shutil.rmtree() on the "-log" directory
    unconditionally and aborted with an OSError if it had never been
    created (e.g. the config was turned on after the clone).
    """
    mapfile = self.vfs.join(self.map_file)
    if self.ui.configbool("hggit", "indexedlognodemap", False):
        # ignore_errors: the log directory may legitimately not exist
        shutil.rmtree(mapfile + "-log", ignore_errors=True)

    if os.path.exists(self.gitdir):
        # shutil.rmtree replaces the previous hand-rolled bottom-up
        # os.walk/os.remove/os.rmdir traversal with the same effect.
        shutil.rmtree(self.gitdir)
    if os.path.exists(mapfile):
        os.remove(mapfile)
|
2009-05-18 03:42:34 +04:00
|
|
|
|
2011-05-24 22:16:45 +04:00
|
|
|
# incoming support
|
|
|
|
# incoming support
def getremotechanges(self, remote, revs):
    """Fetch *revs* from *remote* and expose them via an overlay repo.

    Returns (overlayrepo, incoming commit nodes, cleanup callable).
    """
    self.export_commits()
    refs = self.fetch_pack(remote.path, revs)

    # refs contains all remote refs. Prune to only those requested.
    if not revs:
        reqrefs = refs
    else:
        reqrefs = {
            n: refs[n]
            for rev in revs
            for n in ("refs/heads/" + rev, "refs/tags/" + rev)
            if n in refs
        }

    commits = [bin(c) for c in self.get_git_incoming(reqrefs).commits]

    b = overlayrepo(self, commits, refs)

    # no cleanup needed, hence the no-op callable
    return (b, commits, lambda: None)
|
|
|
|
|
2015-04-16 01:31:06 +03:00
|
|
|
# CHANGESET CONVERSION METHODS
|
2009-05-18 02:29:18 +04:00
|
|
|
|
2009-04-27 23:26:44 +04:00
|
|
|
def export_git_objects(self):
    """Convert every unexported hg changeset into git objects.

    Walks back from the changelog heads collecting revisions missing
    from the git<->hg map, then exports them in topological order via
    an IncrementalChangesetExporter, periodically saving the map when
    hggit.mapsavefrequency is configured.
    """
    self.ui.note(_("finding hg commits to export\n"))
    repo = self.repo
    clnode = repo.changelog.node
    clparents = repo.changelog.parentrevs

    # Starting at the heads, walk back and find any commits that aren't in
    # the git/hg mapping.
    pending = []
    if len(repo) > 0:
        # repo.heads() returns the nullrev in an empty repo
        pending = list(repo.changelog.headrevs())
    exportrevs = set()
    while pending:
        rev = pending.pop()
        node = clnode(rev)
        if self._map.lookupbysecond(node) is None:
            exportrevs.add(rev)
            for parentrev in clparents(rev):
                if parentrev != nullrev and parentrev not in exportrevs:
                    pending.append(parentrev)
    # Sorting here is important, because the below code expects to process
    # these in topological order.
    to_export = list(repo[r] for r in sorted(exportrevs))

    todo_total = len(exportrevs)
    pos = 0
    export = []
    with progress.bar(
        repo.ui, "find commits to export", "commits", todo_total
    ) as prog:
        for ctx in to_export:
            item = hex(ctx.node())
            pos += 1
            prog.value = (pos, item)
            # octopus-merge fragments are handled by their real changeset
            if ctx.extra().get("hg-git", None) != "octopus":
                export.append(ctx)

    total = len(export)
    if not total:
        return

    self.ui.note(_("exporting hg objects to git\n"))

    # By only exporting deltas, the assertion is that all previous objects
    # for all other changesets are already present in the Git repository.
    # This assertion is necessary to prevent redundant work. Here, nodes,
    # and therefore export, is in topological order. By definition,
    # export[0]'s parents must be present in Git, so we start the
    # incremental exporter from there.
    pctx = export[0].p1()
    pnode = pctx.node()
    if pnode == nullid:
        gitcommit = None
    else:
        gitnode = self._map.lookupbysecond(pnode)
        if gitnode is None:
            raise KeyError(hex(pnode))
        gitsha = hex(gitnode)
        try:
            gitcommit = self.git[gitsha]
        except KeyError:
            raise error.Abort(
                _("Parent SHA-1 not present in Git " "repo: %s") % gitsha
            )

    exporter = hg2git.IncrementalChangesetExporter(
        self.repo, pctx, self.git.object_store, gitcommit
    )

    mapsavefreq = compat.config(self.ui, "int", "hggit", "mapsavefrequency")
    with progress.bar(self.ui, _("exporting"), total=total) as prog:
        for i, ctx in enumerate(export):
            prog.value = i
            self.export_hg_commit(ctx.node(), exporter)
            # checkpoint the map so an interrupted export resumes cheaply
            if mapsavefreq and i % mapsavefreq == 0:
                self.ui.debug("saving mapfile\n")
                self.save_map(self.map_file)
|
2010-02-25 06:08:38 +03:00
|
|
|
|
2016-09-05 13:04:24 +03:00
|
|
|
def set_commiter_from_author(self, commit):
    """Copy the author identity and timestamp onto the committer fields."""
    (commit.committer, commit.commit_time, commit.commit_timezone) = (
        commit.author,
        commit.author_time,
        commit.author_timezone,
    )
|
|
|
|
|
2009-04-28 03:15:48 +04:00
|
|
|
# convert this commit into git objects
|
|
|
|
# go through the manifest, convert all blobs/trees we don't have
|
|
|
|
# write the commit object (with metadata info)
|
2013-03-20 09:44:01 +04:00
|
|
|
def export_hg_commit(self, rev, exporter):
    """Convert the hg changeset *rev* into a git commit; return its SHA.

    Builds the git Commit (author/committer identity, parents, message,
    extras, tree), adds any new objects produced by *exporter* to the
    object store, and records the pairing in the SHA map.
    """
    self.ui.note(_("converting revision %s\n") % hex(rev))

    # run the conversion with git-compatible encoding settings
    oldenc = self.swap_out_encoding()

    ctx = self.repo.changectx(rev)
    extra = ctx.extra()

    commit = Commit()

    (time, timezone) = ctx.date()
    # work around to bad timezone offets - dulwich does not handle
    # sub minute based timezones. In the one known case, it was a
    # manual edit that led to the unusual value. Based on that,
    # there is no reason to round one way or the other, so do the
    # simplest and round down.
    timezone -= timezone % 60
    commit.author = self.get_git_author(ctx)
    commit.author_time = int(time)
    commit.author_timezone = -timezone

    if "committer" in extra:
        try:
            # fixup timezone
            (name, timestamp, timezone) = extra["committer"].rsplit(" ", 2)
            commit.committer = name
            commit.commit_time = timestamp

            # work around a timezone format change
            if int(timezone) % 60 != 0:  # pragma: no cover
                timezone = parse_timezone(timezone)
                # Newer versions of Dulwich return a tuple here
                if isinstance(timezone, tuple):
                    timezone, neg_utc = timezone
                    commit._commit_timezone_neg_utc = neg_utc
            else:
                timezone = -int(timezone)
            commit.commit_timezone = timezone
        except Exception:  # extra is essentially user-supplied; be careful
            self.set_commiter_from_author(commit)
    else:
        self.set_commiter_from_author(commit)

    commit.parents = []
    for parent in self.get_git_parents(ctx):
        hgsha = hex(parent.node())
        git_sha = self.map_git_get(hgsha)
        if git_sha:
            if git_sha not in self.git.object_store:
                raise error.Abort(
                    _("Parent SHA-1 not present in Git " "repo: %s") % git_sha
                )

            commit.parents.append(git_sha)

    commit.message, extra = self.get_git_message_and_extra(ctx)
    commit.extra.extend(extra)

    # NOTE(review): `extra` was rebound above to the value returned by
    # get_git_message_and_extra (extended into commit.extra); confirm
    # this membership test targets the intended container and not the
    # original ctx.extra() dict.
    if "encoding" in extra:
        commit.encoding = extra["encoding"]

    for obj, nodeid in exporter.update_changeset(ctx):
        # In theory we should check if the object exists before adding it,
        # but in practice it's unlikely to exist, and scanning all the packs
        # to determine that is expensive.
        self.git.object_store.add_object(obj)

    tree_sha = exporter.root_tree_sha

    if tree_sha not in self.git.object_store:
        raise error.Abort(_("Tree SHA-1 not present in Git repo: %s") % tree_sha)

    commit.tree = tree_sha

    if commit.id not in self.git.object_store:
        self.git.object_store.add_object(commit)
    self.map_set(commit.id, ctx.hex())

    self.swap_out_encoding(oldenc)
    return commit.id
|
2009-04-30 23:46:54 +04:00
|
|
|
|
2011-09-10 01:12:49 +04:00
|
|
|
def get_valid_git_username_email(self, name):
|
2012-02-15 05:30:06 +04:00
|
|
|
r"""Sanitize usernames and emails to fit git's restrictions.
|
|
|
|
|
|
|
|
The following is taken from the man page of git's fast-import
|
|
|
|
command:
|
|
|
|
|
|
|
|
[...] Likewise LF means one (and only one) linefeed [...]
|
|
|
|
|
|
|
|
committer
|
|
|
|
The committer command indicates who made this commit,
|
|
|
|
and when they made it.
|
|
|
|
|
|
|
|
Here <name> is the person's display name (for example
|
|
|
|
"Com M Itter") and <email> is the person's email address
|
|
|
|
("cm@example.com[1]"). LT and GT are the literal
|
|
|
|
less-than (\x3c) and greater-than (\x3e) symbols. These
|
|
|
|
are required to delimit the email address from the other
|
|
|
|
fields in the line. Note that <name> and <email> are
|
|
|
|
free-form and may contain any sequence of bytes, except
|
|
|
|
LT, GT and LF. <name> is typically UTF-8 encoded.
|
|
|
|
|
|
|
|
Accordingly, this function makes sure that there are none of the
|
|
|
|
characters <, >, or \n in any string which will be used for
|
|
|
|
a git username or email. Before this, it first removes left
|
|
|
|
angle brackets and spaces from the beginning, and right angle
|
|
|
|
brackets and spaces from the end, of this string, to convert
|
|
|
|
such things as " <john@doe.com> " to "john@doe.com" for
|
|
|
|
convenience.
|
|
|
|
|
|
|
|
TESTS:
|
|
|
|
|
2017-08-06 01:03:33 +03:00
|
|
|
>>> from collections import namedtuple
|
2019-01-30 03:25:33 +03:00
|
|
|
>>> from edenscm.mercurial.ui import ui
|
2018-09-28 17:08:53 +03:00
|
|
|
>>> mockrepo = namedtuple('localrepo', ['sharedvfs'])
|
|
|
|
>>> mockrepo.sharedvfs = ''
|
2017-08-06 01:03:33 +03:00
|
|
|
>>> g = GitHandler(mockrepo, ui()).get_valid_git_username_email
|
2012-02-15 05:30:06 +04:00
|
|
|
>>> g('John Doe')
|
|
|
|
'John Doe'
|
|
|
|
>>> g('john@doe.com')
|
|
|
|
'john@doe.com'
|
|
|
|
>>> g(' <john@doe.com> ')
|
|
|
|
'john@doe.com'
|
|
|
|
>>> g(' <random<\n<garbage\n> > > ')
|
|
|
|
'random???garbage?'
|
|
|
|
>>> g('Typo in hgrc >but.hg-git@handles.it.gracefully>')
|
|
|
|
'Typo in hgrc ?but.hg-git@handles.it.gracefully'
|
|
|
|
"""
|
2018-05-30 12:16:33 +03:00
|
|
|
return RE_GIT_SANITIZE_AUTHOR.sub("?", name.lstrip("< ").rstrip("> "))
|
2011-09-10 01:12:49 +04:00
|
|
|
|
2009-08-04 16:19:30 +04:00
|
|
|
def get_git_author(self, ctx):
|
2009-07-25 00:23:35 +04:00
|
|
|
# hg authors might not have emails
|
|
|
|
author = ctx.user()
|
|
|
|
|
2012-02-23 22:49:07 +04:00
|
|
|
# see if a translation exists
|
2012-09-22 06:28:46 +04:00
|
|
|
author = self.author_map.get(author, author)
|
2012-02-23 22:49:07 +04:00
|
|
|
|
2009-07-25 00:23:35 +04:00
|
|
|
# check for git author pattern compliance
|
2012-09-22 06:28:46 +04:00
|
|
|
a = RE_GIT_AUTHOR.match(author)
|
2009-07-25 00:23:35 +04:00
|
|
|
|
|
|
|
if a:
|
2012-02-15 05:30:06 +04:00
|
|
|
name = self.get_valid_git_username_email(a.group(1))
|
|
|
|
email = self.get_valid_git_username_email(a.group(2))
|
2018-01-09 16:53:20 +03:00
|
|
|
if a.group(3) is not None and len(a.group(3)) != 0:
|
2018-05-30 12:16:33 +03:00
|
|
|
name += " ext:(" + hgutil.urlreq.quote(a.group(3)) + ")"
|
|
|
|
author = "%s <%s>" % (
|
|
|
|
self.get_valid_git_username_email(name),
|
|
|
|
self.get_valid_git_username_email(email),
|
|
|
|
)
|
|
|
|
elif "@" in author:
|
|
|
|
author = "%s <%s>" % (
|
|
|
|
self.get_valid_git_username_email(author),
|
|
|
|
self.get_valid_git_username_email(author),
|
|
|
|
)
|
2009-06-03 06:33:48 +04:00
|
|
|
else:
|
2018-05-30 12:16:33 +03:00
|
|
|
author = self.get_valid_git_username_email(author) + " <none@none>"
|
2009-07-25 00:23:35 +04:00
|
|
|
|
2018-05-30 12:16:33 +03:00
|
|
|
if "author" in ctx.extra():
|
|
|
|
author = "".join(apply_delta(author, ctx.extra()["author"]))
|
2009-06-18 19:49:13 +04:00
|
|
|
|
2009-08-04 16:19:30 +04:00
|
|
|
return author
|
2009-08-02 22:53:08 +04:00
|
|
|
|
2009-08-04 16:19:30 +04:00
|
|
|
def get_git_parents(self, ctx):
|
|
|
|
def is_octopus_part(ctx):
|
2018-05-30 12:16:33 +03:00
|
|
|
olist = ("octopus", "octopus-done")
|
|
|
|
return ctx.extra().get("hg-git", None) in olist
|
2009-06-18 19:49:13 +04:00
|
|
|
|
2009-08-04 16:19:30 +04:00
|
|
|
parents = []
|
2018-05-30 12:16:33 +03:00
|
|
|
if ctx.extra().get("hg-git", None) == "octopus-done":
|
2009-08-04 16:19:30 +04:00
|
|
|
# implode octopus parents
|
|
|
|
part = ctx
|
|
|
|
while is_octopus_part(part):
|
|
|
|
(p1, p2) = part.parents()
|
2018-05-30 12:16:33 +03:00
|
|
|
assert ctx.extra().get("hg-git", None) != "octopus"
|
2009-08-04 16:19:30 +04:00
|
|
|
parents.append(p1)
|
|
|
|
part = p2
|
|
|
|
parents.append(p2)
|
|
|
|
else:
|
|
|
|
parents = ctx.parents()
|
2009-04-30 03:18:37 +04:00
|
|
|
|
2009-08-04 16:19:30 +04:00
|
|
|
return parents
|
2009-08-04 15:32:01 +04:00
|
|
|
|
2014-09-01 01:01:07 +04:00
|
|
|
def get_git_message_and_extra(self, ctx):
|
2009-08-04 16:19:30 +04:00
|
|
|
extra = ctx.extra()
|
2009-07-03 00:58:51 +04:00
|
|
|
|
2009-08-04 16:19:30 +04:00
|
|
|
message = ctx.description() + "\n"
|
2018-05-30 12:16:33 +03:00
|
|
|
if "message" in extra:
|
|
|
|
message = "".join(apply_delta(message, extra["message"]))
|
2009-05-09 07:57:02 +04:00
|
|
|
|
2009-04-30 00:26:13 +04:00
|
|
|
# HG EXTRA INFORMATION
|
2014-09-01 01:01:07 +04:00
|
|
|
|
|
|
|
# test only -- do not document this!
|
2018-05-30 12:16:33 +03:00
|
|
|
extra_in_message = compat.config(self.ui, "bool", "git", "debugextrainmessage")
|
|
|
|
extra_message = ""
|
2014-09-01 01:01:07 +04:00
|
|
|
git_extra = []
|
2018-05-30 12:16:33 +03:00
|
|
|
if ctx.branch() != "default":
|
2014-09-01 01:01:07 +04:00
|
|
|
# we always store the branch in the extra message
|
2009-04-30 23:55:56 +04:00
|
|
|
extra_message += "branch : " + ctx.branch() + "\n"
|
2009-04-30 03:18:37 +04:00
|
|
|
|
2014-09-01 01:01:07 +04:00
|
|
|
# Git native extra items always come first, followed by hg renames,
|
|
|
|
# followed by hg extra keys
|
|
|
|
git_extraitems = []
|
|
|
|
for key, value in extra.items():
|
|
|
|
m = RE_GIT_EXTRA_KEY.match(key)
|
|
|
|
if m is not None:
|
|
|
|
git_extraitems.append((int(m.group(1)), m.group(2), value))
|
|
|
|
del extra[key]
|
|
|
|
|
|
|
|
git_extraitems.sort()
|
|
|
|
for i, field, value in git_extraitems:
|
2018-05-30 12:16:33 +03:00
|
|
|
git_extra.append(
|
|
|
|
(hgutil.urlreq.unquote(field), hgutil.urlreq.unquote(value))
|
|
|
|
)
|
2014-09-01 01:01:07 +04:00
|
|
|
|
2018-05-30 12:16:33 +03:00
|
|
|
if extra.get("hg-git-rename-source", None) != "git":
|
2014-12-03 01:17:09 +03:00
|
|
|
renames = []
|
|
|
|
for f in ctx.files():
|
|
|
|
if f not in ctx.manifest():
|
|
|
|
continue
|
|
|
|
rename = ctx.filectx(f).renamed()
|
|
|
|
if rename:
|
|
|
|
renames.append((rename[0], f))
|
|
|
|
|
|
|
|
if renames:
|
|
|
|
for oldfile, newfile in renames:
|
|
|
|
if extra_in_message:
|
2018-05-30 12:16:33 +03:00
|
|
|
extra_message += "rename : " + oldfile + " => " + newfile + "\n"
|
2014-12-03 01:17:09 +03:00
|
|
|
else:
|
2018-05-30 12:16:33 +03:00
|
|
|
spec = "%s:%s" % (
|
|
|
|
hgutil.urlreq.quote(oldfile),
|
|
|
|
hgutil.urlreq.quote(newfile),
|
|
|
|
)
|
|
|
|
git_extra.append(("HG:rename", spec))
|
2009-05-30 00:10:14 +04:00
|
|
|
|
2014-09-01 01:01:07 +04:00
|
|
|
# hg extra items always go at the end
|
2014-08-31 16:13:39 +04:00
|
|
|
extraitems = extra.items()
|
|
|
|
extraitems.sort()
|
|
|
|
for key, value in extraitems:
|
2018-05-30 12:16:33 +03:00
|
|
|
if key in (
|
|
|
|
"author",
|
|
|
|
"committer",
|
|
|
|
"encoding",
|
|
|
|
"message",
|
|
|
|
"branch",
|
|
|
|
"hg-git",
|
|
|
|
"hg-git-rename-source",
|
|
|
|
):
|
2009-05-30 00:10:14 +04:00
|
|
|
continue
|
|
|
|
else:
|
2014-09-01 01:01:07 +04:00
|
|
|
if extra_in_message:
|
2018-05-30 12:16:33 +03:00
|
|
|
extra_message += (
|
|
|
|
"extra : " + key + " : " + hgutil.urlreq.quote(value) + "\n"
|
|
|
|
)
|
2014-09-01 01:01:07 +04:00
|
|
|
else:
|
2018-05-30 12:16:33 +03:00
|
|
|
spec = "%s:%s" % (
|
|
|
|
hgutil.urlreq.quote(key),
|
|
|
|
hgutil.urlreq.quote(value),
|
|
|
|
)
|
|
|
|
git_extra.append(("HG:extra", spec))
|
2009-05-30 00:10:14 +04:00
|
|
|
|
2014-08-31 14:33:36 +04:00
|
|
|
if extra_message:
|
2009-08-04 16:19:30 +04:00
|
|
|
message += "\n--HG--\n" + extra_message
|
2009-04-30 03:18:37 +04:00
|
|
|
|
2018-05-30 12:16:33 +03:00
|
|
|
if (
|
|
|
|
extra.get("hg-git-rename-source", None) != "git"
|
|
|
|
and not extra_in_message
|
|
|
|
and not git_extra
|
|
|
|
and extra_message == ""
|
|
|
|
):
|
2014-12-03 01:17:09 +03:00
|
|
|
# We need to store this if no other metadata is stored. This
|
|
|
|
# indicates that when reimporting the commit into Mercurial we'll
|
|
|
|
# know not to detect renames.
|
2018-05-30 12:16:33 +03:00
|
|
|
git_extra.append(("HG:rename-source", "hg"))
|
2014-12-03 01:17:09 +03:00
|
|
|
|
2014-09-01 01:01:07 +04:00
|
|
|
return message, git_extra
|
2009-07-03 00:58:51 +04:00
|
|
|
|
2014-10-16 03:06:46 +04:00
|
|
|
def get_git_incoming(self, refs):
|
2018-12-06 22:15:51 +03:00
|
|
|
return git2hg.find_incoming(self.git.object_store, self._map, refs)
|
2011-05-24 22:16:45 +04:00
|
|
|
|
2014-10-23 09:28:56 +04:00
|
|
|
def import_git_objects(self, remote_name, refs):
|
2014-10-15 03:35:37 +04:00
|
|
|
result = self.get_git_incoming(refs)
|
|
|
|
commits = result.commits
|
|
|
|
commit_cache = result.commit_cache
|
2009-06-16 16:39:11 +04:00
|
|
|
# import each of the commits, oldest first
|
|
|
|
total = len(commits)
|
2011-05-18 11:31:36 +04:00
|
|
|
if total:
|
|
|
|
self.ui.status(_("importing git objects into hg\n"))
|
2014-03-05 03:43:54 +04:00
|
|
|
else:
|
|
|
|
self.ui.status(_("no changes found\n"))
|
2011-05-18 11:31:36 +04:00
|
|
|
|
2018-05-30 12:16:33 +03:00
|
|
|
mapsavefreq = compat.config(self.ui, "int", "hggit", "mapsavefrequency")
|
|
|
|
with progress.bar(self.ui, _("importing"), "commits", total=total) as prog:
|
2018-08-08 03:55:51 +03:00
|
|
|
icommits = enumerate(commits)
|
|
|
|
while True:
|
|
|
|
isubcommits = list(itertools.islice(icommits, mapsavefreq or 1))
|
|
|
|
if not isubcommits:
|
|
|
|
break
|
|
|
|
|
|
|
|
with self.repo.transaction("git import"):
|
|
|
|
for i, csha in isubcommits:
|
|
|
|
prog.value = i
|
|
|
|
commit = commit_cache[csha]
|
|
|
|
self.import_git_commit(commit)
|
|
|
|
self.ui.debug("committing transaction\n")
|
|
|
|
|
|
|
|
if mapsavefreq:
|
2018-03-21 23:45:33 +03:00
|
|
|
self.ui.debug("saving mapfile\n")
|
|
|
|
self.save_map(self.map_file)
|
2009-06-16 16:39:11 +04:00
|
|
|
|
2014-02-20 06:45:36 +04:00
|
|
|
# TODO if the tags cache is used, remove any dangling tag references
|
2014-03-05 04:51:43 +04:00
|
|
|
return total
|
2011-05-21 09:41:43 +04:00
|
|
|
|
2009-06-16 16:39:11 +04:00
|
|
|
    def import_git_commit(self, commit):
        """Convert one dulwich Commit into a Mercurial changeset.

        Reconstructs the hg metadata (message, branch, renames, extras)
        that export stashed in the git commit, builds a memctx, commits
        it, and records the git-sha -> hg-sha pair in the map.  Octopus
        merges are expanded into a chain of two-parent merge commits.
        """
        self.ui.debug("importing: %s\n" % commit.id)

        detect_renames = False
        (strip_message, hg_renames, hg_branch, extra) = git2hg.extract_hg_metadata(
            commit.message, commit.extra
        )

        # Store the original git commit hash in the Mercurial extras. This
        # breaks bidirectionality, but makes it possible for a Mercurial client
        # to compute the git/hg mapping without having the entire git repo.
        # "convert_revision" was chosen to match the hgsubversion and hg convert
        # extra field.
        extra["convert_revision"] = commit.id

        if hg_renames is None:
            detect_renames = True
            # We have to store this unconditionally, even if there are no
            # renames detected from Git. This is because we export an extra
            # 'HG:rename-source' Git parameter when this isn't set, which will
            # break bidirectionality.
            extra["hg-git-rename-source"] = "git"
        else:
            renames = hg_renames

        # Python 2 map() -> list of hg shas (or None for unknown parents).
        gparents = map(self.map_hg_get, commit.parents)

        for parent in gparents:
            if parent not in self.repo:
                raise error.Abort(
                    _("you appear to have run strip - " "please run hg git-cleanup")
                )

        # get a list of the changed, added, removed files and gitlinks
        files, gitlinks, git_renames = self.get_files_changed(commit, detect_renames)
        if detect_renames:
            renames = git_renames

        # hg stores timezone as seconds west of UTC, hence the negation.
        date = (commit.author_time, -commit.author_timezone)
        text = strip_message

        origtext = text
        try:
            text.decode("utf-8")
        except UnicodeDecodeError:
            text = self.decode_guess(text, commit.encoding)

        # Normalize trailing whitespace; keep a delta so the exact original
        # message survives a round-trip back to git.
        text = "\n".join([l.rstrip() for l in text.splitlines()]).strip("\n")
        if text + "\n" != origtext:
            extra["message"] = create_delta(text + "\n", origtext)

        author = commit.author

        # convert extra data back to the end
        if " ext:" in commit.author:
            m = RE_GIT_AUTHOR_EXTRA.match(commit.author)
            if m:
                name = m.group(1)
                ex = hgutil.urlreq.unquote(m.group(2))
                email = m.group(3)
                author = name + " <" + email + ">" + ex

        if " <none@none>" in commit.author:
            # Drop the 12-char placeholder email added on export.
            author = commit.author[:-12]

        try:
            author.decode("utf-8")
        except UnicodeDecodeError:
            origauthor = author
            author = self.decode_guess(author, commit.encoding)
            extra["author"] = create_delta(author, origauthor)

        oldenc = self.swap_out_encoding()

        def findconvergedfiles(p1, p2):
            # If any files have the same contents in both parents of a merge
            # (and are therefore not reported as changed by Git) but are at
            # different file revisions in Mercurial (because they arrived at
            # those contents in different ways), we need to include them in
            # the list of changed files so that Mercurial can join up their
            # filelog histories (same as if the merge was done in Mercurial to
            # begin with).
            if p2 == nullid:
                return []
            manifest1 = self.repo.changectx(p1).manifest()
            manifest2 = self.repo.changectx(p2).manifest()
            return [
                path
                for path, node1 in manifest1.iteritems()
                if path not in files and manifest2.get(path, node1) != node1
            ]

        def getfilectx(repo, memctx, f):
            # Callback used by memctx to materialize each changed file.
            info = files.get(f)
            if info is not None:
                # it's a file reported as modified from Git
                delete, mode, sha = info
                if delete:
                    if getattr(memctx, "_returnnoneformissingfiles", False):
                        return None
                    else:  # Mercurial < 3.2
                        raise IOError

                if not sha:  # indicates there's no git counterpart
                    e = ""
                    copied_path = None
                else:
                    data = self.git[sha].data
                    copied_path = renames.get(f)
                    e = self.convert_git_int_mode(mode)
            else:
                # it's a converged file
                fc = context.filectx(self.repo, f, changeid=memctx.p1().rev())
                data = fc.data()
                e = fc.flags()
                copied_path = None
                copied = fc.renamed()
                if copied:
                    copied_path = copied[0]

            # memfilectx's signature changed across Mercurial versions; fall
            # back through the older call shapes on TypeError.
            try:
                return context.memfilectx(
                    self.repo,
                    memctx,
                    f,
                    data,
                    islink="l" in e,
                    isexec="x" in e,
                    copied=copied_path,
                )
            except TypeError:
                try:
                    return context.memfilectx(
                        self.repo,
                        f,
                        data,
                        islink="l" in e,
                        isexec="x" in e,
                        copied=copied_path,
                    )
                except TypeError:
                    return context.memfilectx(
                        f, data, islink="l" in e, isexec="x" in e, copied=copied_path
                    )

        p1, p2 = (nullid, nullid)
        octopus = False

        if len(gparents) > 1:
            # merge, possibly octopus
            def commit_octopus(p1, p2):
                # Intermediate two-parent merge carrying the 'octopus' marker.
                ctx = context.memctx(
                    self.repo,
                    (p1, p2),
                    text,
                    list(files) + findconvergedfiles(p1, p2),
                    getfilectx,
                    author,
                    date,
                    {"hg-git": "octopus"},
                )
                return hex(self.repo.commitctx(ctx))

            octopus = len(gparents) > 2
            p2 = gparents.pop()
            p1 = gparents.pop()
            # Fold parents pairwise until only (p1, p2) remain.
            while len(gparents) > 0:
                p2 = commit_octopus(p1, p2)
                p1 = gparents.pop()
        else:
            if gparents:
                p1 = gparents.pop()

        # if named branch, add to extra
        if hg_branch:
            extra["branch"] = hg_branch
        else:
            extra["branch"] = "default"

        # if committer is different than author, add it to extra
        if (
            commit.author != commit.committer
            or commit.author_time != commit.commit_time
            or commit.author_timezone != commit.commit_timezone
        ):
            extra["committer"] = "%s %d %d" % (
                commit.committer,
                commit.commit_time,
                -commit.commit_timezone,
            )

        if commit.encoding:
            extra["encoding"] = commit.encoding

        if octopus:
            # The final commit of the chain is the one export recognizes.
            extra["hg-git"] = "octopus-done"

        ctx = context.memctx(
            self.repo,
            (p1, p2),
            text,
            list(files) + findconvergedfiles(p1, p2),
            getfilectx,
            author,
            date,
            extra,
        )
        node = self.repo.commitctx(ctx)

        self.swap_out_encoding(oldenc)

        # save changeset to mapping file
        cs = hex(node)
        self.map_set(commit.id, cs)
|
|
|
|
|
2015-04-16 01:31:06 +03:00
|
|
|
# PACK UPLOADING AND FETCHING
|
2009-04-27 05:27:47 +04:00
|
|
|
|
2009-07-31 01:09:53 +04:00
|
|
|
    def upload_pack(self, remote, revs, force):
        """Push a pack of missing objects to the git remote.

        revs is the list of hg nodes to push (None for everything
        exportable); force allows non-fast-forward ref updates.  Returns
        (old_refs, new_refs): the remote's refs before and after the push.
        Raises error.Abort on protocol errors or unbookmarked revisions.
        """
        client, path = self.get_transport_and_path(remote)
        # Mutable state shared with the callbacks passed to send_pack below.
        old_refs = {}
        change_totals = {}

        def changed(refs):
            # Called by dulwich with the remote's current refs; returns the
            # refs we want the remote to have after the push.
            self.ui.status(_("searching for changes\n"))
            old_refs.update(refs)
            all_exportable = self.get_exportable()
            if revs is None:
                exportable = all_exportable
            else:
                exportable = {}
                for rev in (hex(r) for r in revs):
                    if rev not in all_exportable:
                        raise error.Abort(
                            "revision %s cannot be pushed since"
                            " it doesn't have a bookmark" % self.repo[rev]
                        )
                    exportable[rev] = all_exportable[rev]
            return self.get_changed_refs(refs, exportable, force)

        def genpack(have, want):
            # Called by dulwich to produce the pack contents; also tallies
            # object counts per type into change_totals for reporting.
            commits = []
            for mo in self.git.object_store.find_missing_objects(have, want):
                (sha, name) = mo
                o = self.git.object_store[sha]
                t = type(o)
                change_totals[t] = change_totals.get(t, 0) + 1
                if isinstance(o, Commit):
                    commits.append(sha)
            commit_count = len(commits)
            self.ui.note(_("%d commits found\n") % commit_count)
            if commit_count > 0:
                self.ui.debug("list of commits:\n")
                for commit in commits:
                    self.ui.debug("%s\n" % commit)
                self.ui.status(_("adding objects\n"))
            return self.git.object_store.generate_pack_contents(have, want)

        def callback(remote_info):
            # dulwich (perhaps git?) wraps remote output at a fixed width but
            # signifies the end of transmission with a double new line
            global CALLBACK_BUFFER
            if remote_info and not remote_info.endswith("\n\n"):
                # Partial message: buffer it until the terminator arrives.
                CALLBACK_BUFFER += remote_info
                return

            remote_info = CALLBACK_BUFFER + remote_info
            CALLBACK_BUFFER = ""
            if not remote_info:
                remote_info = "\n"

            for line in remote_info[:-1].split("\n"):
                self.ui.status(_("remote: %s\n") % line)

        try:
            new_refs = client.send_pack(path, changed, genpack, progress=callback)
            if len(change_totals) > 0:
                self.ui.status(
                    _("added %d commits with %d trees" " and %d blobs\n")
                    % (
                        change_totals.get(Commit, 0),
                        change_totals.get(Tree, 0),
                        change_totals.get(Blob, 0),
                    )
                )
            return old_refs, new_refs
        except (HangupException, GitProtocolError) as e:
            raise error.Abort(_("git remote error: ") + str(e))
|
2009-07-31 01:09:53 +04:00
|
|
|
|
2014-10-29 22:30:23 +03:00
|
|
|
    def get_changed_refs(self, refs, exportable, force):
        """Compute the refs the remote should have after a push.

        refs is the remote's current {ref: sha} dict; exportable maps hg
        revs to their bookmark/tag refs.  Returns a new {ref: sha} dict.
        Aborts on non-fast-forward updates (unless force) and on revisions
        without bookmarks.
        """
        new_refs = refs.copy()

        # The remote repo is empty and the local one doesn't have
        # bookmarks/tags
        #
        # (older dulwich versions return the proto-level
        # capabilities^{} key when the dict should have been
        # empty. That check can probably be removed at some point in
        # the future.)
        if not refs or refs.keys()[0] == "capabilities^{}":
            if not exportable:
                tip = self.repo.lookup("tip")
                if tip != nullid:
                    if "capabilities^{}" in new_refs:
                        del new_refs["capabilities^{}"]
                    tip = hex(tip)
                    # Create a 'master' bookmark at tip so there is something
                    # to push; API differs across Mercurial versions.
                    try:
                        commands.bookmark(
                            self.ui, self.repo, "master", rev=tip, force=True
                        )
                    except NameError:
                        bookmarks.bookmark(
                            self.ui, self.repo, "master", rev=tip, force=True
                        )
                    try:
                        bookmarks.activate(self.repo, "master")
                    except AttributeError:
                        # hg < 3.5
                        bookmarks.setcurrent(self.repo, "master")
                    new_refs["refs/heads/master"] = self.map_git_get(tip)

        # mapped nodes might be hidden
        unfiltered = self.repo.unfiltered()
        for rev, rev_refs in exportable.iteritems():
            ctx = self.repo[rev]
            if not rev_refs:
                raise error.Abort(
                    "revision %s cannot be pushed since"
                    " it doesn't have a bookmark" % ctx
                )

            # Check if the tags the server is advertising are annotated tags,
            # by attempting to retrieve it from the our git repo, and building
            # a list of these tags.
            #
            # This is possible, even though (currently) annotated tags are
            # dereferenced and stored as lightweight ones, as the annotated tag
            # is still stored in the git repo.
            uptodate_annotated_tags = []
            for ref in rev_refs.tags:
                # Check tag.
                if ref not in refs:
                    continue
                try:
                    # We're not using Repo.tag(), as it's deprecated.
                    tag = self.git.get_object(refs[ref])
                    if not isinstance(tag, Tag):
                        continue
                except KeyError:
                    continue

                # If we've reached here, the tag's good.
                uptodate_annotated_tags.append(ref)

            for ref in rev_refs:
                if ref not in refs:
                    # New ref: point it at this changeset's git commit.
                    new_refs[ref] = self.map_git_get(ctx.hex())
                elif self._map.lookupbyfirst(bin(new_refs[ref])) is not None:
                    # Remote ref maps to a known hg changeset: allow only
                    # fast-forward updates unless force is set.
                    rctx = unfiltered[self.map_hg_get(new_refs[ref])]
                    if rctx.ancestor(ctx) == rctx or force:
                        new_refs[ref] = self.map_git_get(ctx.hex())
                    else:
                        raise error.Abort("pushing %s overwrites %s" % (ref, ctx))
                elif ref in uptodate_annotated_tags:
                    # we already have the annotated tag.
                    pass
                else:
                    raise error.Abort(
                        "branch '%s' changed on the server, "
                        "please pull and merge before pushing" % ref
                    )

        return new_refs
|
2009-04-29 06:33:03 +04:00
|
|
|
|
2012-10-26 04:49:08 +04:00
|
|
|
def fetch_pack(self, remote_name, heads=None):
|
2015-05-15 01:15:37 +03:00
|
|
|
localclient, path = self.get_transport_and_path(remote_name)
|
2015-06-24 06:17:10 +03:00
|
|
|
|
|
|
|
# The dulwich default walk only checks refs/heads/. We also want to
|
|
|
|
# consider remotes when doing discovery, so we build our own list. We
|
|
|
|
# can't just do 'refs/' here because the tag class doesn't have a
|
|
|
|
# parents function for walking, and older versions of dulwich don't like
|
|
|
|
# that.
|
2018-05-30 12:16:33 +03:00
|
|
|
haveheads = self.git.refs.as_dict("refs/remotes/").values()
|
|
|
|
haveheads.extend(self.git.refs.as_dict("refs/heads/").values())
|
2015-06-24 06:17:10 +03:00
|
|
|
graphwalker = self.git.get_graph_walker(heads=haveheads)
|
2014-03-05 00:57:37 +04:00
|
|
|
|
2009-08-01 20:55:54 +04:00
|
|
|
def determine_wants(refs):
|
2015-11-09 08:56:38 +03:00
|
|
|
if refs is None:
|
|
|
|
return None
|
2014-03-05 00:57:37 +04:00
|
|
|
filteredrefs = self.filter_refs(refs, heads)
|
|
|
|
return [x for x in filteredrefs.itervalues() if x not in self.git]
|
2013-09-17 17:58:12 +04:00
|
|
|
|
2009-04-24 02:26:10 +04:00
|
|
|
try:
|
2018-03-21 23:45:33 +03:00
|
|
|
with progress.bar(self.ui, "") as prog:
|
|
|
|
gitprogress = GitProgress(self.ui, prog)
|
|
|
|
f = StringIO.StringIO()
|
|
|
|
|
|
|
|
# monkey patch dulwich's read_pkt_refs so that we can determine
|
|
|
|
# on clone which bookmark to activate
|
|
|
|
client.read_pkt_refs = compat.read_pkt_refs
|
2018-05-30 12:16:33 +03:00
|
|
|
ret = localclient.fetch_pack(
|
|
|
|
path, determine_wants, graphwalker, f.write, gitprogress.progress
|
|
|
|
)
|
2018-03-21 23:45:33 +03:00
|
|
|
if f.pos != 0:
|
|
|
|
f.seek(0)
|
|
|
|
self.git.object_store.add_thin_pack(f.read, None)
|
2013-12-04 01:55:17 +04:00
|
|
|
|
|
|
|
# For empty repos dulwich gives us None, but since later
|
|
|
|
# we want to iterate over this, we really want an empty
|
|
|
|
# iterable
|
|
|
|
return ret if ret else {}
|
2018-01-09 16:53:20 +03:00
|
|
|
except (HangupException, GitProtocolError) as e:
|
|
|
|
raise error.Abort(_("git remote error: ") + str(e))
|
2009-04-24 02:26:10 +04:00
|
|
|
|
2015-04-16 01:31:06 +03:00
|
|
|
# REFERENCES HANDLING
|
2009-06-16 16:39:11 +04:00
|
|
|
|
2014-03-05 00:57:37 +04:00
|
|
|
def filter_refs(self, refs, heads):
|
2018-05-30 12:16:33 +03:00
|
|
|
"""For a dictionary of refs: shas, if heads is None then return refs
|
2014-03-05 00:57:37 +04:00
|
|
|
that match the heads. Otherwise, return refs that are heads or tags.
|
|
|
|
|
2018-05-30 12:16:33 +03:00
|
|
|
"""
|
2014-10-31 20:40:58 +03:00
|
|
|
filteredrefs = []
|
2014-05-09 06:30:38 +04:00
|
|
|
if heads is not None:
|
2014-03-05 00:57:37 +04:00
|
|
|
# contains pairs of ('refs/(heads|tags|...)/foo', 'foo')
|
|
|
|
# if ref is just '<foo>', then we get ('foo', 'foo')
|
2018-05-30 12:16:33 +03:00
|
|
|
stripped_refs = [(r, r[r.find("/", r.find("/") + 1) + 1 :]) for r in refs]
|
2014-03-05 00:57:37 +04:00
|
|
|
for h in heads:
|
2018-05-30 12:16:33 +03:00
|
|
|
if h.endswith("/*"):
|
2014-10-31 21:14:35 +03:00
|
|
|
prefix = h[:-1] # include the / but not the *
|
2018-05-30 12:16:33 +03:00
|
|
|
r = [
|
|
|
|
pair[0] for pair in stripped_refs if pair[1].startswith(prefix)
|
|
|
|
]
|
2014-10-31 21:14:35 +03:00
|
|
|
r.sort()
|
|
|
|
filteredrefs.extend(r)
|
2014-03-05 00:57:37 +04:00
|
|
|
else:
|
2014-10-31 21:14:35 +03:00
|
|
|
r = [pair[0] for pair in stripped_refs if pair[1] == h]
|
|
|
|
if not r:
|
2018-05-30 12:16:33 +03:00
|
|
|
raise error.Abort("ref %s not found on remote server" % h)
|
2014-10-31 21:14:35 +03:00
|
|
|
elif len(r) == 1:
|
|
|
|
filteredrefs.append(r[0])
|
|
|
|
else:
|
2018-05-30 12:16:33 +03:00
|
|
|
raise error.Abort("ambiguous reference %s: %r" % (h, r))
|
2014-03-05 00:57:37 +04:00
|
|
|
else:
|
|
|
|
for ref, sha in refs.iteritems():
|
2018-05-30 12:16:33 +03:00
|
|
|
if not ref.endswith("^{}") and (
|
|
|
|
ref.startswith("refs/heads/") or ref.startswith("refs/tags/")
|
|
|
|
):
|
2014-10-31 20:40:58 +03:00
|
|
|
filteredrefs.append(ref)
|
2014-10-31 20:46:56 +03:00
|
|
|
filteredrefs.sort()
|
2014-10-31 20:40:58 +03:00
|
|
|
|
2015-04-16 01:31:06 +03:00
|
|
|
# the choice of OrderedDict vs plain dict has no impact on stock
|
|
|
|
# hg-git, but allows extensions to customize the order in which refs
|
|
|
|
# are returned
|
2014-10-31 20:40:58 +03:00
|
|
|
return util.OrderedDict((r, refs[r]) for r in filteredrefs)
|
2014-03-05 00:57:37 +04:00
|
|
|
|
2014-10-30 05:36:33 +03:00
|
|
|
def filter_min_date(self, refs):
    """Filter refs by minimum date.

    This only works for refs that are available locally."""
    min_date = compat.config(self.ui, "string", "git", "mindate")
    if min_date is None:
        return refs

    # anything whose commit/tag timestamp predates this is dropped
    min_timestamp, min_offset = hgutil.parsedate(min_date)

    def _recent_enough(obj):
        # annotated tags carry their own timestamp; everything else is
        # assumed to expose commit_time
        stamp = obj.tag_time if isinstance(obj, Tag) else obj.commit_time
        return stamp >= min_timestamp

    filtered = util.OrderedDict()
    for ref, sha in refs.iteritems():
        if _recent_enough(self.git[sha]):
            filtered[ref] = sha
    return filtered
|
2014-10-30 05:36:33 +03:00
|
|
|
|
2009-06-16 16:39:11 +04:00
|
|
|
def update_references(self):
    """Point a local Git branch ref at each exportable Mercurial bookmark."""
    exportable = self.get_exportable()

    for hgnode, bookmark_refs in exportable.iteritems():
        for refname in bookmark_refs.heads:
            # only bookmarks whose commit already has a git mapping
            # can be exported as git refs
            gitnode = self.map_git_get(hgnode)
            if gitnode:
                self.git.refs[refname] = gitnode
|
2009-06-16 16:39:11 +04:00
|
|
|
|
|
|
|
def export_hg_tags(self):
    """Export hg tags as git refs under refs/tags/.

    Only tags whose type is 'global' or 'git' are considered.  Spaces in
    tag names are replaced with underscores; names that are still not
    valid git refnames, and tags whose target commit has no git
    counterpart, are skipped with a warning.
    """
    for tag, sha in self.repo.tags().iteritems():
        if self.repo.tagtype(tag) in ("global", "git"):
            # git refnames may not contain spaces
            tag = tag.replace(" ", "_")
            target = self.map_git_get(hex(sha))
            if target is not None:
                tag_refname = "refs/tags/" + tag
                if check_ref_format(tag_refname):
                    self.git.refs[tag_refname] = target
                    # record the exported tag in the handler's tag map
                    self.tags[tag] = hex(sha)
                else:
                    self.repo.ui.warn(
                        _(
                            "Skipping export of tag %s because "
                            "it has invalid name as a git "
                            "refname.\n"
                        )
                        % tag
                    )
            else:
                self.repo.ui.warn(
                    _(
                        "Skipping export of tag %s because it "
                        "has no matching git revision.\n"
                    )
                    % tag
                )
|
2009-06-16 16:39:11 +04:00
|
|
|
|
2011-12-19 03:54:16 +04:00
|
|
|
def _filter_for_bookmarks(self, bms):
|
|
|
|
if not self.branch_bookmark_suffix:
|
|
|
|
return [(bm, bm) for bm in bms]
|
|
|
|
else:
|
2018-05-30 12:16:33 +03:00
|
|
|
|
2011-12-19 03:54:16 +04:00
|
|
|
def _filter_bm(bm):
|
|
|
|
if bm.endswith(self.branch_bookmark_suffix):
|
2018-05-30 12:16:33 +03:00
|
|
|
return bm[0 : -(len(self.branch_bookmark_suffix))]
|
2011-12-19 03:54:16 +04:00
|
|
|
else:
|
|
|
|
return bm
|
2018-05-30 12:16:33 +03:00
|
|
|
|
2011-12-19 03:54:16 +04:00
|
|
|
return [(_filter_bm(bm), bm) for bm in bms]
|
|
|
|
|
2014-10-29 21:56:31 +03:00
|
|
|
def get_exportable(self):
    """Map hg sha (hex string) -> heads_tags of git refnames to export.

    Collects every bookmark (as refs/heads/<stripped name>) and every
    known tag (as refs/tags/<name>) keyed by the hg commit each one
    points at.
    """

    class heads_tags(object):
        # tiny container holding the head and tag refnames for one commit
        def __init__(self):
            self.heads = set()
            self.tags = set()

        def __iter__(self):
            # heads first, then tags
            return itertools.chain(self.heads, self.tags)

        def __nonzero__(self):
            # truthy when there is anything to export (Python 2 protocol)
            return bool(self.heads) or bool(self.tags)

    res = collections.defaultdict(heads_tags)

    bms = self.repo._bookmarks
    for filtered_bm, bm in self._filter_for_bookmarks(bms):
        # the suffix-stripped bookmark name becomes the git branch name
        res[hex(bms[bm])].heads.add("refs/heads/" + filtered_bm)
    for tag, sha in self.tags.iteritems():
        res[sha].tags.add("refs/tags/" + tag)
    return res
|
|
|
|
|
2009-06-19 01:38:09 +04:00
|
|
|
def import_tags(self, refs):
    """Import git tag refs from *refs* into self.tags and save the map.

    Only refs under refs/tags/ whose objects exist locally and whose
    names are not already hg tags are considered.  Both lightweight
    tags (ref points straight at a commit) and annotated tags (a Tag
    object wrapping a commit) are handled.
    """
    keys = refs.keys()
    if not keys:
        return
    repotags = self.repo.tags()
    for k in keys[:]:
        ref_name = k
        parts = k.split("/")
        if parts[0] == "refs" and parts[1] == "tags":
            ref_name = "/".join([v for v in parts[2:]])
            # refs contains all the refs in the server, not just
            # the ones we are pulling
            if refs[k] not in self.git.object_store:
                continue
            # strip the "^{}" suffix of peeled (dereferenced) tag refs
            if ref_name[-3:] == "^{}":
                ref_name = ref_name[:-3]
            if ref_name not in repotags:
                obj = self.git.get_object(refs[k])
                sha = None
                if isinstance(obj, Commit):  # lightweight
                    sha = self.map_hg_get(refs[k])
                    if sha is not None:
                        self.tags[ref_name] = sha
                elif isinstance(obj, Tag):  # annotated
                    # dereference the tag object to the commit it wraps
                    (obj_type, obj_sha) = obj.object
                    obj = self.git.get_object(obj_sha)
                    if isinstance(obj, Commit):
                        sha = self.map_hg_get(obj_sha)
                        # TODO: better handling for annotated tags
                        if sha is not None:
                            self.tags[ref_name] = sha
    self.save_tags()
|
2009-06-05 14:56:22 +04:00
|
|
|
|
2009-06-16 17:44:19 +04:00
|
|
|
def update_hg_bookmarks(self, refs):
    """Create or fast-forward hg bookmarks from remote refs/heads/ refs.

    Each refs/heads/<name> whose commit is already mapped locally
    becomes bookmark <name> plus the configured branch-bookmark suffix.
    Existing bookmarks are only moved when the update is a fast-forward.
    """
    try:
        bms = self.repo._bookmarks

        # 'refs/heads/' is 11 chars; strip it to get the branch name
        heads = dict(
            [(ref[11:], refs[ref]) for ref in refs if ref.startswith("refs/heads/")]
        )

        suffix = self.branch_bookmark_suffix or ""
        changes = []
        for head, sha in heads.iteritems():
            # refs contains all the refs in the server, not just
            # the ones we are pulling
            hgsha = self.map_hg_get(sha)
            if hgsha is None:
                continue
            hgsha = bin(hgsha)
            if head not in bms:
                # new branch
                changes.append((head + suffix, hgsha))
            else:
                bm = self.repo[bms[head]]
                if bm.ancestor(self.repo[hgsha]) == bm:
                    # fast forward
                    changes.append((head + suffix, hgsha))

        if heads:
            util.updatebookmarks(self.repo, changes)

    except AttributeError:
        # NOTE(review): presumably raised when the bookmarks machinery is
        # unavailable on this repo/hg version -- confirm
        self.ui.warn(
            _("creating bookmarks failed, do you have" " bookmarks enabled?\n")
        )
|
2009-04-30 03:18:37 +04:00
|
|
|
|
2009-06-16 17:44:19 +04:00
|
|
|
def update_remote_branches(self, remote_name, refs):
    """Record the refs of *remote_name* locally.

    Branch heads are stored in self.remote_refs (as binary hg nodes,
    keyed "<remote>/<head>") and mirrored as refs/remotes/<remote>/<head>
    git refs; tag refs are copied into the local git refstore verbatim.
    """
    remote_refs = self.remote_refs
    # since we re-write all refs for this remote each time, prune
    # all entries matching this remote from our refs list now so
    # that we avoid any stale refs hanging around forever
    for t in list(remote_refs):
        if t.startswith(remote_name + "/"):
            del remote_refs[t]
    for ref_name, sha in refs.iteritems():
        if ref_name.startswith("refs/heads"):
            hgsha = self.map_hg_get(sha)
            # skip heads whose commit we don't have locally
            if hgsha is None or hgsha not in self.repo:
                continue
            head = ref_name[11:]
            remote_refs["/".join((remote_name, head))] = bin(hgsha)
            # TODO(durin42): what is this doing?
            new_ref = "refs/remotes/%s/%s" % (remote_name, head)
            self.git.refs[new_ref] = sha
        elif ref_name.startswith("refs/tags") and not ref_name.endswith("^{}"):
            # peeled ("^{}") tag entries are skipped; plain tag refs are
            # copied straight through
            self.git.refs[ref_name] = sha
|
2009-06-23 22:22:49 +04:00
|
|
|
|
2015-04-16 01:31:06 +03:00
|
|
|
# UTILITY FUNCTIONS
|
2009-06-16 16:39:11 +04:00
|
|
|
|
2009-04-29 22:50:56 +04:00
|
|
|
def convert_git_int_mode(self, mode):
    """Translate an integer git file mode into mercurial's flag string.

    0o100644 (regular file) -> "", 0o100755 (executable) -> "x",
    0o120000 (symlink) -> "l"; any other mode falls back to "".
    """
    # TODO: make these into constants
    convert = {0o100644: "", 0o100755: "x", 0o120000: "l"}
    # one dict lookup with a default instead of an 'in' test plus an index
    return convert.get(mode, "")
|
2009-04-30 03:18:37 +04:00
|
|
|
|
2009-07-03 00:58:51 +04:00
|
|
|
def get_file(self, commit, f):
    """Resolve path *f* within *commit*'s tree.

    Returns a (mode, sha, text) tuple as soon as a path component
    resolves to a blob; if every component resolves to a tree the loop
    ends and the function implicitly returns None.  A missing component
    raises KeyError from the tree lookup.
    """
    otree = self.git.tree(commit.tree)
    parts = f.split("/")
    for part in parts:
        (mode, sha) = otree[part]
        obj = self.git.get_object(sha)
        if isinstance(obj, Blob):
            return (mode, sha, obj._text)
        elif isinstance(obj, Tree):
            # descend one level and keep walking
            otree = obj
|
|
|
|
|
2014-12-02 08:18:34 +03:00
|
|
|
def get_files_changed(self, commit, detect_renames):
    """Diff *commit* against its first parent (or the empty tree).

    Returns a 3-tuple (files, gitlinks, renames):
      - files: dict path -> (delete_flag, mode, sha); delete_flag True
        means the path was removed
      - gitlinks: dict path -> new gitlink sha, or None for a deleted
        gitlink
      - renames: dict new path -> old path when *detect_renames* is
        true, otherwise None
    """
    tree = commit.tree
    btree = None

    # diff against the first parent only; root commits diff against None
    if commit.parents:
        btree = self.git[commit.parents[0]].tree

    files = {}
    gitlinks = {}
    renames = None
    rename_detector = None
    if detect_renames:
        renames = {}
        rename_detector = self._rename_detector

    # this set is unused if rename detection isn't enabled -- that makes
    # the code below simpler
    renamed_out = set()

    changes = diff_tree.tree_changes(
        self.git.object_store, btree, tree, rename_detector=rename_detector
    )

    for change in changes:
        oldfile, oldmode, oldsha = change.old
        newfile, newmode, newsha = change.new
        # actions are described by the following table ('no' means 'does
        # not exist'):
        #    old        new     |   action
        #     no       file     |  record file
        #     no     gitlink    |  record gitlink
        #    file       no      |  delete file
        #    file      file     |  record file
        #    file    gitlink    |  delete file and record gitlink
        #  gitlink      no      |  delete gitlink
        #  gitlink     file     |  delete gitlink and record file
        #  gitlink   gitlink    |  record gitlink
        #
        # There's an edge case here -- symlink <-> regular file transitions
        # are returned by dulwich as separate deletes and adds, not
        # modifications. The order of those results is unspecified and
        # could be either way round. Handle both cases: delete first, then
        # add -- delete stored in 'old = file' case, then overwritten by
        # 'new = file' case. add first, then delete -- record stored in
        # 'new = file' case, then membership check fails in 'old = file'
        # case so is not overwritten there. This is not an issue for
        # gitlink <-> {symlink, regular file} transitions because they
        # write to separate dictionaries.
        #
        # There's a similar edge case when rename detection is enabled: if
        # a file is renamed and then replaced by a symlink (typically to
        # the new location), it is returned by dulwich as an add and a
        # rename. The order of those results is unspecified. Handle both
        # cases: rename first, then add -- delete stored in 'new = file'
        # case with renamed_out, then renamed_out check passes in 'old =
        # file' case so is overwritten. add first, then rename -- add
        # stored in 'old = file' case, then membership check fails in 'new
        # = file' case so is overwritten.
        if newmode == 0o160000:
            # new = gitlink
            gitlinks[newfile] = newsha
            if change.type == diff_tree.CHANGE_RENAME:
                # don't record the rename because only file -> file renames
                # make sense in Mercurial
                gitlinks[oldfile] = None
            if oldmode is not None and oldmode != 0o160000:
                # file -> gitlink
                files[oldfile] = True, None, None
            continue
        if oldmode == 0o160000 and newmode != 0o160000:
            # gitlink -> no/file (gitlink -> gitlink is covered above)
            gitlinks[oldfile] = None
            continue
        if newfile is not None:
            self.audit_hg_path(newfile)
            # new = file
            files[newfile] = False, newmode, newsha
            if renames is not None and newfile != oldfile:
                renames[newfile] = oldfile
                renamed_out.add(oldfile)
                # the membership check is explained in a comment above
                if change.type == diff_tree.CHANGE_RENAME and oldfile not in files:
                    files[oldfile] = True, None, None
        else:
            # old = file
            # files   renamed_out  |  action
            #   no        *        |  write
            #  yes        no       |  ignore
            #  yes        yes      |  write
            if oldfile not in files or oldfile in renamed_out:
                files[oldfile] = True, None, None

    return files, gitlinks, renames
|
2009-07-03 00:58:51 +04:00
|
|
|
|
2014-12-03 01:53:11 +03:00
|
|
|
@hgutil.propertycache
def _rename_detector(self):
    """Lazily build a dulwich RenameDetector from hgrc configuration.

    Returns None when git.similarity is 0, leaving rename detection
    disabled (the default, to avoid surprises)."""
    threshold = compat.config(self.ui, "int", "git", "similarity")
    if not (0 <= threshold <= 100):
        raise error.Abort(_("git.similarity must be between 0 and 100"))
    if threshold == 0:
        return None

    # default is borrowed from Git
    limit = compat.config(self.ui, "int", "git", "renamelimit")
    if limit < 0:
        raise error.Abort(_("git.renamelimit must be non-negative"))

    return diff_tree.RenameDetector(
        self.git.object_store,
        rename_threshold=threshold,
        # a limit of 0 means "no limit" for dulwich
        max_files=limit or None,
        find_copies_harder=compat.config(
            self.ui, "bool", "git", "findcopiesharder"
        ),
    )
|
2014-12-03 01:53:11 +03:00
|
|
|
|
2012-08-06 20:30:33 +04:00
|
|
|
def parse_gitmodules(self, tree_obj):
    """Parse .gitmodules from a git tree specified by tree_obj

    :return: list of tuples (submodule path, url, name),
        where name is quoted part of the section's name, or
        empty list if nothing found
    """
    rv = []
    try:
        # the tree lookup raises KeyError when no .gitmodules entry exists
        unused_mode, gitmodules_sha = tree_obj[".gitmodules"]
    except KeyError:
        return rv
    gitmodules_content = self.git[gitmodules_sha].data
    fo = StringIO.StringIO(gitmodules_content)
    tt = dul_config.ConfigFile.from_file(fo)
    for section in tt.keys():
        # each section key is a (kind, name) tuple, e.g. ("submodule", "lib")
        section_kind, section_name = section
        if section_kind == "submodule":
            sm_path = tt.get(section, "path")
            sm_url = tt.get(section, "url")
            rv.append((sm_path, sm_url, section_name))
    return rv
|
|
|
|
|
|
|
|
def git_file_readlines(self, tree_obj, fname):
    """Read content of the named entry from a git commit tree.

    :return: list of lines, or an empty list when the entry is absent
    """
    if fname not in tree_obj:
        return []
    _mode, blob_sha = tree_obj[fname]
    return self.git[blob_sha].data.splitlines()
|
|
|
|
|
2009-06-16 17:44:19 +04:00
|
|
|
def remote_name(self, remote):
    """Return the first configured name whose path equals *remote*.

    Returns None when no configured path matches."""
    for name, path in self.paths:
        if path == remote:
            return name
    return None
|
|
|
|
|
2016-02-04 15:33:32 +03:00
|
|
|
def audit_hg_path(self, path):
    """Warn about -- or, when git.blockdothg is set, refuse -- a path
    containing a '.hg' component (i.e. inside a nested repository)."""
    if ".hg" not in path.split(os.path.sep):
        return
    if compat.config(self.ui, "bool", "git", "blockdothg"):
        raise error.Abort(
            ("Refusing to import problematic path %r" % path),
            hint=(
                "Mercurial cannot check out paths inside nested "
                "repositories; if you need to continue, then set "
                "'[git] blockdothg = false' in your hgrc."
            ),
        )
    self.ui.warn(
        (
            "warning: path %r is within a nested repository, "
            "which Mercurial cannot check out.\n"
        )
        % path
    )
|
2016-02-04 15:33:32 +03:00
|
|
|
|
2009-06-18 19:49:13 +04:00
|
|
|
# Stolen from hgsubversion
def swap_out_encoding(self, new_encoding="UTF-8"):
    """Globally switch mercurial's encoding to *new_encoding*.

    Returns the previous encoding so the caller can restore it.  When
    the encoding module or its attribute is unavailable (AttributeError
    or ImportError), falls back to the legacy hgutil._encoding field.
    """
    try:
        from edenscm.mercurial import encoding

        old = encoding.encoding
        encoding.encoding = new_encoding
    except (AttributeError, ImportError):
        old = hgutil._encoding
        hgutil._encoding = new_encoding
    return old
|
|
|
|
|
|
|
|
def decode_guess(self, string, encoding):
    """Re-encode *string* to UTF-8, guessing at its source encoding.

    The hinted *encoding* (when given) is tried first, then latin-1,
    with ascii-plus-replacement-characters as the last resort.
    """
    # text is not valid utf-8, try to make sense of it
    candidates = [encoding] if encoding else []
    candidates.append("latin-1")
    for candidate in candidates:
        try:
            return string.decode(candidate).encode("utf-8")
        except UnicodeDecodeError:
            continue
    return string.decode("ascii", "replace").encode("utf-8")
|
2009-06-18 19:49:13 +04:00
|
|
|
|
2009-04-24 02:26:10 +04:00
|
|
|
def get_transport_and_path(self, uri):
    """Method that sets up the transport (either ssh or http(s))

    Tests:

    >>> from collections import namedtuple
    >>> from dulwich.client import HttpGitClient, SSHGitClient
    >>> from edenscm.mercurial.ui import ui
    >>> mockrepo = namedtuple('localrepo', ['sharedvfs'])
    >>> mockrepo.sharedvfs = ''
    >>> g = GitHandler(mockrepo, ui())
    >>> client, url = g.get_transport_and_path('http://fqdn.com/test.git')
    >>> print isinstance(client, HttpGitClient)
    True
    >>> print url
    http://fqdn.com/test.git
    >>> client, url = g.get_transport_and_path('git@fqdn.com:user/repo.git')
    >>> print isinstance(client, SSHGitClient)
    True
    >>> print url
    user/repo.git
    >>> print client.host
    git@fqdn.com
    """
    # pass hg's ui.ssh config to dulwich
    if not issubclass(client.get_ssh_vendor, _ssh.SSHVendor):
        client.get_ssh_vendor = _ssh.generate_ssh_vendor(self.ui)

    # test for raw git ssh uri here so that we can reuse the logic below
    if util.isgitsshuri(uri):
        uri = "git+ssh://" + uri

    git_match = RE_GIT_URI.match(uri)
    if git_match:
        # git:// or git+ssh:// style URI
        res = git_match.groupdict()
        host, port, sepr = res["host"], res["port"], res["sepr"]
        transport = client.TCPGitClient
        if "ssh" in res["scheme"]:
            # reject ssh hosts/usernames that could be option injection
            util.checksafessh(host)
            transport = client.SSHGitClient
        path = res["path"]
        if sepr == "/" and not path.startswith("~"):
            # '/'-separated paths are absolute unless they are homedir refs
            path = "/" + path
        # strip trailing slash for heroku-style URLs
        # ssh+git://git@heroku.com:project.git/
        if sepr == ":" and path.endswith(".git/"):
            path = path.rstrip("/")
        if port:
            # NOTE(review): this sets 'port' on the dulwich client *module*,
            # not on the transport instance -- looks suspicious; preserved
            # as-is. The port is also passed to the constructor below.
            client.port = port

        return transport(host, port=port), path

    # plain http(s) transport is also accepted under a git+ prefix
    if uri.startswith("git+http://") or uri.startswith("git+https://"):
        uri = uri[4:]

    if uri.startswith("http://") or uri.startswith("https://"):
        # wire hg's configured credentials into urllib basic auth
        pmgr = compat.passwordmgr(self.ui)
        auth = hgutil.urlreq.HTTPBasicAuthHandler(pmgr)

        opener = hgutil.urlreq.build_opener(auth)
        ua = "git/20x6 (hg-git ; uses dulwich and hg ; like git-core)"
        opener.addheaders = [("User-Agent", ua)]
        try:
            return client.HttpGitClient(uri, opener=opener), uri
        except TypeError as e:
            if e.message.find("unexpected keyword argument 'opener'") >= 0:
                # Dulwich 0.9.4, which is the latest version that ships
                # with Ubuntu 14.04, doesn't support the 'opener' keyword.
                # Try without authentication.
                return client.HttpGitClient(uri), uri
            else:
                raise

    # if its not git or git+ssh, try a local url..
    return client.SubprocessGitClient(), uri
|
2018-12-06 22:15:51 +03:00
|
|
|
|
|
|
|
|
|
|
|
class GitMap(object):
    """Bidirectional git<->hg commit-node map parsed from a mapfile.

    Each mapfile line holds two 40-hex-digit shas separated by a space
    (82 bytes including the trailing newline); both directions are kept
    in dicts of binary nodes for O(1) lookup."""

    def __init__(self, content):
        self._mapgit = {}
        self._maphg = {}
        for line in content:
            # format is <40 hex digits> <40 hex digits>\n
            if len(line) != 82:
                raise ValueError(
                    _("corrupt mapfile: incorrect line length %d %s")
                    % (len(line), content)
                )
            gitnode, hgnode = bin(line[:40]), bin(line[41:81])
            self._mapgit[gitnode] = hgnode
            self._maphg[hgnode] = gitnode

    def lookupbyfirst(self, gitnode):
        """git node -> hg node, or None when unmapped."""
        return self._mapgit.get(gitnode)

    def lookupbysecond(self, hgnode):
        """hg node -> git node, or None when unmapped."""
        return self._maphg.get(hgnode)

    def add(self, gitnode, hgnode):
        """Record a new pair in both directions."""
        self._mapgit[gitnode] = hgnode
        self._maphg[hgnode] = gitnode

    def items(self):
        """(gitnode, hgnode) pairs, keyed by git node."""
        return self._mapgit.items()
|