import cStringIO
import errno
import re

from mercurial import context
from mercurial import node
from mercurial import patch
from mercurial import revlog
from mercurial import util as hgutil

import compathacks
import svnwrap
import svnexternals
import util


# Here is a diff mixing content and property changes in svn >= 1.7
#
# Index: a
# ===================================================================
# --- a (revision 12)
# +++ a (working copy)
# @@ -1,2 +1,3 @@
#  a
#  a
# +a
#
# Property changes on: a
# ___________________________________________________________________
# Added: svn:executable
# ## -0,0 +1 ##
# +*

class ParseError(Exception):
    pass

index_header = r'''Index: ([^\n]*)
=*
'''

property_header = r'''Property changes on: ([^\n]*)
_*
'''

headers_re = re.compile('(?:' + '|'.join([
    index_header,
    property_header,
]) + ')')

property_special_added = r'''(?:Added|Name): (svn:special)
(?:   \+|## -0,0 \+1 ##
\+)'''

property_special_deleted = r'''(?:Deleted|Name): (svn:special)
(?:   \-|## -1 \+0,0 ##
\-)'''

property_exec_added = r'''(?:Added|Name): (svn:executable)
(?:   \+|## -0,0 \+1 ##
\+)'''

property_exec_deleted = r'''(?:Deleted|Name): (svn:executable)
(?:   \-|## -1 \+0,0 ##
\-)'''

properties_re = re.compile('(?:' + '|'.join([
    property_special_added,
    property_special_deleted,
    property_exec_added,
    property_exec_deleted,
]) + ')')

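# Each alternative above captures exactly one group, so a properties_re
# match exposes four groups of which at most one is set: groups 1 and 2
# flag svn:special added/deleted, groups 3 and 4 flag svn:executable
# added/deleted. parsediff() below depends on this layout. As an
# illustration, an svn 1.7 style property hunk like this would set group 3:
#
#   Added: svn:executable
#   ## -0,0 +1 ##
#   +*
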
class filediff:
    def __init__(self, name):
        self.name = name
        self.diff = None
        self.binary = False
        self.executable = None
        self.symlink = None
        self.hasprops = False

    def isempty(self):
        return (not self.diff and not self.binary and not self.hasprops)

    def maybedir(self):
        return (not self.diff and not self.binary and self.hasprops
                and self.symlink is None and self.executable is None)

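# headers_re.split() returns, once the leading chunk is dropped, a flat
# sequence of (index name, property name, data) triples: each header
# alternative contributes a single capture group, so exactly one of the
# two names is non-None per header, and 'data' holds the text up to the
# next header. parsediff() walks those triples, merging content and
# property changes that refer to the same file into one filediff.
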
def parsediff(diff):
    changes = {}
    headers = headers_re.split(diff)[1:]
    if (len(headers) % 3) != 0:
        # headers should be a sequence of (index file, property file, data)
        raise ParseError('unexpected diff format')
    files = []
    for i in xrange(len(headers)/3):
        iname, pname, data = headers[3*i:3*i+3]
        fname = iname or pname
        if fname not in changes:
            changes[fname] = filediff(fname)
            files.append(changes[fname])
        f = changes[fname]
        if iname is not None:
            if data.strip():
                f.binary = data.lstrip().startswith(
                    'Cannot display: file marked as a binary type.')
                if not f.binary and '@@' in data:
                    # Non-empty diff
                    f.diff = data
        else:
            f.hasprops = True
            for m in properties_re.finditer(data):
                p = m.group(1, 2, 3, 4)
                if p[0] or p[1]:
                    f.symlink = bool(p[0])
                elif p[2] or p[3]:
                    f.executable = bool(p[2])
    return files

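# For instance, running parsediff() over the svn 1.7 example diff quoted
# at the top of this module would yield a single filediff named 'a' with
# a one-hunk diff, hasprops=True and executable=True.
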
class BadPatchApply(Exception):
    pass

def print_your_svn_is_old_message(ui): # pragma: no cover
    ui.status("In light of that, I'll fall back and do diffs, but it won't do "
              "as good a job. You should really upgrade your server.\n")

def mempatchproxy(parentctx, files):
    # Avoid circular references patch.patchfile -> mempatch
    patchfile = patch.patchfile

    # TODO(durin42): make this a compat path for hg < 1.6.
    class mempatch(patchfile):
        def __init__(self, ui, fname, opener, missing=False, eolmode=None):
            patchfile.__init__(self, ui, fname, None, False, eolmode)

        def readlines(self, fname):
            if fname not in parentctx:
                raise IOError(errno.ENOENT, 'Cannot find %r to patch' % fname)
            fctx = parentctx[fname]
            data = fctx.data()
            if 'l' in fctx.flags():
                data = 'link ' + data
            return cStringIO.StringIO(data).readlines()

        def writelines(self, fname, lines):
            files[fname] = ''.join(lines)

        def unlink(self, fname):
            files[fname] = None

    return mempatch

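# Subversion represents a symlink's content as 'link <target>' while
# Mercurial stores only the target, so readlines() above re-adds the
# 'link ' prefix to make parent data line up with svn-generated diffs;
# diff_branchrev()'s filectxfn strips it again after patching.
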
def filteriterhunks(meta):
    iterhunks = patch.iterhunks
    def filterhunks(*args, **kwargs):
        # ui, fp, sourcefile=None, textmode=False
        applycurrent = False
        # Passing False instead of textmode because we should never
        # be ignoring EOL type.
        fp = args[0]
        gen = iterhunks(fp)
        for data in gen:
            if data[0] == 'file':
                if data[1][1] in meta.filemap:
                    applycurrent = True
                else:
                    applycurrent = False
            assert data[0] != 'git', 'Filtering git hunks not supported.'
            if applycurrent:
                yield data
    return filterhunks

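# patch.iterhunks() yields tagged events such as ('file', ...) and
# ('hunk', ...); filterhunks() latches onto the most recent 'file' event
# to decide whether the hunks that follow belong to a file kept by the
# filemap, and drops them otherwise.
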
class svnbackend(patch.repobackend):
    def getfile(self, fname):
        # In Mercurial >= 3.2, if fname is missing, data will be None and we
        # should return None, None in that case. Earlier versions will raise
        # an IOError which we let propagate up the stack.
        f = super(svnbackend, self).getfile(fname)
        if f is None:
            return None, None
        data, flags = f
        if data is None:
            return None, None
        islink, isexec = flags
        if islink:
            data = 'link ' + data
        return data, (islink, isexec)

def patchrepo(ui, meta, parentctx, patchfp):
    store = patch.filestore(util.getfilestoresize(ui))
    try:
        touched = set()
        backend = svnbackend(ui, meta.repo, parentctx, store)

        try:
            try:
                ret = patch.patchbackend(ui, backend, patchfp, 0, files=touched)
            except TypeError:
                # Mercurial >= 3.4 has an extra prefix parameter
                ret = patch.patchbackend(ui, backend, patchfp, 0, '',
                                         files=touched)
            if ret < 0:
                raise BadPatchApply('patching failed')
            if ret > 0:
                raise BadPatchApply('patching succeeded with fuzz')
        except patch.PatchError, e:
            raise BadPatchApply(str(e))

        files = {}
        for f in touched:
            try:
                data, mode, copied = store.getfile(f)
                files[f] = data
            except IOError:
                files[f] = None
        return files
    finally:
        store.close()

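# patchrepo() thus returns a mapping from every touched path to its
# patched fulltext, with None standing in for files the patch deleted
# (the filestore raises IOError for those, which is caught above).
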
def diff_branchrev(ui, svn, meta, branch, branchpath, r, parentctx):
    """Extract all 'branch' content at a given revision.

    Return a tuple (files, filectxfn) where 'files' is the list of all files
    in the branch at the given revision, and 'filectxfn' is a memctx compatible
    callable to retrieve individual file information. Raise BadPatchApply upon
    error.
    """
    try:
        prev, pbranch, ppath = meta.get_source_rev(ctx=parentctx)
    except KeyError:
        prev, pbranch, ppath = None, None, None
    try:
        if prev is None or pbranch == branch:
            # letting patch handle binaries sounded
            # cool, but it breaks patch in sad ways
            d = svn.get_unified_diff(branchpath, r.revnum, other_rev=prev,
                                     deleted=False, ignore_type=False)
        else:
            d = svn.get_unified_diff(branchpath, r.revnum,
                                     other_path=ppath, other_rev=prev,
                                     deleted=True, ignore_type=True)
            if d:
                raise BadPatchApply('branch creation with mods')
    except svnwrap.SubversionRepoCanNotDiff:
        raise BadPatchApply('subversion diffing code is not supported')
    except svnwrap.SubversionException, e:
        if len(e.args) > 1 and e.args[1] != svnwrap.ERR_FS_NOT_FOUND:
            raise
        raise BadPatchApply('previous revision does not exist')
    if '\0' in d:
        raise BadPatchApply('binary diffs are not supported')
    files_data = {}
    changed = parsediff(d)
    # Here we ensure that all files, including the new empty ones
    # are marked as touched. Content is loaded on demand.
    touched_files = set(f.name for f in changed)
    d2 = '\n'.join(f.diff for f in changed if f.diff)
    if changed:
        files_data = patchrepo(ui, meta, parentctx, cStringIO.StringIO(d2))
        for x in files_data.iterkeys():
            ui.note('M %s\n' % x)
    else:
        ui.status('Not using patch for %s, diff had no hunks.\n' %
                  r.revnum)

    unknown_files = set()
    for p in r.paths:
        action = r.paths[p].action
        if not p.startswith(branchpath) or action not in 'DR':
            continue
        if branchpath:
            p2 = p[len(branchpath)+1:].strip('/')
        else:
            p2 = p
        if p2 in parentctx:
            toucheds = [p2]
        else:
            # If this isn't in the parent ctx, it must've been a dir
            toucheds = [f for f in parentctx if f.startswith(p2 + '/')]
        if action == 'R':
            # Files were replaced, we don't know if they still exist
            unknown_files.update(toucheds)
        else:
            files_data.update((f, None) for f in toucheds)

    touched_files.update(files_data)
    touched_files.update(unknown_files)

    # As of svn 1.7, diff may contain a lot of property changes for
    # directories. We do not want to include these in our touched
    # files list so we try to filter them while minimizing the number
    # of svn API calls.
    property_files = set(f.name for f in changed if f.maybedir())
    property_files.discard('.')
    touched_files.discard('.')
    branchprefix = (branchpath and branchpath + '/') or branchpath
    for f in list(property_files):
        if f in parentctx:
            continue
        # We can be smarter here by checking if f is a subcomponent
        # of a known path in parentctx or touched_files. KISS for now.
        kind = svn.checkpath(branchprefix + f, r.revnum)
        if kind == 'd':
            touched_files.discard(f)

    copies = getcopies(svn, meta, branch, branchpath, r, touched_files,
                       parentctx)

    # We note binary files because svn's diff format doesn't describe
    # what changed, only that a change occurred. This means we'll have
    # to pull them as fulltexts from the server outside the diff
    # apply.
    binary_files = set(f.name for f in changed if f.binary)
    exec_files = dict((f.name, f.executable) for f in changed
                      if f.executable is not None)
    link_files = dict((f.name, f.symlink) for f in changed
                      if f.symlink is not None)
    def filectxfn(repo, memctx, path):
        if path in files_data and files_data[path] is None:
            return compathacks.filectxfn_deleted(memctx, path)

        if path in binary_files or path in unknown_files:
            pa = path
            if branchpath:
                pa = branchpath + '/' + path
            try:
                data, mode = svn.get_file(pa, r.revnum)
            except IOError:
                return compathacks.filectxfn_deleted_reraise(memctx)
            isexe = 'x' in mode
            islink = 'l' in mode
        else:
            isexe = exec_files.get(path, 'x' in parentctx.flags(path))
            islink = link_files.get(path, 'l' in parentctx.flags(path))
            data = ''
            if path in files_data:
                data = files_data[path]
                if islink:
                    data = data[len('link '):]
            elif path in parentctx:
                data = parentctx[path].data()

        copied = copies.get(path)
        # TODO this branch feels like it should not be required,
        # and this may actually imply a bug in getcopies
        if copied not in parentctx.manifest():
            copied = None
        return compathacks.makememfilectx(repo,
                                          path=path,
                                          data=data,
                                          islink=islink,
                                          isexec=isexe,
                                          copied=copied)

    return list(touched_files), filectxfn

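# makecopyfinder() resolves exact file copies first, then falls back to
# directory copies, longest destination prefix first. As a purely
# illustrative example: if svn reports 'dir' copied from 'srcdir', a
# query for 'dir/sub/f' resolves to the source path 'srcdir/sub/f'.
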
def makecopyfinder(meta, r, branchpath):
    """Return a function detecting copies.

    Returned copyfinder(path) returns None if no copy information can
    be found or ((source, sourcerev), sourcepath) where "sourcepath" is the
    copy source path, "sourcerev" the source svn revision and "source" is the
    copy record path causing the copy to occur. If a single file was copied
    "sourcepath" and "source" are the same, while file copies detected from
    directory copies return the copied source directory in "source".
    """
    # cache changeset contexts and map them to source svn revisions
    ctxs = {}
    def getctx(branch, svnrev):
        if svnrev in ctxs:
            return ctxs[svnrev]
        changeid = meta.get_parent_revision(svnrev + 1, branch, True)
        ctx = None
        if changeid != revlog.nullid:
            ctx = meta.repo.changectx(changeid)
        ctxs[svnrev] = ctx
        return ctx

    # filter copy information for current branch
    branchpath = (branchpath and branchpath + '/') or ''
    copies = []
    for path, e in r.paths.iteritems():
        if not e.copyfrom_path:
            continue
        if not path.startswith(branchpath):
            continue
        # compute converted source path and revision
        frompath, frombranch = meta.split_branch_path(e.copyfrom_path)[:2]
        if frompath is None:
            continue
        fromctx = getctx(frombranch, e.copyfrom_rev)
        if fromctx is None:
            continue
        destpath = path[len(branchpath):]
        copies.append((destpath, (frompath, fromctx)))

    copies.sort(reverse=True)
    exactcopies = dict(copies)

    def finder(path):
        if path in exactcopies:
            return exactcopies[path], exactcopies[path][0]
        # look for parent directory copy, longest first
        for dest, (source, sourcectx) in copies:
            dest = dest + '/'
            if not path.startswith(dest):
                continue
            sourcepath = source + '/' + path[len(dest):]
            return (source, sourcectx), sourcepath
        return None

    return finder

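# getcopies() below groups candidate copies by the copy event that
# produced them, then keeps only those whose contents are identical in
# the source and parent contexts, mirroring what replay-based conversion
# would record.
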
def getcopies(svn, meta, branch, branchpath, r, files, parentctx):
    """Return a mapping {dest: source} for every file copied into r.
    """
    if parentctx.node() == revlog.nullid:
        return {}

    # Extract svn copy information, group them by copy source.
    # The idea is to duplicate the replay behaviour where copies are
    # evaluated per copy event (one event for all files in a directory copy,
    # one event for single file copy). We assume that copy events match
    # copy sources in revision info.
    svncopies = {}
    finder = makecopyfinder(meta, r, branchpath)
    for f in files:
        copy = finder(f)
        if copy:
            svncopies.setdefault(copy[0], []).append((f, copy[1]))
    if not svncopies:
        return {}

    # check svn copies really make sense in mercurial
    hgcopies = {}
    for (sourcepath, sourcectx), copies in svncopies.iteritems():
        for k, v in copies:
            if not util.issamefile(sourcectx, parentctx, v):
                continue
            hgcopies.update({k: v})
    return hgcopies

def fetch_externals(ui, svn, branchpath, r, parentctx):
    """Extract svn:externals for the current revision and branch

    Return an externalsfile instance or None if there are no externals
    to convert and never were.
    """
    externals = svnexternals.parse(ui, parentctx)
    # Detect property additions only, changes are handled by checking
    # existing entries individually. Projects are unlikely to store
    # externals on many different root directories, so we trade code
    # duplication and complexity for a constant lookup price at every
    # revision in the common case.
    dirs = set(externals)
    if parentctx.node() == revlog.nullid:
        dirs.update([p for p, k in svn.list_files(branchpath, r.revnum) if k == 'd'])
        dirs.add('')
    else:
        branchprefix = (branchpath and branchpath + '/') or branchpath
        for path, e in r.paths.iteritems():
            if e.action == 'D':
                continue
            if not path.startswith(branchprefix) and path != branchpath:
                continue
            kind = svn.checkpath(path, r.revnum)
            if kind != 'd':
                continue
            path = path[len(branchprefix):]
            dirs.add(path)
            if e.action == 'M' or (e.action == 'A' and e.copyfrom_path):
                # Do not recurse in copied directories, changes are marked
                # as 'M', except for the copied one.
                continue
            for child, k in svn.list_files(branchprefix + path, r.revnum):
                if k == 'd':
                    dirs.add((path + '/' + child).strip('/'))

    # Retrieve new or updated values
    for dir in dirs:
        try:
            dpath = (branchpath and branchpath + '/' + dir) or dir
            values = svn.list_props(dpath, r.revnum)
            externals[dir] = values.get('svn:externals', '')
        except IOError:
            externals[dir] = ''
    return externals

def fetch_branchrev(svn, meta, branch, branchpath, r, parentctx):
    """Extract all 'branch' content at a given revision.

    Return a tuple (files, filectxfn) where 'files' is the list of all files
    in the branch at the given revision, and 'filectxfn' is a memctx compatible
    callable to retrieve individual file information.
    """
    files = []
    if parentctx.node() == revlog.nullid:
        # Initial revision, fetch all files
        for path, kind in svn.list_files(branchpath, r.revnum):
            if kind == 'f':
                files.append(path)
    else:
        branchprefix = (branchpath and branchpath + '/') or ''
        for path, e in r.paths.iteritems():
            if path == branchpath:
                if e.action != 'R' or branch not in meta.branches:
                    # Full-branch replacements are handled as reverts,
                    # skip everything else.
                    continue
            elif not path.startswith(branchprefix):
                continue
            if not meta.is_path_valid(path):
                continue
            kind = svn.checkpath(path, r.revnum)
            path = path[len(branchprefix):]
            if kind == 'f':
                files.append(path)
            elif kind == 'd':
                if e.action == 'M':
                    continue
                dirpath = branchprefix + path
                for child, k in svn.list_files(dirpath, r.revnum):
                    if k == 'f':
                        if path:
                            childpath = '%s/%s' % (path, child)
                        else:
                            childpath = child
                        files.append(childpath)
                if e.action == 'R':
                    # Check all files in replaced directory
                    path = path + '/'
                    files += [f for f in parentctx if f.startswith(path)]
            else:
                if path in parentctx:
                    files.append(path)
                    continue
                # Assume it's a deleted directory
                path = path + '/'
                deleted = [f for f in parentctx if f.startswith(path)]
                files += deleted

    copies = getcopies(svn, meta, branch, branchpath, r, files, parentctx)

    def filectxfn(repo, memctx, path):
        svnpath = path
        if branchpath:
            svnpath = branchpath + '/' + path
        try:
            data, mode = svn.get_file(svnpath, r.revnum)
        except IOError:
            return compathacks.filectxfn_deleted_reraise(memctx)
        isexec = 'x' in mode
        islink = 'l' in mode
        copied = copies.get(path)
        # TODO this branch feels like it should not be required,
        # and this may actually imply a bug in getcopies
        if copied not in parentctx.manifest():
            copied = None
        return compathacks.makememfilectx(repo,
                                          path=path,
                                          data=data,
                                          islink=islink,
                                          isexec=isexec,
                                          copied=copied)

    return files, filectxfn

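# fetch_branchrev() is the non-incremental counterpart of
# diff_branchrev(): rather than patching the parent context, it pulls
# fulltexts from the server for every touched file, which is slower but
# also works for revisions the diff path cannot handle, such as branch
# creations.
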
def checkbranch(meta, r, branch):
    branchedits = meta.revmap.branchedits(branch, r)
    if not branchedits:
        return None
    branchtip = branchedits[0][1]
    for child in meta.repo[branchtip].children():
        b = child.branch() != 'default' and child.branch() or None
        if b == branch and child.extra().get('close'):
            return None
    return branchtip

def branches_in_paths(meta, tbdelta, paths, revnum, checkpath, listdir,
                      firstrun):
    '''Given a list of paths, return mapping of all branches touched
    to their branch path.
    '''
    branches = {}
    if firstrun:
        paths_need_discovery = [p for (p, t) in listdir('', revnum)
                                if t == 'f']
    else:
        paths_need_discovery = []

    for p in paths:
        relpath, branch, branchpath = meta.split_branch_path(p)
        if relpath is not None:
            branches[branch] = branchpath
        elif paths[p].action == 'D' and not meta.get_path_tag(p):
            ln = meta.localname(p)
            # must check in branches_to_delete as well, because this runs after we
            # already updated the branch map
            if ln in meta.branches or ln in tbdelta['branches'][1]:
                branches[ln] = p
        else:
            paths_need_discovery.append(p)

    if not paths_need_discovery:
        return branches

    actually_files = []
    while paths_need_discovery:
        p = paths_need_discovery.pop(0)
        if checkpath(p, revnum) == 'f':
            actually_files.append(p)
        # if there's a copyfrom_path and there were files inside that copyfrom,
        # we need to detect those branches. It's a little thorny and slow, but
        # seems to be the best option.
        elif paths[p].copyfrom_path and not meta.get_path_tag(p):
            paths_need_discovery.extend(['%s/%s' % (p, x[0])
                                         for x in listdir(p, revnum)
                                         if x[1] == 'f'])

    for path in actually_files:
        if meta.get_path_tag(path):
            continue
        fpath, branch, bpath = meta.split_branch_path(path, existing=False)
        if bpath is None:
            continue
        branches[branch] = bpath

    return branches

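# convert_rev() drives a whole svn revision in stupid mode: it maps the
# touched paths to branches, attempts the incremental diff_branchrev()
# path for each branch, falls back to fetch_branchrev() on
# BadPatchApply, commits a memctx per branch, and finally returns the
# branches that were deleted or replaced in this revision.
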
def convert_rev(ui, meta, svn, r, tbdelta, firstrun):
    if svnwrap.subversion_version >= (1, 9, 0):
        raise hgutil.Abort(
            "hgsubversion doesn't support stupid mode with Subversion 1.9."
            ' Please email hgsubversion@googlegroups.com and let us know you'
            ' saw this, otherwise we may remove stupid mode entirely.')
    # this server fails at replay

    if meta.filemap:
        raise hgutil.Abort('filemaps currently unsupported with stupid replay.')

    branches = branches_in_paths(meta, tbdelta, r.paths, r.revnum,
                                 svn.checkpath, svn.list_files, firstrun)
    brpaths = branches.values()
    bad_branch_paths = {}
    for br, bp in branches.iteritems():
        bad_branch_paths[br] = []

        # This next block might be needed, but for now I'm omitting it until it
        # can be proven necessary.
        # for bad in brpaths:
        #     if bad.startswith(bp) and len(bad) > len(bp):
        #         bad_branch_paths[br].append(bad[len(bp)+1:])

        # We've got a branch that contains other branches. We have to be careful
        # to get results similar to real replay in this case.
        for existingbr in meta.branches:
            bad = meta.remotename(existingbr)
            if bad.startswith(bp) and len(bad) > len(bp):
                bad_branch_paths[br].append(bad[len(bp)+1:])

    deleted_branches = {}
    for p in r.paths:
        tag = meta.get_path_tag(p)
        if tag and tag not in meta.tags:
            continue
        branch = meta.localname(p)
        if not (r.paths[p].action == 'R' and branch in meta.branches):
            continue
        # Check the branch is not being replaced by one of its
        # ancestors, it happens a lot with project-wide reverts.
        frompath = r.paths[p].copyfrom_path
        frompath, frombranch = meta.split_branch_path(
            frompath, existing=False)[:2]
        if frompath == '':
            fromnode = meta.get_parent_revision(
                r.paths[p].copyfrom_rev + 1, frombranch, exact=True)
            if fromnode != node.nullid:
                fromctx = meta.repo[fromnode]
                pctx = meta.repo[meta.get_parent_revision(
                    r.revnum, branch, exact=True)]
                if util.isancestor(pctx, fromctx):
                    continue
        closed = checkbranch(meta, r, branch)
        if closed is not None:
            deleted_branches[branch] = closed

    date = meta.fixdate(r.date)
    check_deleted_branches = set(tbdelta['branches'][1])
    for b in branches:

        if meta.skipbranch(b):
            continue

        parentctx = meta.repo[meta.get_parent_revision(r.revnum, b)]
        tag = meta.get_path_tag(meta.remotename(b))
        kind = svn.checkpath(branches[b], r.revnum)
        if kind != 'd':
            if not tag:
                # Branch does not exist at this revision. Get parent
                # revision and remove everything.
                deleted_branches[b] = parentctx.node()
            continue

        # The nullrev check might not be necessary in theory but svn <
        # 1.7 failed to diff branch creation so the diff_branchrev()
        # path does not support this case with svn >= 1.7. We can fix
        # it, or we can force the existing fetch_branchrev() path. Do
        # the latter for now.
        incremental = (meta.firstpulled > 0 and
                       parentctx.rev() != node.nullrev and
                       not firstrun)

        if incremental:
            try:
                files_touched, filectxfn2 = diff_branchrev(
                    ui, svn, meta, b, branches[b], r, parentctx)
            except BadPatchApply, e:
                # Either this revision or the previous one does not exist.
                ui.note("Fetching entire revision: %s.\n" % e.args[0])
                incremental = False
        if not incremental:
            files_touched, filectxfn2 = fetch_branchrev(
                svn, meta, b, branches[b], r, parentctx)

        externals = {}
        if meta.layout != 'single':
            externals = fetch_externals(ui, svn, branches[b], r, parentctx)
            externals = svnexternals.getchanges(ui, meta.repo, parentctx,
                                                externals)
            files_touched.extend(externals)

        def filectxfn(repo, memctx, path):
            if path in externals:
                if externals[path] is None:
                    raise IOError(errno.ENOENT, 'no externals')
                return compathacks.makememfilectx(repo,
                                                  path=path,
                                                  data=externals[path],
                                                  islink=False,
                                                  isexec=False,
                                                  copied=None)
            for bad in bad_branch_paths[b]:
                if path.startswith(bad):
                    raise IOError(errno.ENOENT, 'Path %s is bad' % path)
            return filectxfn2(repo, memctx, path)

        if '' in files_touched:
            files_touched.remove('')
        excluded = [f for f in files_touched if f not in meta.filemap]
        for f in excluded:
            files_touched.remove(f)

        if b:
            # Regular tag without modifications, it will be committed by
            # svnmeta.committag(), we can skip the whole branch for now
            if (tag and tag not in meta.tags and
                    b not in meta.branches
                    and b not in compathacks.branchset(meta.repo)
                    and not files_touched):
                continue

        if parentctx.node() == node.nullid and not files_touched:
            meta.repo.ui.debug('skipping commit since parent is null and no files touched.\n')
            continue

        for f in files_touched:
            if f:
                # this is a case that really shouldn't ever happen, it means
                # something is very wrong
                assert f[0] != '/'

        extra = meta.genextra(r.revnum, b)
        if tag:
            if parentctx.node() == node.nullid:
                continue
            extra.update({'branch': parentctx.extra().get('branch', None),
                          'close': 1})

        origbranch = extra.get('branch', None)
        meta.mapbranch(extra)
        current_ctx = context.memctx(meta.repo,
                                     [parentctx.node(), revlog.nullid],
                                     meta.getmessage(r),
                                     files_touched,
                                     filectxfn,
                                     meta.authors[r.author],
                                     date,
                                     extra)
        ha = meta.repo.svn_commitctx(current_ctx)

        if not tag:
            if (not origbranch in meta.branches
                and not meta.get_path_tag(meta.remotename(origbranch))):
                meta.branches[origbranch] = None, 0, r.revnum
            meta.revmap[r.revnum, b] = ha
        else:
            meta.movetag(tag, ha, r, date)
            meta.addedtags.pop(tag, None)
        util.describe_commit(ui, ha, b)

    # These are branches with an 'R' status in svn log. This means they were
    # replaced by some other branch, so we need to verify they get marked as closed.
    for branch in check_deleted_branches:
        closed = checkbranch(meta, r, branch)
        if closed is not None:
            deleted_branches[branch] = closed

    return deleted_branches