2009-06-16 10:43:12 +04:00
|
|
|
import traceback
|
|
|
|
|
|
|
|
from mercurial import revlog
|
|
|
|
from mercurial import node
|
|
|
|
from mercurial import context
|
|
|
|
from mercurial import util as hgutil
|
|
|
|
|
|
|
|
import svnexternals
|
|
|
|
import util
|
|
|
|
|
|
|
|
|
|
|
|
class MissingPlainTextError(Exception):
    """Raised when replaying a txdelta needs a source file that the
    repository does not have available as plain text.
    """
|
|
|
|
|
|
|
|
class ReplayException(Exception):
    """Raised at commit time when the replay of a revision previously
    recorded an exception.
    """
|
|
|
|
|
|
|
|
def convert_rev(ui, meta, svn, r, tbdelta):
    """Replay Subversion revision ``r`` and commit the result to Mercurial.

    Parameters:
      ui      -- Mercurial ui object, used for warnings and commit output
      meta    -- conversion metadata: holds the replay editor, revmap,
                 branch/tag maps, author map and path-splitting helpers
      svn     -- Subversion repository wrapper providing get_replay()
      r       -- the Subversion revision object being converted
      tbdelta -- tag/branch delta for this revision;
                 tbdelta['branches'][1] lists branches to be closed

    Returns a dict mapping each branch closed in this revision to the hg
    node of its most recent edit, so the caller can commit the closures.

    Raises ReplayException if the editor recorded an exception during
    replay, and MissingPlainTextError if a file needed to replay a
    txdelta is still missing after findmissing().
    """
    # Reset the shared replay editor state and feed the revision through
    # Subversion's replay interface; the editor accumulates file changes,
    # copies, deletions, symlinks, exec bits and externals on its
    # ``current`` attribute.
    editor = meta.editor
    editor.current.clear()
    editor.current.rev = r

    svn.get_replay(r.revnum, editor)
    current = editor.current
    # Try to fetch plain text for any files the delta alone couldn't
    # reconstruct (anything left in current.missing afterwards is fatal).
    current.findmissing(svn)

    # update externals
    if current.externals:
        # accumulate externals records for all branches
        revnum = current.rev.revnum
        branches = {}
        for path, entry in current.externals.iteritems():
            if not meta.is_path_valid(path):
                ui.warn('WARNING: Invalid path %s in externals\n' % path)
                continue

            # Split an svn path into (path-in-branch, branch, branch-path).
            p, b, bp = meta.split_branch_path(path)
            if bp not in branches:
                # First externals entry seen on this branch: seed the
                # record with the parent revision's .hgsvnexternals file,
                # if any, so unrelated entries are preserved.
                external = svnexternals.externalsfile()
                parent = meta.get_parent_revision(revnum, b)
                pctx = meta.repo[parent]
                if '.hgsvnexternals' in pctx:
                    external.read(pctx['.hgsvnexternals'].data())
                branches[bp] = external
            else:
                external = branches[bp]

            external[p] = entry

        # register externals file changes
        for bp, external in branches.iteritems():
            path = bp + '/.hgsvnexternals'
            if external:
                # NOTE(review): set(path, data, False, False) — the two
                # flags presumably mean not-executable / not-a-symlink;
                # confirm against the editor's set() signature.
                current.set(path, external.write(), False, False)
            else:
                # No entries left: drop the tracking file entirely.
                current.delete(path)

    # An exception captured during replay poisons the whole revision.
    if current.exception is not None:  #pragma: no cover
        traceback.print_exception(*current.exception)
        raise ReplayException()
    if current.missing:
        raise MissingPlainTextError()

    # paranoidly generate the list of files to commit
    files_to_commit = set(current.files.keys())
    files_to_commit.update(current.symlinks.keys())
    files_to_commit.update(current.execfiles.keys())
    files_to_commit.update(current.deleted.keys())
    # back to a list and sort so we get sane behavior
    files_to_commit = list(files_to_commit)
    files_to_commit.sort()
    branch_batches = {}
    rev = current.rev
    date = meta.fixdate(rev.date)

    # build up the branches that have files on them
    for f in files_to_commit:
        if not meta.is_path_valid(f):
            continue
        p, b = meta.split_branch_path(f)[:2]
        if b not in branch_batches:
            branch_batches[b] = []
        # Keep both the in-branch path and the full svn path; filectxfn
        # below maps the former back to the latter via the files dict.
        branch_batches[b].append((p, f))

    closebranches = {}
    for branch in tbdelta['branches'][1]:
        branchedits = meta.revmap.branchedits(branch, rev)
        if len(branchedits) < 1:
            # can't close a branch that never existed
            continue
        # NOTE(review): takes element [0][1] as the hg node of the
        # branch's latest edit — assumes branchedits() returns
        # newest-first (key, node) pairs; confirm in revmap.
        ha = branchedits[0][1]
        closebranches[branch] = ha

    # 1. handle normal commits
    closedrevs = closebranches.values()
    for branch, files in branch_batches.iteritems():

        # A branch with real file changes is not an empty branch.
        if branch in current.emptybranches and files:
            del current.emptybranches[branch]

        # Map in-branch path -> full svn path for this branch's batch.
        files = dict(files)
        parents = meta.get_parent_revision(rev.revnum, branch), revlog.nullid
        # Skip commits whose parent is a branch being closed in this
        # same revision.
        if parents[0] in closedrevs and branch in meta.closebranches:
            continue

        extra = meta.genextra(rev.revnum, branch)
        tag = False
        if branch is not None:
            tag = meta.is_path_tag(meta.remotename(branch))

            # Skip paths that are neither a known tag nor a known branch.
            if (not (tag and tag in meta.tags) and
                (branch not in meta.branches
                 and branch not in meta.repo.branchtags())):
                continue

        parentctx = meta.repo.changectx(parents[0])
        if tag:
            # A tag edit with no real parent has nothing to hang off of.
            if parentctx.node() == node.nullid:
                continue
            # Tag commits are recorded as closed heads on the parent's
            # named branch.
            extra.update({'branch': parentctx.extra().get('branch', None),
                          'close': 1})

        if '.hgsvnexternals' not in parentctx and '.hgsvnexternals' in files:
            # Do not register empty externals files
            if (files['.hgsvnexternals'] in current.files
                and not current.files[files['.hgsvnexternals']]):
                del files['.hgsvnexternals']

        def filectxfn(repo, memctx, path):
            # Callback invoked by memctx for each committed path;
            # raising IOError signals a deleted file.
            current_file = files[path]
            if current_file in current.deleted:
                raise IOError()
            copied = current.copies.get(current_file)
            flags = parentctx.flags(path)
            # Fall back to the parent's flags when replay didn't touch
            # the exec/symlink bits.
            is_exec = current.execfiles.get(current_file, 'x' in flags)
            is_link = current.symlinks.get(current_file, 'l' in flags)
            if current_file in current.files:
                data = current.files[current_file]
                # Symlink targets arrive from svn prefixed with 'link '.
                if is_link and data.startswith('link '):
                    data = data[len('link '):]
                elif is_link:
                    ui.warn('file marked as link, but contains data: '
                            '%s (%r)\n' % (current_file, flags))
            else:
                # Flag-only change: reuse the parent's file contents.
                data = parentctx.filectx(path).data()
            return context.memfilectx(path=path,
                                      data=data,
                                      islink=is_link, isexec=is_exec,
                                      copied=copied)

        # Drop the branch name when branch names are disabled or it is
        # the default branch anyway.
        if not meta.usebranchnames or extra.get('branch', None) == 'default':
            extra.pop('branch', None)
        current_ctx = context.memctx(meta.repo,
                                     parents,
                                     rev.message or '...',
                                     files.keys(),
                                     filectxfn,
                                     meta.authors[rev.author],
                                     date,
                                     extra)

        new_hash = meta.repo.commitctx(current_ctx)
        util.describe_commit(ui, new_hash, branch)
        # Record the svn-rev -> hg-node mapping (tags are tracked via
        # movetag instead).
        if (rev.revnum, branch) not in meta.revmap and not tag:
            meta.revmap[rev.revnum, branch] = new_hash
        if tag:
            meta.movetag(tag, new_hash, parentctx.extra().get('branch', None), rev, date)

    # 2. handle branches that need to be committed without any files
    for branch in current.emptybranches:

        ha = meta.get_parent_revision(rev.revnum, branch)
        # Can't make an empty commit on a branch with no parent.
        if ha == node.nullid:
            continue

        parent_ctx = meta.repo.changectx(ha)
        def del_all_files(*args):
            # memctx file callback that deletes everything it is asked
            # about; safe here because the commit's file list is empty.
            raise IOError

        # True here meant nuke all files, shouldn't happen with branch closing
        if current.emptybranches[branch]: #pragma: no cover
            raise hgutil.Abort('Empty commit to an open branch attempted. '
                               'Please report this issue.')

        extra = meta.genextra(rev.revnum, branch)
        if not meta.usebranchnames:
            extra.pop('branch', None)

        current_ctx = context.memctx(meta.repo,
                                     (ha, node.nullid),
                                     rev.message or ' ',
                                     [],
                                     del_all_files,
                                     meta.authors[rev.author],
                                     date,
                                     extra)
        new_hash = meta.repo.commitctx(current_ctx)
        util.describe_commit(ui, new_hash, branch)
        if (rev.revnum, branch) not in meta.revmap:
            meta.revmap[rev.revnum, branch] = new_hash

    return closebranches
|