2008-09-30 20:42:52 +04:00
|
|
|
from mercurial import util as merc_util
|
|
|
|
from mercurial import hg
|
|
|
|
from svn import core
|
|
|
|
|
|
|
|
import util
|
|
|
|
import hg_delta_editor
|
|
|
|
import svnwrap
|
|
|
|
import fetch_command
|
|
|
|
import utility_commands
|
|
|
|
|
|
|
|
|
|
|
|
@util.register_subcommand('push')
@util.register_subcommand('dcommit') # for git expats
def push_revisions_to_subversion(ui, repo, hg_repo_path, svn_url, **opts):
    """Push revisions starting at a specified head back to Subversion.

    Returns 0 on success (including "nothing to push"), 1 when a branch
    merge is encountered (pushing merges is not implemented).
    """
    oldencoding = merc_util._encoding
    merc_util._encoding = 'UTF-8'
    # Restore the global encoding on *every* exit path: the original code
    # leaked the UTF-8 setting on early returns and on exceptions raised
    # by commit/fetch below.
    try:
        hge = hg_delta_editor.HgChangeReceiver(hg_repo_path,
                                               ui_=ui)
        # Reverse map: mercurial node -> (svn revision, branch) key.
        svn_commit_hashes = dict(zip(hge.revmap.itervalues(),
                                     hge.revmap.iterkeys()))
        # Strategy:
        # 1. Find all outgoing commits from this head
        outgoing = utility_commands.outgoing_revisions(ui, repo, hge,
                                                       svn_commit_hashes)
        if not outgoing:
            ui.status('No revisions to push.')
            return 0
        if len(repo.parents()) != 1:
            ui.status('Cowardly refusing to push branch merge')
            return 1
        while outgoing:
            # Push oldest-first so each svn commit's parent already exists.
            oldest = outgoing.pop(-1)
            old_ctx = repo[oldest]
            if len(old_ctx.parents()) != 1:
                ui.status('Found a branch merge, this needs discussion and '
                          'implementation.')
                return 1
            base_n = old_ctx.parents()[0].node()
            old_children = repo[base_n].children()
            # 2. Commit oldest revision that needs to be pushed
            base_revision = svn_commit_hashes[old_ctx.parents()[0].node()][0]
            commit_from_rev(ui, repo, old_ctx, hge, svn_url, base_revision)
            # 3. Fetch revisions from svn
            r = fetch_command.fetch_revisions(ui, svn_url, hg_repo_path)
            assert not r or r == 0
            # 4. Find the new head of the target branch
            repo = hg.repository(ui, hge.path)
            base_c = repo[base_n]
            replacement = [c for c in base_c.children() if c not in old_children
                           and c.branch() == old_ctx.branch()]
            assert len(replacement) == 1
            replacement = replacement[0]
            # 5. Rebase all children of the currently-pushing rev to the new branch
            heads = repo.heads(old_ctx.node())
            for needs_transplant in heads:
                hg.clean(repo, needs_transplant)
                utility_commands.rebase_commits(ui, repo, hg_repo_path, **opts)
                repo = hg.repository(ui, hge.path)
                if needs_transplant in outgoing:
                    hg.clean(repo, repo['tip'].node())
                # The fetch/rebase invalidated our maps; rebuild them.
                hge = hg_delta_editor.HgChangeReceiver(hg_repo_path, ui_=ui)
                svn_commit_hashes = dict(zip(hge.revmap.itervalues(),
                                             hge.revmap.iterkeys()))
                outgoing = utility_commands.outgoing_revisions(ui, repo, hge,
                                                               svn_commit_hashes)
    finally:
        merc_util._encoding = oldencoding
    return 0
2008-11-15 01:18:24 +03:00
|
|
|
def _getdirchanges(svn, branchpath, parentctx, ctx, changedfiles):
|
|
|
|
"""Compute directories to add or delete when moving from parentctx
|
|
|
|
to ctx, assuming only 'changedfiles' files changed.
|
2008-09-30 20:42:52 +04:00
|
|
|
|
2008-11-15 01:18:24 +03:00
|
|
|
Return (added, deleted) where 'added' is the list of all added
|
|
|
|
directories and 'deleted' the list of deleted directories.
|
|
|
|
Intermediate directories are included: if a/b/c is new and requires
|
|
|
|
the addition of a/b and a, those will be listed too. Intermediate
|
|
|
|
deleted directories are also listed, but item order of undefined
|
|
|
|
in either list.
|
2008-11-10 02:02:07 +03:00
|
|
|
"""
|
2008-11-15 01:18:24 +03:00
|
|
|
def exists(svndir):
|
2008-11-10 02:02:07 +03:00
|
|
|
try:
|
2008-11-15 01:18:24 +03:00
|
|
|
svn.list_dir('%s/%s' % (branchpath, svndir))
|
|
|
|
return True
|
|
|
|
except core.SubversionException:
|
|
|
|
return False
|
|
|
|
|
|
|
|
def finddirs(path):
|
|
|
|
pos = path.rfind('/')
|
|
|
|
while pos != -1:
|
|
|
|
yield path[:pos]
|
|
|
|
pos = path.rfind('/', 0, pos)
|
|
|
|
|
|
|
|
def getctxdirs(ctx, keptdirs):
|
|
|
|
dirs = {}
|
|
|
|
for f in ctx.manifest():
|
|
|
|
for d in finddirs(f):
|
|
|
|
if d in dirs:
|
|
|
|
break
|
|
|
|
if d in keptdirs:
|
|
|
|
dirs[d] = 1
|
|
|
|
return dirs
|
|
|
|
|
|
|
|
deleted, added = [], []
|
|
|
|
changeddirs = {}
|
|
|
|
for f in changedfiles:
|
2008-11-15 01:18:24 +03:00
|
|
|
if f in parentctx and f in ctx:
|
|
|
|
# Updated files cannot cause directories to be created
|
|
|
|
# or removed.
|
|
|
|
continue
|
2008-11-15 01:18:24 +03:00
|
|
|
for d in finddirs(f):
|
|
|
|
changeddirs[d] = 1
|
2008-11-15 01:18:24 +03:00
|
|
|
if not changeddirs:
|
|
|
|
return added, deleted
|
2008-11-15 01:18:24 +03:00
|
|
|
olddirs = getctxdirs(parentctx, changeddirs)
|
|
|
|
newdirs = getctxdirs(ctx, changeddirs)
|
|
|
|
|
|
|
|
for d in newdirs:
|
|
|
|
if d not in olddirs and not exists(d):
|
|
|
|
added.append(d)
|
|
|
|
|
|
|
|
for d in olddirs:
|
|
|
|
if d not in newdirs and exists(d):
|
|
|
|
deleted.append(d)
|
|
|
|
|
|
|
|
return added, deleted
|
|
|
|
|
2008-11-10 02:02:07 +03:00
|
|
|
|
def commit_from_rev(ui, repo, rev_ctx, hg_editor, svn_url, base_revision):
    """Build and send a commit from Mercurial to Subversion.

    rev_ctx is the Mercurial changectx to push; base_revision is the
    Subversion revision the commit is built against.  Raises
    mercurial's Abort when the base text is out of date (apr_err
    160028); any other SubversionException is re-raised.
    """
    # file path -> (base_data, new_data, action) triples sent to svn.commit.
    file_data = {}
    svn = svnwrap.SubversionRepo(svn_url, username=merc_util.getuser())
    parent = rev_ctx.parents()[0]
    parent_branch = rev_ctx.parents()[0].branch()
    # Map the Mercurial branch onto the conventional svn layout:
    # 'default' lives in trunk, everything else under branches/<name>.
    branch_path = 'trunk'

    if parent_branch and parent_branch != 'default':
        branch_path = 'branches/%s' % parent_branch

    # Directories that must be created/removed as a side effect of the
    # file changes in this revision.
    addeddirs, deleteddirs = _getdirchanges(svn, branch_path, parent,
                                            rev_ctx, rev_ctx.files())
    deleteddirs = set(deleteddirs)

    # file path -> {svn property name: value}; a None value deletes the prop.
    props = {}
    # copied/renamed destination path -> its copy source in the parent ctx.
    copies = {}
    for file in rev_ctx.files():
        new_data = base_data = ''
        action = ''
        if file in rev_ctx:
            # File exists in the new revision: an add or a modify.
            fctx = rev_ctx.filectx(file)
            new_data = fctx.data()

            # Translate Mercurial flags into svn properties.
            if 'x' in fctx.flags():
                props.setdefault(file, {})['svn:executable'] = '*'
            if 'l' in fctx.flags():
                props.setdefault(file, {})['svn:special'] = '*'

            if file not in parent:
                renamed = fctx.renamed()
                if renamed:
                    # TODO current model (and perhaps svn model) does not support
                    # this kind of renames: a -> b, b -> c
                    copies[file] = renamed[0]
                    base_data = parent[renamed[0]].data()

                action = 'add'
                # NOTE(review): dirname is never used below — candidate
                # for removal.
                dirname = '/'.join(file.split('/')[:-1] + [''])
            else:
                base_data = parent.filectx(file).data()
                # Flag present on the parent but gone in the new
                # revision: delete the corresponding svn property.
                if ('x' in parent.filectx(file).flags()
                    and 'x' not in rev_ctx.filectx(file).flags()):
                    props.setdefault(file, {})['svn:executable'] = None
                if ('l' in parent.filectx(file).flags()
                    and 'l' not in rev_ctx.filectx(file).flags()):
                    props.setdefault(file, {})['svn:special'] = None
                action = 'modify'
        else:
            # File is gone from the new revision: a delete.
            pos = file.rfind('/')
            if pos >= 0:
                if file[:pos] in deleteddirs:
                    # This file will be removed when its directory is removed
                    continue
            base_data = parent.filectx(file).data()
            action = 'delete'
        file_data[file] = base_data, new_data, action

    # Now we are done with files, we can prune deleted directories
    # against themselves: ignore a/b if a/ is already removed
    deleteddirs2 = list(deleteddirs)
    deleteddirs2.sort()
    deleteddirs2.reverse()
    for d in deleteddirs2:
        pos = d.rfind('/')
        if pos >= 0 and d[:pos] in deleteddirs:
            deleteddirs.remove(d[:pos])

    def svnpath(p):
        # Prefix a repo-relative path with the branch directory.
        return '%s/%s' % (branch_path, p)

    # NOTE(review): 'copies' maps destination -> source, so the loop
    # variable names here are swapped; the net effect maps
    # svn destination path -> (svn copyfrom path, copyfrom revision),
    # which looks like what svn expects — verify against svn.commit.
    newcopies = {}
    for source, dest in copies.iteritems():
        newcopies[svnpath(source)] = (svnpath(dest), base_revision)

    # Re-key file_data and props from repo-relative to branch-relative
    # paths (zip materializes the key list first, so mutating the dicts
    # inside the loop is safe).  Binary content additionally gets an
    # octet-stream mime type.
    new_target_files = [svnpath(f) for f in file_data]
    for tf, ntf in zip(file_data, new_target_files):
        if tf in file_data:
            file_data[ntf] = file_data[tf]
            if tf in props:
                props[ntf] = props[tf]
                del props[tf]
            if merc_util.binary(file_data[ntf][1]):
                # NOTE(review): updating a dict with itself is a no-op;
                # only the mime-type assignment below has an effect.
                props.setdefault(ntf, {}).update(props.get(ntf, {}))
                props.setdefault(ntf, {})['svn:mime-type'] = 'application/octet-stream'
            del file_data[tf]

    # Directory adds/deletes are passed as extra commit targets.
    addeddirs = [svnpath(d) for d in addeddirs]
    deleteddirs = [svnpath(d) for d in deleteddirs]
    new_target_files += addeddirs + deleteddirs
    try:
        svn.commit(new_target_files, rev_ctx.description(), file_data,
                   base_revision, set(addeddirs), set(deleteddirs),
                   props, newcopies)
    except core.SubversionException, e:
        # 160028 is SVN_ERR_FS_TXN_OUT_OF_DATE: our base text no longer
        # matches the repository head — the user should rebase first.
        if hasattr(e, 'apr_err') and e.apr_err == 160028:
            raise merc_util.Abort('Base text was out of date, maybe rebase?')
        else:
            raise