mirror of
https://github.com/facebook/sapling.git
synced 2024-10-09 16:31:02 +03:00
Initial import of hgsubversion into a public repository.
This commit is contained in:
commit
7b76bdbc02
3
README
Normal file
3
README
Normal file
@ -0,0 +1,3 @@
|
||||
hgsubversion is an extension for Mercurial that allows using Mercurial as a Subversion client.
|
||||
|
||||
Right now it is *not* ready for production use. You should only be using this if you're ready to hack on it, and go diving into the internals of Mercurial and/or Subversion.
|
16
TODO
Normal file
16
TODO
Normal file
@ -0,0 +1,16 @@
|
||||
Handle directory adds.
|
||||
Handle symlinks in commit.
|
||||
Handle execute in commit.
|
||||
Handle auto-props on file adds.
|
||||
Handle directory deletes.
|
||||
Test coverage is horrible. Figure out a strategy to have some real tests.
|
||||
Be more explicit about deletes in diff-replay because then we can probably
|
||||
handle a rename like ``mv Foo foo`` properly.
|
||||
Use replay_range on svn 1.5, it will make replay-over-serf faster.
|
||||
Add convertedrev to extra on hg-side commit.
|
||||
|
||||
Known upstream issues:
|
||||
Using serf as the http library causes no revisions to convert. (svn trunk and 1.5.x branch as of
|
||||
r33158 both have this. No idea about 1.4.x, since I always just use neon there).
|
||||
This is fixed in Subversion trunk in revision 33173. I don't know if it has been backported to
|
||||
the 1.5.x branch yet.
|
31
__init__.py
Normal file
31
__init__.py
Normal file
@ -0,0 +1,31 @@
|
||||
from mercurial import commands
|
||||
from mercurial import hg
|
||||
|
||||
import svncommand
|
||||
import fetch_command
|
||||
|
||||
def svn(ui, repo, subcommand, *args, **opts):
    """Entry point for `hg svn`: forward to the svncommand dispatcher."""
    return svncommand.svncmd(ui, repo, subcommand, *args, **opts)
|
||||
|
||||
def svn_fetch(ui, svn_url, hg_repo_path=None, **opts):
    """Clone the Subversion repository at svn_url into a local hg repo.

    When no destination is given, one is derived from the URL with an
    "-hg" suffix, mirroring `hg clone`'s default-destination behavior.
    """
    if not hg_repo_path:
        hg_repo_path = hg.defaultdest(svn_url) + "-hg"
        ui.status("Assuming destination %s\n" % hg_repo_path)
    return fetch_command.fetch_revisions(ui, svn_url, hg_repo_path, **opts)
|
||||
|
||||
# svnclone must work without an existing local repository.
commands.norepo += " svnclone"

# Mercurial command table: command name -> (function, option flags, synopsis).
cmdtable = {
    "svn":
        (svn,
         [('u', 'svn_url', '', 'Path to the Subversion server.'),
          ('', 'stupid', False, 'Be stupid and use diffy replay.'),
          ],
         'hg svn subcommand'),
    "svnclone" :(svn_fetch,
         [('S', 'skipto_rev', '0', 'Skip commits before this revision.'),
          ('', 'stupid', False, 'Be stupid and use diffy replay.'),
          ('T', 'tag_locations', 'tags', 'Relative path to where tags get '
           'stored, as comma sep. values if there is more than one such path.')
          ],
         'hg svn_fetch svn_url, dest'),
    }
|
447
fetch_command.py
Normal file
447
fetch_command.py
Normal file
@ -0,0 +1,447 @@
|
||||
import cStringIO
|
||||
import re
|
||||
import operator
|
||||
import os
|
||||
import shutil
|
||||
import stat
|
||||
import tempfile
|
||||
|
||||
from mercurial import patch
|
||||
from mercurial import node
|
||||
from mercurial import context
|
||||
from mercurial import revlog
|
||||
from svn import core
|
||||
from svn import delta
|
||||
|
||||
import hg_delta_editor
|
||||
import svnwrap
|
||||
import util
|
||||
|
||||
|
||||
def print_your_svn_is_old_message(ui):
    """Tell the user we are falling back to the slower diff-based replay.

    Emitted after detecting pre-1.5 SWIG bindings or a server that cannot
    replay; see fetch_revisions.
    """
    # Fix: message previously lacked a terminating newline; every other
    # ui.status() call in this module supplies one explicitly.
    ui.status("In light of that, I'll fall back and do diffs, but it won't do "
              "as good a job. You should really upgrade your server.\n")
|
||||
|
||||
|
||||
@util.register_subcommand('pull')
def fetch_revisions(ui, svn_url, hg_repo_path, skipto_rev=0, stupid=None,
                    tag_locations='tags',
                    **opts):
    """Pull new revisions from Subversion.

    Uses the svn replay API when the bindings and server support it,
    falling back to the slower diff-based method otherwise.  Progress is
    checkpointed in the last_revision_handled metadata file so an
    interrupted pull can resume.
    """
    skipto_rev=int(skipto_rev)
    have_replay = not stupid
    # Pre-1.5 SWIG bindings return a non-callable txdelta window handler,
    # which makes replay unusable.
    if have_replay and not callable(delta.svn_txdelta_apply(None, None,
                                                           None)[0]):
        ui.status('You are using old Subversion SWIG bindings. Replay will not'
                  ' work until you upgrade to 1.5.0 or newer. Falling back to'
                  ' a slower method that may be buggier. Please upgrade, or'
                  ' contribute a patch to use the ctypes bindings instead'
                  ' of SWIG.')
        have_replay = False
    initializing_repo = False
    svn = svnwrap.SubversionRepo(svn_url)
    author_host = "@%s" % svn.uuid
    tag_locations = tag_locations.split(',')
    hg_editor = hg_delta_editor.HgChangeReceiver(hg_repo_path,
                                                 ui_=ui,
                                                 subdir=svn.subdir,
                                                 author_host=author_host,
                                                 tag_locations=tag_locations)
    if os.path.exists(hg_editor.uuid_file):
        # Resuming a previous conversion; make sure it is the same source.
        uuid = open(hg_editor.uuid_file).read()
        assert uuid == svn.uuid
        start = int(open(hg_editor.last_revision_handled_file, 'r').read())
    else:
        # First pull: record identifying metadata for future runs.
        open(hg_editor.uuid_file, 'w').write(svn.uuid)
        open(hg_editor.svn_url_file, 'w').write(svn_url)
        open(hg_editor.last_revision_handled_file, 'w').write(str(0))
        initializing_repo = True
        start = skipto_rev

    # start converting revisions
    for r in svn.revisions(start=start):
        valid = False
        hg_editor.update_branch_tag_map_for_rev(r)
        # A revision is worth converting only if it touches a valid path
        # (trunk or a named branch).
        for p in r.paths:
            if hg_editor._is_path_valid(p):
                valid = True
                continue
        if initializing_repo and start > 0:
            assert False, 'This feature not ready yet.'
        if valid:
            # got a 502? Try more than once!
            tries = 0
            converted = False
            while not converted and tries < 3:
                try:
                    ui.status('converting %s\n' % r)
                    if have_replay:
                        try:
                            replay_convert_rev(hg_editor, svn, r)
                        except svnwrap.SubversionRepoCanNotReplay, e:
                            # Server can't replay; permanently switch to the
                            # diff-based fallback for this run.
                            ui.status('%s\n' % e.message)
                            print_your_svn_is_old_message(ui)
                            have_replay = False
                            stupid_svn_server_pull_rev(ui, svn, hg_editor, r)
                    else:
                        stupid_svn_server_pull_rev(ui, svn, hg_editor, r)
                    converted = True
                    # Checkpoint so an interrupted pull can resume here.
                    open(hg_editor.last_revision_handled_file,
                         'w').write(str(r.revnum))
                except core.SubversionException, e:
                    if hasattr(e, 'message') and (
                        'Server sent unexpected return value (502 Bad Gateway)'
                        ' in response to PROPFIND') in e.message:
                        tries += 1
                        ui.status('Got a 502, retrying (%s)\n' % tries)
                    else:
                        raise
|
||||
|
||||
|
||||
def replay_convert_rev(hg_editor, svn, r):
|
||||
hg_editor.set_current_rev(r)
|
||||
svn.get_replay(r.revnum, hg_editor)
|
||||
if hg_editor.missing_plaintexts:
|
||||
files_to_grab = set()
|
||||
dirs_to_list = []
|
||||
props = {}
|
||||
for p in hg_editor.missing_plaintexts:
|
||||
p2 = p
|
||||
if svn.subdir:
|
||||
p2 = p2[len(svn.subdir)-1:]
|
||||
# this *sometimes* raises on me, and I have
|
||||
# no idea why. TODO(augie) figure out the why.
|
||||
try:
|
||||
pl = svn.proplist(p2, r.revnum, recurse=True)
|
||||
except core.SubversionException, e:
|
||||
pass
|
||||
props.update(pl)
|
||||
if p[-1] == '/':
|
||||
dirs_to_list.append(p)
|
||||
else:
|
||||
files_to_grab.add(p)
|
||||
while dirs_to_list:
|
||||
p = dirs_to_list.pop(0)
|
||||
l = svn.list_dir(p[:-1], r.revnum)
|
||||
for f in l:
|
||||
|
||||
if l[f].kind == core.svn_node_dir:
|
||||
dirs_to_list.append(p+f+'/')
|
||||
elif l[f].kind == core.svn_node_file:
|
||||
files_to_grab.add(p+f)
|
||||
for p in files_to_grab:
|
||||
p2 = p
|
||||
if svn.subdir:
|
||||
p2 = p2[len(svn.subdir)-1:]
|
||||
hg_editor.current_files[p] = svn.get_file(p2, r.revnum)
|
||||
hg_editor.current_files_exec[p] = False
|
||||
if p in props:
|
||||
if 'svn:executable' in props[p]:
|
||||
hg_editor.current_files_exec[p] = True
|
||||
if 'svn:special' in props[p]:
|
||||
hg_editor.current_files_symlink[p] = True
|
||||
hg_editor.missing_plaintexts = set()
|
||||
hg_editor.commit_current_delta()
|
||||
|
||||
|
||||
# Header svn emits in a unified diff for a file it considers binary.
binary_file_re = re.compile(r'''Index: ([^\n]*)
=*
Cannot display: file marked as a binary type.''')

# svn:executable property added in this revision.
property_exec_set_re = re.compile(r'''Property changes on: ([^\n]*)
_*
Added: svn:executable
\+ \*
''')

# svn:executable property removed in this revision.
property_exec_removed_re = re.compile(r'''Property changes on: ([^\n]*)
_*
Deleted: svn:executable
- \*
''')

# An Index: header immediately followed by another Index: header means the
# diff for that file is empty and patch(1) would refuse it.
empty_file_patch_wont_make_re = re.compile(r'''Index: ([^\n]*)\n=*\n(?=Index:)''')

any_file_re = re.compile(r'''^Index: ([^\n]*)\n=*\n''', re.MULTILINE)

# svn:special property added (the file is a symlink on the svn side).
property_special_set_re = re.compile(r'''Property changes on: ([^\n]*)
_*
Added: svn:special
\+ \*
''')

# BUG FIX: this pattern previously searched for "Added: svn:special", which
# is what property_special_set_re already matches, so symlink *removals*
# could never be detected. It now matches the "Deleted:" header.
property_special_removed_re = re.compile(r'''Property changes on: ([^\n]*)
_*
Deleted: svn:special
\- \*
''')
|
||||
|
||||
def make_diff_path(b):
    """Map a branch name to its repository-relative svn path.

    None (hg's default branch) maps to 'trunk'; anything else lives
    under 'branches/'.
    """
    # Fixed: identity comparison for None instead of `== None`.
    if b is None:
        return 'trunk'
    return 'branches/' + b
|
||||
|
||||
|
||||
def stupid_svn_server_pull_rev(ui, svn, hg_editor, r):
    """Convert one svn revision without replay, by applying svn's unified
    diff on top of the parent hg revision.

    Binary files, property-only changes, and revisions whose diff cannot
    be applied are fetched as full texts instead.  One hg commit is
    created per affected branch.
    """
    used_diff = True
    delete_all_files = False
    # this server fails at replay
    branches = hg_editor.branches_in_paths(r.paths)
    temp_location = os.path.join(hg_editor.path, '.hg', 'svn', 'temp')
    if not os.path.exists(temp_location):
        os.makedirs(temp_location)
    for b in branches:
        # Each branch gets its own scratch dir into which the parent state
        # is materialized and the diff applied.
        our_tempdir = tempfile.mkdtemp('svn_fetch_temp', dir=temp_location)
        diff_path = make_diff_path(b)
        parent_rev, br_p = hg_editor.get_parent_svn_branch_and_rev(r.revnum, b)
        parent_ha = hg_editor.get_parent_revision(r.revnum, b)
        files_touched = set()
        link_files = {}
        exec_files = {}
        try:
            if br_p == b:
                d = svn.get_unified_diff(diff_path, r.revnum, deleted=False,
                                         # letting patch handle binaries sounded
                                         # cool, but it breaks patch in sad ways
                                         ignore_type=False)
            else:
                # Parent is on a different branch: this is a branch creation.
                d = svn.get_unified_diff(diff_path, r.revnum,
                                         other_path=make_diff_path(br_p),
                                         other_rev=parent_rev,
                                         deleted=True, ignore_type=True)
                if d:
                    ui.status('Branch creation with mods, pulling full rev.\n')
                    raise BadPatchApply()
            for m in binary_file_re.findall(d):
                # we have to pull each binary file by hand as a fulltext,
                # which sucks but we've got no choice
                file_path = os.path.join(our_tempdir, m)
                files_touched.add(m)
                try:
                    try:
                        os.makedirs(os.path.dirname(file_path))
                    except OSError, e:
                        pass
                    f = open(file_path, 'w')
                    f.write(svn.get_file(diff_path+'/'+m, r.revnum))
                    f.close()
                except core.SubversionException, e:
                    # File deleted in this revision; nothing to fetch.
                    if (e.message.endswith("' path not found")
                        or e.message.startswith("File not found: revision")):
                        pass
                    else:
                        raise
            # Strip the hunks patch(1) cannot consume (empty diffs and
            # property-only changes) before applying.
            d2 = empty_file_patch_wont_make_re.sub('', d)
            d2 = property_exec_set_re.sub('', d2)
            d2 = property_exec_removed_re.sub('', d2)
            old_cwd = os.getcwd()
            os.chdir(our_tempdir)
            # Materialize every file named in the diff so patching has
            # something to apply against.
            for f in any_file_re.findall(d):
                files_touched.add(f)
                # this check is here because modified binary files will get
                # created before here.
                if os.path.exists(f):
                    continue
                dn = os.path.dirname(f)
                if dn and not os.path.exists(dn):
                    os.makedirs(dn)
                if f in hg_editor.repo[parent_ha].manifest():
                    data = hg_editor.repo[parent_ha].filectx(f).data()
                    fi = open(f, 'w')
                    fi.write(data)
                    fi.close()
                else:
                    open(f, 'w').close()
                if f.startswith(our_tempdir):
                    f = f[len(our_tempdir)+1:]
            os.chdir(old_cwd)
            # Only run patch if there is at least one real hunk line left.
            if d2.strip() and len(re.findall('\n[-+]', d2.strip())) > 0:
                old_cwd = os.getcwd()
                os.chdir(our_tempdir)
                changed = {}
                try:
                    patch_st = patch.applydiff(ui, cStringIO.StringIO(d2),
                                               changed, strip=0)
                except patch.PatchError:
                    # TODO: this happens if the svn server has the wrong mime
                    # type stored and doesn't know a file is binary. It would
                    # be better to do one file at a time and only do a
                    # full fetch on files that had problems.
                    os.chdir(old_cwd)
                    raise BadPatchApply()
                for x in changed.iterkeys():
                    ui.status('M %s\n' % x)
                    files_touched.add(x)
                os.chdir(old_cwd)
                # if this patch didn't apply right, fall back to exporting the
                # entire rev.
                if patch_st == -1:
                    parent_ctx = hg_editor.repo[parent_ha]
                    parent_manifest = parent_ctx.manifest()
                    for fn in files_touched:
                        if (fn in parent_manifest and
                            'l' in parent_ctx.filectx(fn).flags()):
                            # I think this might be an underlying bug in svn -
                            # I get diffs of deleted symlinks even though I
                            # specifically said no deletes above.
                            ui.status('Pulling whole rev because of a deleted'
                                      'symlink')
                            raise BadPatchApply()
                    assert False, ('This should only happen on case-insensitive'
                                   ' volumes.')
                elif patch_st == 1:
                    # When converting Django, I saw fuzz on .po files that was
                    # causing revisions to end up failing verification. If that
                    # can be fixed, maybe this won't ever be reached.
                    ui.status('There was some fuzz, not using diff after all.')
                    raise BadPatchApply()
            else:
                ui.status('Not using patch for %s, diff had no hunks.\n' %
                          r.revnum)

            # we create the files if they don't exist here because we know
            # that we'll never have diff info for a deleted file, so if the
            # property is set, we should force the file to exist no matter what.
            for m in property_exec_removed_re.findall(d):
                f = os.path.join(our_tempdir, m)
                if not os.path.exists(f):
                    d = os.path.dirname(f)
                    if not os.path.exists(d):
                        os.makedirs(d)
                    if not m in hg_editor.repo[parent_ha].manifest():
                        open(f, 'w').close()
                    else:
                        data = hg_editor.repo[parent_ha].filectx(m).data()
                        fp = open(f, 'w')
                        fp.write(data)
                        fp.close()
                exec_files[m] = False
                files_touched.add(m)
            for m in property_exec_set_re.findall(d):
                f = os.path.join(our_tempdir, m)
                if not os.path.exists(f):
                    d = os.path.dirname(f)
                    if not os.path.exists(d):
                        os.makedirs(d)
                    if m not in hg_editor.repo[parent_ha].manifest():
                        open(f, 'w').close()
                    else:
                        data = hg_editor.repo[parent_ha].filectx(m).data()
                        fp = open(f, 'w')
                        fp.write(data)
                        fp.close()
                exec_files[m] = True
                files_touched.add(m)
            for m in property_special_set_re.findall(d):
                # TODO(augie) when a symlink is removed, patching will fail.
                # We're seeing that above - there's gotta be a better
                # workaround than just bailing like that.
                path = os.path.join(our_tempdir, m)
                assert os.path.exists(path)
                # svn stores symlinks as plain files prefixed with 'link '.
                link_path = open(path).read()
                link_path = link_path[len('link '):]
                os.remove(path)
                link_files[m] = link_path
                files_touched.add(m)
        except core.SubversionException, e:
            if (e.apr_err == 160013 or (hasattr(e, 'message') and
                  'was not found in the repository at revision ' in e.message)):
                # Either this revision or the previous one does not exist.
                try:
                    ui.status("fetching entire rev previous rev does not exist.\n")
                    used_diff = False
                    svn.fetch_all_files_to_dir(diff_path, r.revnum, our_tempdir)
                except core.SubversionException, e:
                    if e.apr_err == 170000 or (e.message.startswith("URL '")
                        and e.message.endswith("' doesn't exist")):
                        # Branch path gone entirely at this revision.
                        delete_all_files = True
                    else:
                        raise

        except BadPatchApply, e:
            # previous rev didn't exist, so this is most likely the first
            # revision. We'll have to pull all files by hand.
            try:
                ui.status("fetching entire rev because raised.\n")
                used_diff = False
                shutil.rmtree(our_tempdir)
                os.makedirs(our_tempdir)
                svn.fetch_all_files_to_dir(diff_path, r.revnum, our_tempdir)
            except core.SubversionException, e:
                if e.apr_err == 170000 or (e.message.startswith("URL '")
                    and e.message.endswith("' doesn't exist")):
                    delete_all_files = True
                else:
                    raise
        # Record deletions listed in the revision's changed-path info.
        for p in r.paths:
            if p.startswith(diff_path) and r.paths[p].action == 'D':
                p2 = p[len(diff_path)+1:]
                files_touched.add(p2)
                p3 = os.path.join(our_tempdir, p2)
                if os.path.exists(p3) and not os.path.isdir(p3):
                    os.unlink(p3)
                if p2 and p2[0] == '/':
                    p2 = p2[1:]
                # If this isn't in the parent ctx, it must've been a dir
                if not p2 in hg_editor.repo[parent_ha]:
                    d_files = [f for f in hg_editor.repo[parent_ha].manifest().iterkeys()
                               if f.startswith(p2 + '/')]
                    for d in d_files:
                        files_touched.add(d)
        if delete_all_files:
            for p in hg_editor.repo[parent_ha].manifest().iterkeys():
                files_touched.add(p)
        if not used_diff:
            # Whole-rev export: everything on disk is the new state; record
            # exec bits and symlinks from the filesystem.
            for p in reduce(operator.add, [[os.path.join(x[0], y) for y in x[2]]
                                           for x in
                                           list(os.walk(our_tempdir))]):
                p_real = p[len(our_tempdir)+1:]
                if os.path.islink(p):
                    link_files[p_real] = os.readlink(p)
                exec_files[p_real] = (os.lstat(p).st_mode & 0100 != 0)
                files_touched.add(p_real)
            for p in hg_editor.repo[parent_ha].manifest().iterkeys():
                # TODO this might not be a required step.
                files_touched.add(p)
        # svn dates look like 2008-08-01T12:34:56.000000Z; hg wants
        # 'YYYY-MM-DD HH:MM:SS OFFSET'.
        date = r.date.replace('T', ' ').replace('Z', '').split('.')[0]
        date += ' -0000'
        def filectxfn(repo, memctx, path):
            disk_path = os.path.join(our_tempdir, path)
            if path in link_files:
                return context.memfilectx(path=path, data=link_files[path],
                                          islink=True, isexec=False,
                                          copied=False)
            fp = open(disk_path)
            exe = exec_files.get(path, None)
            if exe is None and path in hg_editor.repo[parent_ha]:
                # exec bit unknown: inherit it from the parent revision
                exe = 'x' in hg_editor.repo[parent_ha].filectx(path).flags()
            return context.memfilectx(path=path, data=fp.read(), islink=False,
                                      isexec=exe, copied=False)
        extra = {}
        if b:
            extra['branch'] = b
        if parent_ha != node.nullid or files_touched:
            # TODO(augie) remove this debug code? Or maybe it's sane to have it.
            for f in files_touched:
                if f:
                    assert f[0] != '/'
            current_ctx = context.memctx(hg_editor.repo,
                                         [parent_ha, revlog.nullid],
                                         r.message or '...',
                                         files_touched,
                                         filectxfn,
                                         '%s%s' % (r.author,
                                                   hg_editor.author_host),
                                         date,
                                         extra)
            ha = hg_editor.repo.commitctx(current_ctx)
            hg_editor.revmap[r.revnum, b] = ha
            hg_editor._save_metadata()
            ui.status('committed as %s on branch %s\n' %
                      (node.hex(ha), b or 'default'))
        shutil.rmtree(our_tempdir)
|
||||
|
||||
|
||||
class BadPatchApply(Exception):
    """Raised when a unified-diff replay cannot be applied cleanly,
    signalling the caller to fall back to fetching the entire revision."""
    pass
|
614
hg_delta_editor.py
Normal file
614
hg_delta_editor.py
Normal file
@ -0,0 +1,614 @@
|
||||
import cStringIO
|
||||
import cPickle as pickle
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
import traceback
|
||||
|
||||
from mercurial import context
|
||||
from mercurial import hg
|
||||
from mercurial import ui
|
||||
from mercurial import revlog
|
||||
from mercurial import node
|
||||
from svn import delta
|
||||
from svn import core
|
||||
|
||||
def pickle_atomic(data, file_path, dir=None):
    """pickle some data to a path atomically.

    This is present because I kept corrupting my revmap by managing to hit ^C
    during the pickle of that file.

    The data is written to a temp file in `dir` and renamed into place only
    after a fully successful dump.
    """
    f, path = tempfile.mkstemp(prefix='pickling', dir=dir)
    try:
        # Fix: open in binary mode -- pickle output is a byte stream, and
        # text mode can corrupt it on platforms with newline translation.
        fp = os.fdopen(f, 'wb')
        pickle.dump(data, fp)
        fp.close()
    except:
        # Fix: don't leave a half-written temp file behind on failure.
        try:
            os.unlink(path)
        except OSError:
            pass
        raise
    else:
        os.rename(path, file_path)
|
||||
|
||||
def stash_exception_on_self(fn):
    """Stash any exception raised in the method on self.

    This is required because the SWIG bindings just mutate any exception into
    a generic Subversion exception with no way of telling what the original
    was.  This allows the editor object to notice when you try and commit and
    really got an exception in the replay process.
    """
    def _stashing_wrapper(self, *args, **kwargs):
        try:
            return fn(self, *args, **kwargs)
        except:
            # Record only the first failure; later ones just propagate.
            if not hasattr(self, '_exception_info'):
                self._exception_info = sys.exc_info()
            raise
    return _stashing_wrapper
|
||||
|
||||
|
||||
class HgChangeReceiver(delta.Editor):
|
||||
    def __init__(self, path, ui_=None, subdir='', author_host='',
                 tag_locations=['tags']):
        """path is the path to the target hg repo.

        subdir is the subdirectory of the edits *on the svn server*.
        It is needed for stripping paths off in certain cases.
        """
        # NOTE(review): tag_locations uses a shared mutable default; safe
        # only while callers never mutate the passed list.
        if not ui_:
            ui_ = ui.ui()
        self.ui = ui_
        self.path = path
        self.__setup_repo(path)
        self.subdir = subdir
        if self.subdir and self.subdir[0] == '/':
            self.subdir = self.subdir[1:]
        # {(svn revnum, branch name): hg node hash}
        self.revmap = {}
        if os.path.exists(self.revmap_file):
            f = open(self.revmap_file)
            self.revmap = pickle.load(f)
            f.close()
        # {branch name: (parent branch, parent rev, created rev)}
        self.branches = {}
        if os.path.exists(self.branch_info_file):
            f = open(self.branch_info_file)
            self.branches = pickle.load(f)
            f.close()
        # {tag name: (branch, rev)}
        self.tags = {}
        if os.path.exists(self.tag_info_file):
            f = open(self.tag_info_file)
            self.tags = pickle.load(f)
            f.close()
        # tag_locations is persisted on first run so later pulls agree
        # with the original conversion's choice.
        if os.path.exists(self.tag_locations_file):
            f = open(self.tag_locations_file)
            self.tag_locations = pickle.load(f)
        else:
            self.tag_locations = tag_locations
            pickle_atomic(self.tag_locations, self.tag_locations_file,
                          self.meta_data_dir)

        self.clear_current_info()
        self.author_host = author_host
|
||||
|
||||
    def __setup_repo(self, repo_path):
        '''Verify the repo is going to work out for us.

        This method will fail an assertion if the repo exists but doesn't have
        the Subversion metadata.
        '''
        if os.path.isdir(repo_path) and len(os.listdir(repo_path)):
            # Existing, non-empty directory: it must be a repo we created
            # earlier, complete with our conversion metadata files.
            self.repo = hg.repository(self.ui, repo_path)
            assert os.path.isfile(self.revmap_file)
            assert os.path.isfile(self.svn_url_file)
            assert os.path.isfile(self.uuid_file)
            assert os.path.isfile(self.last_revision_handled_file)
        else:
            # Fresh conversion: create the repo and the metadata directory.
            self.repo = hg.repository(self.ui, repo_path, create=True)
            os.makedirs(os.path.dirname(self.uuid_file))
|
||||
|
||||
def clear_current_info(self):
|
||||
'''Clear the info relevant to a replayed revision so that the next
|
||||
revision can be replayed.
|
||||
'''
|
||||
self.current_files = {}
|
||||
self.deleted_files = {}
|
||||
self.current_rev = None
|
||||
self.current_files_exec = {}
|
||||
self.current_files_symlink = {}
|
||||
self.missing_plaintexts = set()
|
||||
self.commit_branches_empty = {}
|
||||
self.base_revision = None
|
||||
|
||||
    def _save_metadata(self):
        '''Save the Subversion metadata. This should really be called after
        every revision is created.
        '''
        # Each map is written atomically so a ^C cannot corrupt it.
        pickle_atomic(self.revmap, self.revmap_file, self.meta_data_dir)
        pickle_atomic(self.branches, self.branch_info_file, self.meta_data_dir)
        pickle_atomic(self.tags, self.tag_info_file, self.meta_data_dir)
|
||||
|
||||
def branches_in_paths(self, paths):
|
||||
'''Given a list of paths, return the set of branches that are touched.
|
||||
'''
|
||||
branches = set([])
|
||||
for p in paths:
|
||||
if self._is_path_valid(p):
|
||||
junk, branch = self._path_and_branch_for_path(p)
|
||||
branches.add(branch)
|
||||
return branches
|
||||
|
||||
def _path_and_branch_for_path(self, path):
|
||||
'''Figure out which branch inside our repo this path represents, and
|
||||
also figure out which path inside that branch it is.
|
||||
|
||||
Raises an exception if it can't perform its job.
|
||||
'''
|
||||
path = self._normalize_path(path)
|
||||
if path.startswith('trunk'):
|
||||
p = path[len('trunk'):]
|
||||
if p and p[0] == '/':
|
||||
p = p[1:]
|
||||
return p, None
|
||||
elif path.startswith('branches/'):
|
||||
p = path[len('branches/'):]
|
||||
br = p.split('/')[0]
|
||||
p = p[len(br)+1:]
|
||||
if p and p[0] == '/':
|
||||
p = p[1:]
|
||||
return p, br
|
||||
raise Exception,'Things went boom: ' + path
|
||||
|
||||
    def set_current_rev(self, rev):
        '''Set the revision we're currently converting.
        '''
        self.current_rev = rev
|
||||
|
||||
def _normalize_path(self, path):
|
||||
'''Normalize a path to strip of leading slashes and our subdir if we
|
||||
have one.
|
||||
'''
|
||||
if path and path[0] == '/':
|
||||
path = path[1:]
|
||||
if path and path.startswith(self.subdir):
|
||||
path = path[len(self.subdir):]
|
||||
if path and path[0] == '/':
|
||||
path = path[1:]
|
||||
return path
|
||||
|
||||
def _is_path_valid(self, path):
|
||||
path = self._normalize_path(path)
|
||||
if path.startswith('trunk'):
|
||||
return True
|
||||
elif path.startswith('branches/'):
|
||||
br = path.split('/')[1]
|
||||
return len(br) > 0
|
||||
return False
|
||||
|
||||
def _is_path_tag(self, path):
|
||||
"""If path represents the path to a tag, returns the tag name.
|
||||
|
||||
Otherwise, returns False.
|
||||
"""
|
||||
path = self._normalize_path(path)
|
||||
for tags_path in self.tag_locations:
|
||||
if path and (path.startswith(tags_path) and
|
||||
len(path) > len('%s/' % tags_path)):
|
||||
return path[len(tags_path)+1:].split('/')[0]
|
||||
return False
|
||||
|
||||
    def get_parent_svn_branch_and_rev(self, number, branch):
        """Find the converted svn (revnum, branch) pair that should parent
        revision `number` on `branch`.

        Falls back to the branch's recorded creation point when the branch
        itself has no earlier converted revision; returns (None, None) when
        no parent exists at all.
        """
        number -= 1
        if (number, branch) in self.revmap:
            return number, branch
        real_num = 0
        # newest converted revision on this branch that is <= number
        for num, br in self.revmap.iterkeys():
            if br != branch:
                continue
            if num <= number and num > real_num:
                real_num = num
        if real_num == 0:
            # Nothing converted on this branch yet: chase the branch's
            # recorded creation point instead.
            if branch in self.branches:
                parent_branch = self.branches[branch][0]
                parent_branch_rev = self.branches[branch][1]
                branch_created_rev = self.branches[branch][2]
                if parent_branch == 'trunk':
                    parent_branch = None
                if branch_created_rev <= number+1 and branch != parent_branch:
                    return self.get_parent_svn_branch_and_rev(
                        parent_branch_rev+1,
                        parent_branch)
        if real_num != 0:
            return real_num, branch
        return None, None
|
||||
|
||||
def get_parent_revision(self, number, branch):
|
||||
'''Get the parent revision hash for a commit on a specific branch.
|
||||
'''
|
||||
r, br = self.get_parent_svn_branch_and_rev(number, branch)
|
||||
if r is not None:
|
||||
return self.revmap[r, br]
|
||||
return revlog.nullid
|
||||
|
||||
    def update_branch_tag_map_for_rev(self, revision):
        """Record branch and tag creations/deletions implied by the changed
        paths of a single svn revision, updating self.branches and self.tags.
        """
        paths = revision.paths
        added_branches = {}
        added_tags = {}
        tags_to_delete = set()
        for p in paths:
            if self._is_path_valid(p):
                fi, br = self._path_and_branch_for_path(p)
                # A change at the branch root for an unknown branch means
                # the branch is being created in this revision.
                if fi == '' and br not in self.branches:
                    # TODO handle creating a branch from a tag
                    src_p = paths[p].copyfrom_path
                    src_rev = paths[p].copyfrom_rev
                    src_tag = self._is_path_tag(src_p)

                    if not src_p or not (self._is_path_valid(src_p) or src_tag):
                        # we'll imply you're a branch off of trunk
                        # if you have no path, but if you do, it must be valid
                        # or else we assume trunk as well
                        src_branch = None
                        src_rev = revision.revnum
                    elif src_tag:
                        # this is a branch created from a tag. Note that this
                        # really does happen (see Django)
                        src_branch, src_rev = self.tags[src_tag]
                        added_branches[br] = (src_branch, src_rev,
                                              revision.revnum)
                    else:
                        # Not from a tag, and from a valid repo path
                        (src_p,
                         src_branch) = self._path_and_branch_for_path(src_p)
                        added_branches[br] = src_branch, src_rev, revision.revnum
                elif br in added_branches:
                    # keep the newest copyfrom revision seen for the branch
                    if paths[p].copyfrom_rev > added_branches[br][1]:
                        x,y,z = added_branches[br]
                        added_branches[br] = x, paths[p].copyfrom_rev, z
            else:
                t_name = self._is_path_tag(p)
                if t_name == False:
                    continue
                src_p, src_rev = paths[p].copyfrom_path, paths[p].copyfrom_rev
                # if you commit to a tag, I'm calling you stupid and ignoring
                # you.
                if src_p is not None and src_rev is not None:
                    if self._is_path_valid(src_p):
                        file, branch = self._path_and_branch_for_path(src_p)
                    else:
                        # some crazy people make tags from other tags
                        file = ''
                        from_tag = self._is_path_tag(src_p)
                        if not from_tag:
                            continue
                        branch, src_rev = self.tags[from_tag]
                    if t_name not in added_tags:
                        added_tags[t_name] = branch, src_rev
                    elif file and src_rev > added_tags[t_name][1]:
                        added_tags[t_name] = branch, src_rev
                elif (paths[p].action == 'D' and p.endswith(t_name)
                      and t_name in self.tags):
                    tags_to_delete.add(t_name)
        for t in tags_to_delete:
            del self.tags[t]
        self.tags.update(added_tags)
        self.branches.update(added_branches)
|
||||
|
||||
    def commit_current_delta(self):
        """Commit the replayed state of the current svn revision to hg,
        creating one hg changeset per affected branch (plus empty commits
        for branches flagged in commit_branches_empty).
        """
        if hasattr(self, '_exception_info'):
            # Replay died mid-revision; surface the stashed traceback.
            traceback.print_exception(*self._exception_info)
            raise ReplayException()
        if self.missing_plaintexts:
            raise MissingPlainTextError()
        files_to_commit = self.current_files.keys()
        files_to_commit.extend(self.current_files_symlink.keys())
        files_to_commit.extend(self.current_files_exec.keys())
        files_to_commit = sorted(list(set(files_to_commit)))
        branch_batches = {}
        rev = self.current_rev
        # svn dates look like 2008-08-01T12:34:56.000000Z
        date = rev.date.replace('T', ' ').replace('Z', '').split('.')[0]
        date += ' -0000'

        # build up the branches that have files on them
        for f in files_to_commit:
            if not self._is_path_valid(f):
                continue
            p, b = self._path_and_branch_for_path(f)
            if b not in branch_batches:
                branch_batches[b] = []
            branch_batches[b].append((p, f))

        for branch, files in branch_batches.iteritems():
            if branch in self.commit_branches_empty and files:
                del self.commit_branches_empty[branch]
            extra = {}
            # {branch-relative path: full svn path}
            files = dict(files)

            parents = (self.get_parent_revision(rev.revnum, branch),
                       revlog.nullid)
            if branch is not None:
                if branch not in self.branches:
                    continue
                if parents == (revlog.nullid, revlog.nullid):
                    assert False, ('a non-trunk branch should probably have'
                                   ' parents figured out by this point')
                extra['branch'] = branch
            parent_ctx = self.repo.changectx(parents[0])
            def filectxfn(repo, memctx, path):
                is_link = False
                is_exec = False
                copied = None
                current_file = files[path]
                if current_file in self.deleted_files:
                    # IOError tells memctx this path is deleted
                    raise IOError()
                # TODO(augie) tag copies from files
                if path in parent_ctx:
                    # start from the parent's flags, then apply overrides
                    is_exec = 'x' in parent_ctx.flags(path)
                    is_link = 'l' in parent_ctx.flags(path)
                if current_file in self.current_files_exec:
                    is_exec = self.current_files_exec[current_file]
                if current_file in self.current_files_symlink:
                    is_link = self.current_files_symlink[current_file]
                if current_file in self.current_files:
                    data = self.current_files[current_file]
                    if is_link:
                        # svn stores symlink targets prefixed with 'link '
                        assert data.startswith('link ')
                        data = data[len('link '):]
                else:
                    data = parent_ctx.filectx(path).data()
                return context.memfilectx(path=path,
                                          data=data,
                                          islink=is_link, isexec=is_exec,
                                          copied=copied)
            current_ctx = context.memctx(self.repo,
                                         parents,
                                         rev.message or '...',
                                         files.keys(),
                                         filectxfn,
                                         '%s%s' %(rev.author, self.author_host),
                                         date,
                                         extra)
            new_hash = self.repo.commitctx(current_ctx)
            self.ui.status('committed as %s on branch %s\n' %
                           (node.hex(new_hash), (branch or 'default')))
            if (rev.revnum, branch) not in self.revmap:
                self.revmap[rev.revnum, branch] = new_hash
                self._save_metadata()
        # now we handle branches that need to be committed without any files
        for branch in self.commit_branches_empty:
            ha = self.get_parent_revision(rev.revnum, branch)
            if ha == node.nullid:
                continue
            parent_ctx = self.repo.changectx(ha)
            def del_all_files(*args):
                raise IOError
            extra = {}
            if branch:
                extra['branch'] = branch
            # True here means nuke all files
            files = []
            if self.commit_branches_empty[branch]:
                files = parent_ctx.manifest().keys()
            current_ctx = context.memctx(self.repo,
                                         (ha, node.nullid),
                                         rev.message or ' ',
                                         files,
                                         del_all_files,
                                         '%s%s' % (rev.author,
                                                   self.author_host),
                                         date,
                                         extra)
            new_hash = self.repo.commitctx(current_ctx)
            self.ui.status('committed as %s on branch %s\n' %
                           (node.hex(new_hash), (branch or 'default')))
            if (rev.revnum, branch) not in self.revmap:
                self.revmap[rev.revnum, branch] = new_hash
                self._save_metadata()
        self.clear_current_info()
|
||||
|
||||
@property
def meta_data_dir(self):
    """Directory (.hg/svn) where hgsubversion keeps its metadata files."""
    return os.path.join(self.path, '.hg', 'svn')

def meta_file_named(self, name):
    """Return the full path of metadata file *name* inside meta_data_dir."""
    return os.path.join(self.meta_data_dir, name)

@property
def revmap_file(self):
    """Path of the pickled (svn revnum, branch) -> hg node map."""
    return self.meta_file_named('rev_map')

@property
def svn_url_file(self):
    """Path of the file recording the mirrored Subversion URL."""
    return self.meta_file_named('url')

@property
def uuid_file(self):
    """Path of the file recording the Subversion repository UUID."""
    return self.meta_file_named('uuid')

@property
def last_revision_handled_file(self):
    """Path of the file recording the last svn revision converted."""
    return self.meta_file_named('last_rev')

@property
def branch_info_file(self):
    """Path of the pickled branch metadata file."""
    return self.meta_file_named('branch_info')

@property
def tag_info_file(self):
    """Path of the pickled tag metadata file."""
    return self.meta_file_named('tag_info')

@property
def tag_locations_file(self):
    """Path of the file listing where tags live in the svn layout."""
    return self.meta_file_named('tag_locations')
|
||||
|
||||
@property
def url(self):
    """The Subversion URL this repository mirrors (from .hg/svn/url).

    Fix: close the file handle explicitly instead of leaking it until
    garbage collection.
    """
    f = open(self.svn_url_file)
    try:
        return f.read()
    finally:
        f.close()
|
||||
|
||||
@stash_exception_on_self
def delete_entry(self, path, revision_bogus, parent_baton, pool=None):
    """Svn delta-editor callback: record deletion of *path*.

    If *path* is (or looks like) a directory, every file under it in the
    parent changeset is marked deleted as well, since svn only sends one
    delete event for the whole tree.
    """
    if self._is_path_valid(path):
        br_path, branch = self._path_and_branch_for_path(path)
        ha = self.get_parent_revision(self.current_rev.revnum, branch)
        if ha == revlog.nullid:
            # no parent revision on this branch, nothing to delete from
            return
        ctx = self.repo.changectx(ha)
        if br_path not in ctx:
            br_path2 = ''
            if br_path != '':
                br_path2 = br_path + '/'
            # assuming it is a directory: delete everything beneath it
            for f in ctx:
                if f.startswith(br_path2):
                    f_p = '%s/%s' % (path, f[len(br_path2):])
                    self.deleted_files[f_p] = True
                    # empty content marks the file as gone in this delta
                    self.current_files[f_p] = ''
                    self.ui.status('D %s\n' % f_p)
        self.deleted_files[path] = True
        self.current_files[path] = ''
        self.ui.status('D %s\n' % path)
|
||||
|
||||
@stash_exception_on_self
def open_file(self, path, parent_baton, base_revision, p=None):
    """Svn delta-editor callback: start modifying an existing file.

    Sets current_file to the path (or the 'foobaz' sentinel when the path
    is outside the layout we track) and remembers the base revision svn
    reported, if any.
    """
    # sentinel: marks the current file as invalid unless proven otherwise
    self.current_file = 'foobaz'
    if not self._is_path_valid(path):
        return
    self.current_file = path
    self.ui.status('M %s\n' % path)
    # -1 means svn did not report a base revision for this edit
    self.base_revision = base_revision if base_revision != -1 else None
    self.should_edit_most_recent_plaintext = True
|
||||
|
||||
@stash_exception_on_self
def add_file(self, path, parent_baton, copyfrom_path,
             copyfrom_revision, file_pool=None):
    """Svn delta-editor callback: a file was added, possibly via copy.

    On a copy, the source file's data and flags are pulled from the hg
    changeset corresponding to copyfrom_revision and seeded into the
    current-delta buffers.
    """
    # sentinel: invalid unless the path turns out to be inside our layout
    self.current_file = 'foobaz'
    self.base_revision = None
    # an add after a delete within the same revision is a replace, not a
    # delete, so drop the earlier delete record
    if path in self.deleted_files:
        del self.deleted_files[path]
    if self._is_path_valid(path):
        self.current_file = path
        self.should_edit_most_recent_plaintext = False
        if copyfrom_path:
            self.ui.status('A+ %s\n' % path)
            # TODO(augie) handle this better, actually mark a copy
            (from_file,
             from_branch) = self._path_and_branch_for_path(copyfrom_path)
            # copyfrom_revision + 1 because the revmap is keyed by the
            # revision in which the source content became visible
            ha = self.get_parent_revision(copyfrom_revision + 1,
                                          from_branch)
            ctx = self.repo.changectx(ha)
            if from_file in ctx:
                fctx = ctx.filectx(from_file)
                cur_file = self.current_file
                self.current_files[cur_file] = fctx.data()
                self.current_files_symlink[cur_file] = 'l' in fctx.flags()
                self.current_files_exec[cur_file] = 'x' in fctx.flags()
        else:
            self.ui.status('A %s\n' % path)
|
||||
|
||||
|
||||
@stash_exception_on_self
def add_directory(self, path, parent_baton, copyfrom_path,
                  copyfrom_revision, dir_pool=None):
    """Svn delta-editor callback: a directory was added, possibly via copy.

    A plain add of a branch root marks the branch commit as empty; a copy
    seeds every file under the copy source into the current-delta buffers.
    """
    if self._is_path_valid(path):
        junk, branch = self._path_and_branch_for_path(path)
        # empty 'junk' means this path IS the branch root
        if not copyfrom_path and not junk:
            self.commit_branches_empty[branch] = True
        else:
            self.commit_branches_empty[branch] = False
    if not (self._is_path_valid(path) and copyfrom_path and
            self._is_path_valid(copyfrom_path)):
        return

    cp_f, br_from = self._path_and_branch_for_path(copyfrom_path)
    # + 1: the revmap is keyed by the revision where the source landed
    new_hash = self.get_parent_revision(copyfrom_revision + 1, br_from)
    if new_hash == node.nullid:
        # we have not converted the copy source yet; remember to backfill
        self.missing_plaintexts.add('%s/' % path)
        return
    cp_f_ctx = self.repo.changectx(new_hash)
    if cp_f != '/' and cp_f != '':
        cp_f = '%s/' % cp_f
    else:
        cp_f = ''
    # copy every file under the source dir into this revision's buffers
    for f in cp_f_ctx:
        if f.startswith(cp_f):
            f2 = f[len(cp_f):]
            fctx = cp_f_ctx.filectx(f)
            fp_c = path + '/' + f2
            self.current_files[fp_c] = fctx.data()
            self.current_files_exec[fp_c] = 'x' in fctx.flags()
            self.current_files_symlink[fp_c] = 'l' in fctx.flags()
            # TODO(augie) tag copies from files
|
||||
|
||||
@stash_exception_on_self
def change_file_prop(self, file_baton, name, value, pool=None):
    """Svn delta-editor callback: record a property change on the
    current file.

    Only svn:executable and svn:special (symlink) are tracked; any other
    property is ignored.
    """
    prop_tables = {
        'svn:executable': self.current_files_exec,
        'svn:special': self.current_files_symlink,
    }
    table = prop_tables.get(name)
    if table is not None:
        # a None value means the property was removed -> False
        table[self.current_file] = bool(value)
|
||||
|
||||
@stash_exception_on_self
def open_directory(self, path, parent_baton, base_revision, dir_pool=None):
    """Svn delta-editor callback: an existing directory was opened.

    Opening the branch root itself proves the branch's commit will not be
    empty this revision.
    """
    if not self._is_path_valid(path):
        return
    branch_relative, branch = self._path_and_branch_for_path(path)
    if branch_relative == '':
        self.commit_branches_empty[branch] = False
|
||||
|
||||
@stash_exception_on_self
def apply_textdelta(self, file_baton, base_checksum, pool=None):
    """Svn delta-editor callback: return a window handler that applies a
    text delta on top of the current file's base text.

    The base text comes from (in priority order) the in-progress delta
    buffer, or the parent hg changeset; files whose base we do not have
    yet are recorded in missing_plaintexts for a later backfill pass.
    """
    base = ''
    if not self._is_path_valid(self.current_file):
        # no-op handler for paths outside the tracked layout
        return lambda x: None
    if (self.current_file in self.current_files
        and not self.should_edit_most_recent_plaintext):
        base = self.current_files[self.current_file]
    elif (base_checksum is not None or
          self.should_edit_most_recent_plaintext):
        p_, br = self._path_and_branch_for_path(self.current_file)
        par_rev = self.current_rev.revnum
        if self.base_revision:
            par_rev = self.base_revision + 1
        ha = self.get_parent_revision(par_rev, br)
        if ha != revlog.nullid:
            ctx = self.repo.changectx(ha)
            if not p_ in ctx:
                self.missing_plaintexts.add(self.current_file)
                # short circuit exit since we can't do anything anyway
                return lambda x: None
            base = ctx.filectx(p_).data()
    source = cStringIO.StringIO(base)
    target = cStringIO.StringIO()
    self.stream = target

    handler, baton = delta.svn_txdelta_apply(source, target, None)
    if not callable(handler):
        # TODO(augie) Raise a real exception, don't just fail an assertion.
        assert False, 'handler not callable, bindings are broken'
    def txdelt_window(window):
        # closure invoked once per delta window; window=None finalizes
        try:
            if not self._is_path_valid(self.current_file):
                return
            handler(window, baton)
            # window being None means commit this file
            if not window:
                self.current_files[self.current_file] = target.getvalue()
        except core.SubversionException, e:
            if e.message == 'Delta source ended unexpectedly':
                # our base text was wrong/absent; schedule a backfill
                self.missing_plaintexts.add(self.current_file)
            else:
                self._exception_info = sys.exc_info()
                raise
        except:
            print len(base), self.current_file
            self._exception_info = sys.exc_info()
            raise
    return txdelt_window
|
||||
|
||||
class MissingPlainTextError(Exception):
    """Raised when replaying a txdelta needs a source file the local
    repository does not have yet.
    """
|
||||
|
||||
class ReplayException(Exception):
    """Raised on commit when the preceding replay stashed an exception.
    """
|
114
push_cmd.py
Normal file
114
push_cmd.py
Normal file
@ -0,0 +1,114 @@
|
||||
from mercurial import util as merc_util
|
||||
from mercurial import hg
|
||||
from svn import core
|
||||
|
||||
import util
|
||||
import hg_delta_editor
|
||||
import svnwrap
|
||||
import fetch_command
|
||||
import utility_commands
|
||||
|
||||
|
||||
@util.register_subcommand('push')
@util.register_subcommand('dcommit') # for git expats
def push_revisions_to_subversion(ui, repo, hg_repo_path, svn_url, **opts):
    """Push revisions starting at a specified head back to Subversion.

    Pushes outgoing changesets oldest-first, one svn commit each, then
    re-fetches from svn and rebases the remaining local work onto the
    freshly converted changeset.
    """
    #assert False # safety while the command is partially implemented.
    hge = hg_delta_editor.HgChangeReceiver(hg_repo_path,
                                           ui_=ui)
    # invert the revmap: hg node -> (svn revnum, branch)
    svn_commit_hashes = dict(zip(hge.revmap.itervalues(),
                                 hge.revmap.iterkeys()))
    # Strategy:
    # 1. Find all outgoing commits from this head
    outgoing = utility_commands.outgoing_revisions(ui, repo, hge,
                                                   svn_commit_hashes)
    if not (outgoing and len(outgoing)):
        ui.status('No revisions to push.')
        return 0
    if len(repo.parents()) != 1:
        ui.status('Cowardly refusing to push branch merge')
        return 1
    while outgoing:
        # oldest-first so each svn commit's parent already exists in svn
        oldest = outgoing.pop(-1)
        old_ctx = repo[oldest]
        if len(old_ctx.parents()) != 1:
            ui.status('Found a branch merge, this needs discussion and '
                      'implementation.')
            return 1
        base_n = old_ctx.parents()[0].node()
        old_children = repo[base_n].children()
        # 2. Commit oldest revision that needs to be pushed
        base_revision = svn_commit_hashes[old_ctx.parents()[0].node()][0]
        commit_from_rev(ui, repo, old_ctx, hge, svn_url, base_revision)
        # 3. Fetch revisions from svn
        r = fetch_command.fetch_revisions(ui, svn_url, hg_repo_path)
        assert not r or r == 0
        # 4. Find the new head of the target branch
        repo = hg.repository(ui, hge.path)
        base_c = repo[base_n]
        # the replacement is the child of base that did not exist before
        # the fetch and is on the same named branch
        replacement = [c for c in base_c.children() if c not in old_children
                       and c.branch() == old_ctx.branch()]
        assert len(replacement) == 1
        replacement = replacement[0]
        # 5. Rebase all children of the currently-pushing rev to the new branch
        heads = repo.heads(old_ctx.node())
        for needs_transplant in heads:
            hg.clean(repo, needs_transplant)
            utility_commands.rebase_commits(ui, repo, hg_repo_path, **opts)
            repo = hg.repository(ui, hge.path)
            if needs_transplant in outgoing:
                hg.clean(repo, repo['tip'].node())
            # the rebase invalidated our editor and maps; rebuild them
            hge = hg_delta_editor.HgChangeReceiver(hg_repo_path, ui_=ui)
            svn_commit_hashes = dict(zip(hge.revmap.itervalues(),
                                         hge.revmap.iterkeys()))
            outgoing = utility_commands.outgoing_revisions(ui, repo, hge,
                                                           svn_commit_hashes)
    return 0
|
||||
|
||||
|
||||
def commit_from_rev(ui, repo, rev_ctx, hg_editor, svn_url, base_revision):
|
||||
"""Build and send a commit from Mercurial to Subversion.
|
||||
"""
|
||||
target_files = []
|
||||
file_data = {}
|
||||
for file in rev_ctx.files():
|
||||
parent = rev_ctx.parents()[0]
|
||||
new_data = base_data = ''
|
||||
action = ''
|
||||
if file in rev_ctx:
|
||||
new_data = rev_ctx.filectx(file).data()
|
||||
if file not in parent:
|
||||
target_files.append(file)
|
||||
action = 'add'
|
||||
# TODO check for mime-type autoprops here
|
||||
# TODO check for directory adds here
|
||||
else:
|
||||
target_files.append(file)
|
||||
base_data = parent.filectx(file).data()
|
||||
action = 'modify'
|
||||
else:
|
||||
target_files.append(file)
|
||||
base_data = parent.filectx(file).data()
|
||||
action = 'delete'
|
||||
file_data[file] = base_data, new_data, action
|
||||
|
||||
# TODO check for directory deletes here
|
||||
svn = svnwrap.SubversionRepo(svn_url)
|
||||
parent_branch = rev_ctx.parents()[0].branch()
|
||||
branch_path = 'trunk'
|
||||
if parent_branch and parent_branch != 'default':
|
||||
branch_path = 'branches/%s' % parent_branch
|
||||
new_target_files = ['%s/%s' % (branch_path, f) for f in target_files]
|
||||
for tf, ntf in zip(target_files, new_target_files):
|
||||
if tf in file_data:
|
||||
file_data[ntf] = file_data[tf]
|
||||
del file_data[tf]
|
||||
try:
|
||||
svn.commit(new_target_files, rev_ctx.description(), file_data,
|
||||
base_revision, set([]))
|
||||
except core.SubversionException, e:
|
||||
if hasattr(e, 'apr_err') and e.apr_err == 160028:
|
||||
raise merc_util.Abort('Base text was out of date, maybe rebase?')
|
||||
else:
|
||||
raise
|
155
svncommand.py
Normal file
155
svncommand.py
Normal file
@ -0,0 +1,155 @@
|
||||
import os
|
||||
import pickle
|
||||
import stat
|
||||
|
||||
from mercurial import hg
|
||||
from mercurial import node
|
||||
|
||||
import svnwrap
|
||||
import hg_delta_editor
|
||||
import util
|
||||
from util import register_subcommand, svn_subcommands
|
||||
# dirty trick to force demandimport to run my decorator anyway.
|
||||
from utility_commands import print_wc_url
|
||||
from fetch_command import fetch_revisions
|
||||
from push_cmd import commit_from_rev
|
||||
# shut up, pyflakes, we must import those
|
||||
__x = [print_wc_url, fetch_revisions, commit_from_rev, ]
|
||||
|
||||
# Unix permission masks built from stat constants:
# mode755 = rwxr-xr-x, mode644 = rw-r--r--
mode755 = (stat.S_IXUSR | stat.S_IXGRP| stat.S_IXOTH | stat.S_IRUSR |
           stat.S_IRGRP| stat.S_IROTH | stat.S_IWUSR)
mode644 = (stat.S_IRUSR | stat.S_IRGRP| stat.S_IROTH | stat.S_IWUSR)
|
||||
|
||||
|
||||
def svncmd(ui, repo, subcommand, *args, **opts):
    """Dispatch 'hg svn <subcommand>' to a registered subcommand.

    Unambiguous prefixes are accepted (e.g. 'fe' for 'fetch'). The stored
    svn URL is injected into opts before the subcommand runs.
    """
    if subcommand not in svn_subcommands:
        # allow a unique prefix to stand in for the full subcommand name
        candidates = []
        for c in svn_subcommands:
            if c.startswith(subcommand):
                candidates.append(c)
        if len(candidates) == 1:
            subcommand = candidates[0]
    path = os.path.dirname(repo.path)
    try:
        opts['svn_url'] = open(os.path.join(repo.path, 'svn', 'url')).read()
        return svn_subcommands[subcommand](ui, args=args,
                                           hg_repo_path=path,
                                           repo=repo,
                                           **opts)
    except TypeError, e:
        print e
        print 'Bad arguments for subcommand %s' % subcommand
    except KeyError, e:
        # NOTE(review): this also swallows KeyErrors raised *inside* the
        # subcommand body, not only unknown subcommand names -- confirm
        # before relying on this message.
        print 'Unknown subcommand %s' % subcommand
|
||||
|
||||
@register_subcommand('help')
|
||||
def help_command(ui, args=None, **opts):
|
||||
"""Get help on the subsubcommands.
|
||||
"""
|
||||
if args and args[0] in svn_subcommands:
|
||||
print svn_subcommands[args[0]].__doc__.strip()
|
||||
return
|
||||
print 'Valid commands:', ' '.join(sorted(svn_subcommands.keys()))
|
||||
|
||||
@register_subcommand('gentags')
def generate_hg_tags(ui, hg_repo_path, **opts):
    """Save tags to .hg/localtags.

    Reads the pickled tag_info metadata and writes one localtags line per
    tag, resolving each tag's (branch, svn revnum) source to an hg node.

    Fix: both file handles are now closed deterministically instead of
    being leaked until garbage collection.
    """
    hg_editor = hg_delta_editor.HgChangeReceiver(hg_repo_path, ui_=ui)
    f = open(hg_editor.tag_info_file)
    try:
        tag_info = pickle.load(f)
    finally:
        f.close()
    f = open(os.path.join(hg_repo_path, '.hg', 'localtags'), 'w')
    try:
        for tag, source in tag_info.iteritems():
            # source is (branch, svn revnum); +1 keys into the revmap
            source_ha = hg_editor.get_parent_revision(source[1]+1, source[0])
            f.write('%s tag:%s\n' % (node.hex(source_ha), tag))
    finally:
        f.close()
|
||||
|
||||
@register_subcommand('up')
def update(ui, args, repo, clean=False, **opts):
    """Update to a specified Subversion revision number.

    Looks the svn revision up in the pickled rev_map; succeeds only when
    the revision maps to exactly one hg changeset. Returns 0 on success,
    1 when the revision is unknown or ambiguous.
    """
    assert len(args) == 1
    rev = int(args[0])
    path = os.path.join(repo.path, 'svn', 'rev_map')
    answers = []
    for k, v in pickle.load(open(path)).iteritems():
        # rev_map keys are (svn revnum, branch); values are hg nodes
        if k[0] == rev:
            answers.append((v, k[1]))
    if len(answers) == 1:
        if clean:
            return hg.clean(repo, answers[0][0])
        return hg.update(repo, answers[0][0])
    elif len(answers) == 0:
        ui.status('Revision %s did not produce an hg revision.\n' % rev)
        return 1
    else:
        # Bug fix: this branch handles an svn revision that maps to MORE
        # than one hg changeset (one per branch) -- it IS ambiguous; the
        # message used to say 'Non-ambiguous'.
        ui.status('Ambiguous revision!\n')
        ui.status('\n'.join(['%s on %s' % (node.hex(a[0]), a[1]) for a in
                             answers]+['']))
        return 1
|
||||
|
||||
|
||||
@register_subcommand('verify_revision')
def verify_revision(ui, args, repo, force=False, **opts):
    """Verify a single converted revision.

    Note: This wipes your working copy and then exports the corresponding
    Subversion revision into your working copy to verify. Use with caution.

    Returns 0 when the exported tree matches, 2 on mismatch, 1 when the
    svn revision produced no hg revision.

    Fix: the local status tuple no longer shadows the imported `stat`
    module.
    """
    assert len(args) == 1
    if not force:
        # refuse to run on a dirty working copy
        assert repo.status(ignored=True,
                           unknown=True) == ([], [], [], [], [], [], [])
    rev = int(args[0])
    wc_path = os.path.dirname(repo.path)
    svn_url = open(os.path.join(repo.path, 'svn', 'url')).read()
    svn = svnwrap.SubversionRepo(svn_url)
    util.wipe_all_files(wc_path)
    if update(ui, args, repo, clean=True) == 0:
        util.wipe_all_files(wc_path)
        br = repo.dirstate.branch()
        if br == 'default':
            br = None
        if br:
            diff_path = 'branches/%s' % br
        else:
            diff_path = 'trunk'
        svn.fetch_all_files_to_dir(diff_path, rev, wc_path)
        status = repo.status(unknown=True)
        # unknown files (index 4), ignoring svn's own administrative dirs
        unknown = [s for s in status[4]
                   if '/.svn/' not in s and not s.startswith('.svn/')]
        status = status[0:4]
        if status != ([], [], [], [],) or unknown != []:
            ui.status('Something is wrong with this revision.\n')
            return 2
        else:
            ui.status('OK.\n')
            return 0
    return 1
|
||||
|
||||
@register_subcommand('verify_all_revisions')
|
||||
def verify_all_revisions(ui, args, repo, **opts):
|
||||
"""Verify all the converted revisions, optionally starting at a revision.
|
||||
|
||||
Note: This is *extremely* abusive of the Subversion server. It exports every
|
||||
revision of the code one revision at a time.
|
||||
"""
|
||||
assert repo.status(ignored=True,
|
||||
unknown=True) == ([], [], [], [], [], [], [])
|
||||
start_rev = 0
|
||||
args = list(args)
|
||||
if args:
|
||||
start_rev = int(args.pop(0))
|
||||
revmap_f = open(os.path.join(repo.path, 'svn', 'rev_map'))
|
||||
revmap = pickle.load(revmap_f)
|
||||
revs = sorted(revmap.keys())
|
||||
for revnum, br in revs:
|
||||
if revnum < start_rev:
|
||||
continue
|
||||
res = verify_revision(ui, [revnum], repo, force=True)
|
||||
if res == 0:
|
||||
print revnum, 'verfied'
|
||||
elif res == 1:
|
||||
print revnum, 'skipped'
|
||||
else:
|
||||
print revnum, 'failed'
|
||||
return 1
|
||||
return 0
|
16
svnwrap/__init__.py
Normal file
16
svnwrap/__init__.py
Normal file
@ -0,0 +1,16 @@
|
||||
"""This is a special package because it contains (or will contain, as of now)
|
||||
two parallel implementations of the same code. One implementation, the original,
|
||||
uses the SWIG Python bindings. That's great, but those leak RAM and have a few
|
||||
other quirks. There are new, up-and-coming ctypes bindings for Subversion which
|
||||
look more promising, and are portable backwards to 1.4's libraries. The goal is
|
||||
to have this file automatically contain the "best" available implementation
|
||||
without the user having to configure what is actually present.
|
||||
"""
|
||||
|
||||
#try:
|
||||
# # we do __import__ here so that the correct items get pulled in. Otherwise
|
||||
# # demandimport can make life difficult.
|
||||
# __import__('csvn')
|
||||
# from svn_ctypes_wrapper import *
|
||||
#except ImportError, e:
|
||||
from svn_swig_wrapper import *
|
120
svnwrap/svn_ctypes_wrapper.py
Normal file
120
svnwrap/svn_ctypes_wrapper.py
Normal file
@ -0,0 +1,120 @@
|
||||
"""Right now this is a dummy module, but it should wrap the ctypes API and
|
||||
allow running this more easily without the SWIG bindings.
|
||||
"""
|
||||
from csvn import repos
|
||||
|
||||
class Revision(object):
    """Wrapper for a Subversion revision.

    Holds the revision number, author, log message, date, and the changed
    paths re-keyed relative to *strip_path*.
    """
    def __init__(self, revnum, author, message, date, paths, strip_path=''):
        self.revnum = revnum
        self.author = author
        self.message = message
        # TODO parse this into a datetime
        self.date = date
        # strip the repo-subdir prefix off every changed path
        prefix_len = len(strip_path)
        self.paths = dict((p[prefix_len:], paths[p]) for p in paths)

    def __str__(self):
        return 'r%d by %s' % (self.revnum, self.author)
|
||||
|
||||
|
||||
class SubversionRepo(object):
    """Wrapper for a Subversion repository.

    This uses the SWIG Python bindings, and will only work on svn >= 1.4.
    It takes a required param, the URL.

    NOTE(review): this ctypes variant is a placeholder -- most methods
    raise NotImplementedError; see svn_swig_wrapper for the working
    implementation.
    """
    def __init__(self, url=''):
        self.svn_url = url

        self.init_ra_and_client()
        # NOTE(review): 'ra' and 'self.pool' are not defined in this
        # module (only csvn.repos is imported), so the lines below would
        # raise NameError at runtime -- confirm before using this class.
        self.uuid = ra.get_uuid(self.ra, self.pool)
        repo_root = ra.get_repos_root(self.ra, self.pool)
        # *will* have a leading '/', would not if we used get_repos_root2
        self.subdir = url[len(repo_root):]
        if not self.subdir or self.subdir[-1] != '/':
            self.subdir += '/'

    def init_ra_and_client(self):
        # TODO(augie) need to figure out a way to do auth
        self.repo = repos.RemoteRepository(self.svn_url)

    @property
    def HEAD(self):
        """The youngest revision number on the server (not implemented)."""
        raise NotImplementedError

    @property
    def START(self):
        """The oldest possible revision number."""
        return 0

    @property
    def branches(self):
        """Get the branches defined in this repo assuming a standard layout.
        """
        raise NotImplementedError

    @property
    def tags(self):
        """Get the current tags in this repo assuming a standard layout.

        This returns a dictionary of tag: (source path, source rev)
        """
        raise NotImplementedError

    def _get_copy_source(self, path, cached_head=None):
        """Get copy revision for the given path, assuming it was meant to be
        a copy of the entire tree.
        """
        raise NotImplementedError

    def list_dir(self, dir, revision=None):
        """List the contents of a server-side directory.

        Returns a dict-like object with one dict key per directory entry.

        Args:
          dir: the directory to list, no leading slash
          rev: the revision at which to list the directory, defaults to HEAD
        """
        raise NotImplementedError

    def revisions(self, start=None, chunk_size=1000):
        """Load the history of this repo.

        This is LAZY. It returns a generator, and fetches a small number
        of revisions at a time.

        The reason this is lazy is so that you can use the same repo object
        to perform RA calls to get deltas.
        """
        # NB: you'd think this would work, but you'd be wrong. I'm pretty
        # convinced there must be some kind of svn bug here.
        #return self.fetch_history_at_paths(['tags', 'trunk', 'branches'],
        #                                   start=start)
        # this does the same thing, but at the repo root + filtering. It's
        # kind of tough cookies, sadly.
        raise NotImplementedError


    def fetch_history_at_paths(self, paths, start=None, stop=None,
                               chunk_size=1000):
        """Yield Revision objects for paths (not implemented)."""
        raise NotImplementedError

    def get_replay(self, revision, editor, oldest_rev_i_have=0):
        """Replay a revision against a delta editor (not implemented)."""
        raise NotImplementedError

    def get_unified_diff(self, path, revision, deleted=True, ignore_type=False):
        """Produce a unified diff for path at revision (not implemented)."""
        raise NotImplementedError

    def get_file(self, path, revision):
        """Fetch a single file's contents (not implemented)."""
        raise NotImplementedError

    def proplist(self, path, revision, recurse=False):
        """List svn properties on path (not implemented)."""
        raise NotImplementedError

    def fetch_all_files_to_dir(self, path, revision, checkout_path):
        """Export a server-side tree to checkout_path (not implemented)."""
        raise NotImplementedError
|
||||
|
||||
class SubversionRepoCanNotReplay(Exception):
    """Raised when the svn server is too old to support replay.
    """
|
381
svnwrap/svn_swig_wrapper.py
Normal file
381
svnwrap/svn_swig_wrapper.py
Normal file
@ -0,0 +1,381 @@
|
||||
import cStringIO
|
||||
import getpass
|
||||
import os
|
||||
import pwd
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
from svn import client
|
||||
from svn import core
|
||||
from svn import delta
|
||||
from svn import ra
|
||||
|
||||
svn_config = core.svn_config_get_config(None)
|
||||
|
||||
|
||||
def user_pass_prompt(realm, default_username, ms, pool):
    """Simple-auth provider callback: prompt for username and password.

    Writes prompts to stderr/uses getpass so stdout stays clean; returns
    a populated svn_auth_cred_simple_t.
    """
    creds = core.svn_auth_cred_simple_t()
    creds.may_save = ms
    if default_username:
        sys.stderr.write('Auth realm: %s\n' % (realm,))
        creds.username = default_username
    else:
        sys.stderr.write('Auth realm: %s\n' % (realm,))
        sys.stderr.write('Username: ')
        sys.stderr.flush()
        creds.username = sys.stdin.readline().strip()
    creds.password = getpass.getpass('Password for %s: ' % creds.username)
    return creds
|
||||
|
||||
def _create_auth_baton(pool):
    """Create a Subversion authentication baton.

    Stacks the standard file-based credential providers, any
    platform-specific keychain providers the bindings expose, and finally
    an interactive prompt (with 2 retries) as the fallback.
    """
    # Give the client context baton a suite of authentication
    # providers.h
    providers = [
        client.get_simple_provider(),
        client.get_username_provider(),
        client.get_ssl_client_cert_file_provider(),
        client.get_ssl_client_cert_pw_file_provider(),
        client.get_ssl_server_trust_file_provider(),
        ]
    # Platform-dependant authentication methods
    if hasattr(client, 'get_windows_simple_provider'):
        providers.append(client.get_windows_simple_provider())
    if hasattr(client, 'get_keychain_simple_provider'):
        providers.append(client.get_keychain_simple_provider())
    providers.extend([client.get_simple_prompt_provider(user_pass_prompt, 2),
                      ])
    return core.svn_auth_open(providers, pool)
|
||||
|
||||
|
||||
class Revision(object):
    """Wrapper for a Subversion revision.

    Stores revnum/author/message/date plus the changed paths re-keyed
    relative to *strip_path* (the repository subdirectory prefix).
    """
    def __init__(self, revnum, author, message, date, paths, strip_path=''):
        self.revnum = revnum
        self.author = author
        self.message = message
        # TODO parse this into a datetime
        self.date = date
        # drop the subdir prefix from each changed path
        cut = len(strip_path)
        self.paths = dict((key[cut:], paths[key]) for key in paths)

    def __str__(self):
        return 'r%d by %s' % (self.revnum, self.author)
|
||||
|
||||
class SubversionRepo(object):
|
||||
"""Wrapper for a Subversion repository.
|
||||
|
||||
This uses the SWIG Python bindings, and will only work on svn >= 1.4.
|
||||
It takes a required param, the URL.
|
||||
"""
|
||||
def __init__(self, url=''):
    """Open an authenticated RA session to *url*.

    Records the repository uuid and the subdirectory of the repo root that
    *url* points at (always normalized to end with '/').
    """
    self.svn_url = url
    self.auth_baton_pool = core.Pool()
    self.auth_baton = _create_auth_baton(self.auth_baton_pool)

    self.init_ra_and_client()
    self.uuid = ra.get_uuid(self.ra, self.pool)
    repo_root = ra.get_repos_root(self.ra, self.pool)
    # *will* have a leading '/', would not if we used get_repos_root2
    self.subdir = url[len(repo_root):]
    if not self.subdir or self.subdir[-1] != '/':
        self.subdir += '/'
|
||||
|
||||
def init_ra_and_client(self):
    """Initializes the RA and client layers, because sometimes getting
    unified diffs runs the remote server out of open files.
    """
    # while we're in here we'll recreate our pool
    self.pool = core.Pool()
    self.client_context = client.create_context()
    # default the svn username to the current unix user
    self.uname = str(pwd.getpwuid(os.getuid())[0])
    core.svn_auth_set_parameter(self.auth_baton,
                                core.SVN_AUTH_PARAM_DEFAULT_USERNAME,
                                self.uname)

    self.client_context.auth_baton = self.auth_baton
    self.client_context.config = svn_config
    self.ra = client.open_ra_session(self.svn_url.encode('utf8'),
                                     self.client_context)
|
||||
|
||||
|
||||
@property
def HEAD(self):
    """The youngest revision number currently on the server."""
    return ra.get_latest_revnum(self.ra, self.pool)

@property
def START(self):
    """The oldest possible revision number (revision 0)."""
    return 0
|
||||
|
||||
@property
def branches(self):
    """Get the branches defined in this repo assuming a standard layout.

    Returns a dict of branch name -> (source path, source rev, revnum of
    the branch's most recent copy-bounded history entry).
    """
    branches = self.list_dir('branches').keys()
    branch_info = {}
    head=self.HEAD
    for b in branches:
        b_path = 'branches/%s' %b
        hist_gen = self.fetch_history_at_paths([b_path], stop=head)
        hist = hist_gen.next()
        source, source_rev = self._get_copy_source(b_path, cached_head=head)
        # This if statement guards against projects that have non-ancestral
        # branches by not listing them as branches
        # Note that they probably are really ancestrally related, but there
        # is just no way for us to know how.
        if source is not None and source_rev is not None:
            branch_info[b] = (source, source_rev, hist.revnum)
    return branch_info
|
||||
|
||||
@property
def tags(self):
    """Get the current tags in this repo assuming a standard layout.

    This returns a dictionary of tag: (source path, source rev)
    """
    tags = self.list_dir('tags').keys()
    tag_info = {}
    # fetch HEAD once and reuse it for every copy-source lookup
    head = self.HEAD
    for t in tags:
        tag_info[t] = self._get_copy_source('tags/%s' % t,
                                            cached_head=head)
    return tag_info
|
||||
|
||||
def _get_copy_source(self, path, cached_head=None):
    """Get copy revision for the given path, assuming it was meant to be
    a copy of the entire tree.

    Returns (source path relative to the repo subdir, source revnum), or
    (None, None) when the path was not created by a copy.
    """
    if not cached_head:
        cached_head = self.HEAD
    hist_gen = self.fetch_history_at_paths([path], stop=cached_head)
    hist = hist_gen.next()
    if hist.paths[path].copyfrom_path is None:
        return None, None
    source = hist.paths[path].copyfrom_path
    source_rev = 0
    for p in hist.paths:
        if hist.paths[p].copyfrom_rev:
            # We assume that the revision of the source tree as it was
            # copied was actually the revision of the highest revision
            # copied item. This could be wrong, but in practice it will
            # *probably* be correct
            if source_rev < hist.paths[p].copyfrom_rev:
                source_rev = hist.paths[p].copyfrom_rev
    source = source[len(self.subdir):]
    return source, source_rev
|
||||
|
||||
def list_dir(self, dir, revision=None):
    """List the contents of a server-side directory.

    Returns a dict-like object with one dict key per directory entry.

    Args:
        dir: the directory to list, no leading slash
        revision: the revision at which to list the directory, defaults
            to HEAD

    Fix: trailing-slash stripping uses endswith(), so an empty string no
    longer raises IndexError from dir[-1].
    """
    if dir.endswith('/'):
        dir = dir[:-1]
    if revision is None:
        revision = self.HEAD
    r = ra.get_dir2(self.ra, dir, revision, core.SVN_DIRENT_KIND, self.pool)
    # get_dir2 returns (entries, fetched-rev/props, junk); we only want
    # the entries mapping
    folders, props, junk = r
    return folders
|
||||
|
||||
def revisions(self, start=None, chunk_size=1000):
    """Load the history of this repo.

    This is LAZY. It returns a generator, and fetches a small number
    of revisions at a time.

    The reason this is lazy is so that you can use the same repo object
    to perform RA calls to get deltas.
    """
    # NB: you'd think this would work, but you'd be wrong. I'm pretty
    # convinced there must be some kind of svn bug here.
    #return self.fetch_history_at_paths(['tags', 'trunk', 'branches'],
    #                                   start=start)
    # Instead, walk history at the repo root and keep only revisions that
    # touch the standard layout directories.
    layout_prefixes = ('trunk', 'tags', 'branches')
    for r in self.fetch_history_at_paths([''], start=start,
                                         chunk_size=chunk_size):
        if any(p.startswith(layout_prefixes) for p in r.paths):
            yield r
|
||||
|
||||
|
||||
def fetch_history_at_paths(self, paths, start=None, stop=None,
                           chunk_size=1000):
    """Generator of Revision objects for *paths*, oldest first.

    History is pulled from the server in batches of *chunk_size* log
    entries; each batch is yielded before the next is requested.
    """
    revisions = []
    def callback(paths, revnum, author, date, message, pool):
        # receiver invoked once per log entry by ra.get_log
        r = Revision(revnum, author, message, date, paths,
                     strip_path=self.subdir)
        revisions.append(r)
    if not start:
        start = self.START
    if not stop:
        stop = self.HEAD
    while stop > start:
        ra.get_log(self.ra, paths,
                   start+1,
                   stop,
                   chunk_size, #limit of how many log messages to load
                   True, # discover changed paths (Revision needs r.paths)
                   True, # strict node history: do not cross copies
                   callback,
                   self.pool)
        if len(revisions) < chunk_size:
            # this means there was no history for the path, so force the
            # loop to exit
            start = stop
        else:
            start = revisions[-1].revnum
        # drain this batch before fetching the next one
        while len(revisions) > 0:
            yield revisions[0]
            revisions.pop(0)
|
||||
|
||||
def commit(self, paths, message, file_data, base_revision, dirs):
    """Commits the appropriate targets from revision in editor's store.

    Args:
        paths: ordered list of paths to drive the commit editor over.
        message: the commit message.
        file_data: dict mapping path -> (base_text, new_text, action),
            where action is 'modify', 'add' or 'delete'.
        base_revision: the revision these edits are based on.
        dirs: collection of paths that are directories, not files.
    """
    self.init_ra_and_client()
    commit_info = []
    def commit_cb(_commit_info, pool):
        # called by svn with the new revision's info once committed
        commit_info.append(_commit_info)
    editor, edit_baton = ra.get_commit_editor2(self.ra,
                                               message,
                                               commit_cb,
                                               None,
                                               False)
    checksum = []  # NOTE(review): never used anywhere in this method
    def driver_cb(parent, path, pool):
        if path in dirs:
            # NOTE(review): `baton` is a local that has not been
            # assigned on this code path, so directory entries raise
            # UnboundLocalError here; directory adds are listed as
            # unhandled in the TODO file.
            return baton
        base_text, new_text, action = file_data[path]
        compute_delta = True
        if action == 'modify':
            baton = editor.open_file(path, parent, base_revision, pool)
        elif action == 'add':
            try:
                baton = editor.add_file(path, parent, None, -1, pool)
            except (core.SubversionException, TypeError), e:
                print e.message
                raise
        elif action == 'delete':
            baton = editor.delete_entry(path, base_revision, parent, pool)
            # a delete carries no text; skip the textdelta below
            compute_delta = False

        if compute_delta:
            handler, wh_baton = editor.apply_textdelta(baton, None,
                                                       self.pool)

            # stream the full-text difference from base to new
            txdelta_stream = delta.svn_txdelta(
                cStringIO.StringIO(base_text), cStringIO.StringIO(new_text),
                self.pool)
            delta.svn_txdelta_send_txstream(txdelta_stream, handler,
                                            wh_baton, pool)

    delta.path_driver(editor, edit_baton, base_revision, paths, driver_cb,
                      self.pool)
    editor.close_edit(edit_baton, self.pool)
|
||||
|
||||
def get_replay(self, revision, editor, oldest_rev_i_have=0):
|
||||
# this method has a tendency to chew through RAM if you don't re-init
|
||||
self.init_ra_and_client()
|
||||
e_ptr, e_baton = delta.make_editor(editor)
|
||||
try:
|
||||
ra.replay(self.ra, revision, oldest_rev_i_have, True, e_ptr,
|
||||
e_baton, self.pool)
|
||||
except core.SubversionException, e:
|
||||
# can I depend on this number being constant?
|
||||
if (e.message == "Server doesn't support the replay command"
|
||||
or e.apr_err == 170003):
|
||||
raise SubversionRepoCanNotReplay, ('This Subversion server '
|
||||
'is older than 1.4.0, and cannot satisfy replay requests.')
|
||||
else:
|
||||
raise
|
||||
|
||||
def get_unified_diff(self, path, revision, other_path=None, other_rev=None,
                     deleted=True, ignore_type=False):
    """Return a unidiff of `path` at `revision` against `revision - 1`.

    Args:
        path: repo-relative path, no leading slash.
        revision: the new side of the diff.
        other_path: old-side path; defaults to `path`.
        other_rev: old-side revision; defaults to `revision - 1`.
        deleted: passed through to svn_client_diff3.
        ignore_type: passed through to svn_client_diff3.

    Raises:
        AssertionError: if svn wrote anything to its error stream.
    """
    # works around an svn server keeping too many open files (observed
    # in an svnserve from the 1.2 era)
    self.init_ra_and_client()

    old_cwd = os.getcwd()
    assert path[0] != '/'
    url = self.svn_url + '/' + path
    url2 = url
    if other_path is not None:
        url2 = self.svn_url + '/' + other_path
    if other_rev is None:
        other_rev = revision - 1
    rev_old = core.svn_opt_revision_t()
    rev_old.kind = core.svn_opt_revision_number
    rev_old.value.number = other_rev
    rev_new = core.svn_opt_revision_t()
    rev_new.kind = core.svn_opt_revision_number
    rev_new.value.number = revision
    tmpdir = tempfile.mkdtemp('svnwrap_temp')
    # FIX: previously the tempdir (and the two open file objects)
    # leaked whenever diff3 raised or the error-output assert fired;
    # the try/finally blocks guarantee cleanup on every path.
    try:
        # hot tip: the swig bridge doesn't like StringIO for these bad boys
        out_path = os.path.join(tmpdir, 'diffout')
        error_path = os.path.join(tmpdir, 'differr')
        out = open(out_path, 'w')
        err = open(error_path, 'w')
        try:
            client.diff3([], url2, rev_old, url, rev_new, True, True,
                         deleted, ignore_type, 'UTF-8', out, err,
                         self.client_context, self.pool)
        finally:
            out.close()
            err.close()
        assert len(open(error_path).read()) == 0
        diff = open(out_path).read()
    finally:
        os.chdir(old_cwd)
        shutil.rmtree(tmpdir)
    return diff
|
||||
|
||||
def get_file(self, path, revision):
    """Return the contents of `path` at `revision` as a string.

    Fetches via ra.get_file into a temp file because the swig bridge
    doesn't like StringIO for these bad boys.
    """
    # FIX: removed a dead `out = cStringIO.StringIO()` that was
    # immediately overwritten, and made tempdir cleanup exception-safe.
    tmpdir = tempfile.mkdtemp('svnwrap_temp')
    try:
        out_path = os.path.join(tmpdir, 'diffout')
        out = open(out_path, 'w')
        try:
            ra.get_file(self.ra, path, revision, out, None)
        finally:
            out.close()
        return open(out_path).read()
    finally:
        shutil.rmtree(tmpdir)
|
||||
|
||||
def proplist(self, path, revision, recurse=False):
    """Return the svn properties under `path` at `revision`.

    Returns a dict mapping repo-relative path -> {property: value}.

    NOTE(review): the `recurse` argument is ignored; proplist2 is
    always invoked with recurse=True, matching the original behavior.
    Changing it could break callers that rely on recursion.
    """
    rev = core.svn_opt_revision_t()
    rev.kind = core.svn_opt_revision_number
    rev.value.number = revision
    # normalize: drop a single trailing and leading slash. FIX: the old
    # path[-1]/path[0] checks raised IndexError for an empty path.
    if path.endswith('/'):
        path = path[:-1]
    if path.startswith('/'):
        path = path[1:]
    pl = dict(client.proplist2(self.svn_url + '/' + path, rev, rev, True,
                               self.client_context, self.pool))
    # keys come back as full URLs; strip the repo URL prefix so callers
    # see repo-relative paths
    pl2 = {}
    for key, value in pl.iteritems():
        pl2[key[len(self.svn_url)+1:]] = value
    return pl2
|
||||
|
||||
def fetch_all_files_to_dir(self, path, revision, checkout_path):
    """Export everything under `path` at `revision` into checkout_path."""
    optrev = core.svn_opt_revision_t()
    optrev.kind = core.svn_opt_revision_number
    optrev.value.number = revision
    client.export3(self.svn_url+'/'+path, checkout_path, optrev,
                   optrev, True, True, True,
                   'LF',  # TODO: should be 'CRLF' on win32
                   self.client_context, self.pool)
|
||||
|
||||
class SubversionRepoCanNotReplay(Exception):
    """Raised when the remote svn server is too old to support replay."""
48
test_fetch_command.py
Normal file
48
test_fetch_command.py
Normal file
@ -0,0 +1,48 @@
|
||||
import fetch_command
|
||||
|
||||
two_empties = """Index: __init__.py
|
||||
===================================================================
|
||||
Index: bar/__init__.py
|
||||
===================================================================
|
||||
Index: bar/test_muhaha.py
|
||||
===================================================================
|
||||
--- bar/test_muhaha.py (revision 0)
|
||||
+++ bar/test_muhaha.py (revision 1)
|
||||
@@ -0,0 +1,2 @@
|
||||
+
|
||||
+blah blah blah, I'm a fake patch
|
||||
\ No newline at end of file
|
||||
"""
|
||||
|
||||
def test_empty_file_re():
    """empty_file_patch_wont_make_re must find exactly the two empty adds."""
    found = fetch_command.empty_file_patch_wont_make_re.findall(two_empties)
    assert sorted(found) == ['__init__.py', 'bar/__init__.py']
|
||||
|
||||
def test_any_matches_just_one():
    """A single Index: block must yield exactly one any_file_re match."""
    sample = '''Index: trunk/django/contrib/admin/urls/__init__.py
===================================================================
'''
    found = fetch_command.any_file_re.findall(sample)
    assert len(found) == 1
|
||||
|
||||
def test_any_file_re():
    """any_file_re must match every Index: entry in the sample diff."""
    found = fetch_command.any_file_re.findall(two_empties)
    assert sorted(found) == ['__init__.py', 'bar/__init__.py',
                             'bar/test_muhaha.py']
|
||||
binary_delta = """Index: trunk/functional_tests/doc_tests/test_doctest_fixtures/doctest_fixtures_fixtures.pyc
|
||||
===================================================================
|
||||
Cannot display: file marked as a binary type.
|
||||
svn:mime-type = application/octet-stream
|
||||
|
||||
Property changes on: trunk/functional_tests/doc_tests/test_doctest_fixtures/doctest_fixtures_fixtures.pyc
|
||||
___________________________________________________________________
|
||||
Added: svn:mime-type
|
||||
+ application/octet-stream
|
||||
|
||||
Index: trunk/functional_tests/doc_tests/test_doctest_fixtures/doctest_fixtures.rst
|
||||
===================================================================
|
||||
"""
|
||||
def test_binary_file_re():
    """binary_file_re must match only the binary .pyc Index: entry."""
    # FIX: dropped a leftover debug `print matches` statement.
    matches = fetch_command.binary_file_re.findall(binary_delta)
    assert matches == ['trunk/functional_tests/doc_tests/test_doctest_fixtures/doctest_fixtures_fixtures.pyc']
|
146
test_svnwrap.py
Normal file
146
test_svnwrap.py
Normal file
@ -0,0 +1,146 @@
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
import unittest
|
||||
|
||||
from nose import tools
|
||||
|
||||
import svnwrap
|
||||
|
||||
class TestBasicRepoLayout(unittest.TestCase):
    """Builds a throwaway svn repository with the conventional
    trunk/tags/branches layout by shelling out to svnadmin/svn, then
    checks that svnwrap.SubversionRepo reads it back correctly.

    NOTE(review): requires `svnadmin` and `svn` binaries on PATH; the
    spawnvp return codes are never checked.
    """
    def setUp(self):
        self.oldwd = os.getcwd()
        self.tmpdir = tempfile.mkdtemp('svnwrap_test')
        self.repo_path = '%s/testrepo' % self.tmpdir
        wc_path = '%s/testrepo_wc' % self.tmpdir
        # create the repository and check out a working copy of it
        os.spawnvp(os.P_WAIT, 'svnadmin', ['svnadmin', 'create',
                                           self.repo_path,])
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'checkout',
                                      'file://%s' % self.repo_path,
                                      wc_path,])
        os.chdir(wc_path)
        for d in ['branches', 'tags', 'trunk']:
            os.mkdir(os.path.join(wc_path, d))
        #r1: the standard empty layout directories
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'add', 'branches', 'tags', 'trunk'])
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'ci', '-m', 'Empty dirs.'])
        #r2: three files on trunk
        files = ['alpha', 'beta', 'delta']
        for f in files:
            open(os.path.join(wc_path, 'trunk', f), 'w').write('This is %s.\n' % f)
        os.chdir('trunk')
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'add']+files)
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'ci', '-m', 'Initial Files.'])
        os.chdir('..')
        #r3: tag trunk as rev1
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'cp', 'trunk', 'tags/rev1'])
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'ci', '-m', 'Tag rev 1.'])
        #r4: branch trunk to crazy
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'cp', 'trunk', 'branches/crazy'])
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'ci', '-m', 'Branch to crazy.'])

        #r5: new file on trunk after the crazy branch point
        open(os.path.join(wc_path, 'trunk', 'gamma'), 'w').write('This is %s.\n'
                                                                 % 'gamma')
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'add', 'trunk/gamma', ])
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'ci', '-m', 'Add gamma'])

        #r6: new file on the crazy branch only
        open(os.path.join(wc_path, 'branches', 'crazy', 'omega'),
             'w').write('This is %s.\n' % 'omega')
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'add', 'branches/crazy/omega', ])
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'ci', '-m', 'Add omega'])

        #r7: a second branch cut from the newer trunk
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'cp', 'trunk', 'branches/more_crazy'])
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'ci', '-m', 'Branch to more_crazy.'])

        self.repo = svnwrap.SubversionRepo('file://%s' % self.repo_path)

    def tearDown(self):
        shutil.rmtree(self.tmpdir)
        os.chdir(self.oldwd)


    def test_num_revs(self):
        # setUp makes exactly seven commits
        revs = list(self.repo.revisions())
        tools.eq_(len(revs), 7)
        r = revs[1]
        tools.eq_(r.revnum, 2)
        tools.eq_(sorted(r.paths.keys()),
                  ['trunk/alpha', 'trunk/beta', 'trunk/delta'])
        for r in revs:
            for p in r.paths:
                # make sure these paths are always non-absolute for sanity
                if p:
                    assert p[0] != '/'
        revs = list(self.repo.revisions(start=3))
        tools.eq_(len(revs), 4)


    def test_branches(self):
        # NOTE(review): tuple layout appears to be
        # (source path, source rev, branch creation rev) -- confirm
        # against SubversionRepo.branches.
        tools.eq_(self.repo.branches.keys(), ['crazy', 'more_crazy'])
        tools.eq_(self.repo.branches['crazy'], ('trunk', 2, 4))
        tools.eq_(self.repo.branches['more_crazy'], ('trunk', 5, 7))


    def test_tags(self):
        # NOTE(review): tags appear to map name -> (source path, source rev)
        tags = self.repo.tags
        tools.eq_(tags.keys(), ['rev1'])
        tools.eq_(tags['rev1'], ('trunk', 2))
|
||||
|
||||
class TestRootAsSubdirOfRepo(TestBasicRepoLayout):
    """Re-runs every TestBasicRepoLayout test with the project rooted
    in a subdirectory (dummyproj/) of the repository rather than at the
    repository root; tearDown and the test methods are inherited.

    NOTE(review): this setUp is almost entirely copy-pasted from the
    parent class and could be factored into a shared helper.
    """
    def setUp(self):
        self.oldwd = os.getcwd()
        self.tmpdir = tempfile.mkdtemp('svnwrap_test')
        self.repo_path = '%s/testrepo' % self.tmpdir
        wc_path = '%s/testrepo_wc' % self.tmpdir
        os.spawnvp(os.P_WAIT, 'svnadmin', ['svnadmin', 'create',
                                           self.repo_path,])
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'checkout',
                                      'file://%s' % self.repo_path,
                                      wc_path,])
        # point the wrapper at the subproject URL, not the repo root
        self.repo_path += '/dummyproj'
        os.chdir(wc_path)
        os.mkdir('dummyproj')
        os.chdir('dummyproj')
        wc_path += '/dummyproj'
        for d in ['branches', 'tags', 'trunk']:
            os.mkdir(os.path.join(wc_path, d))
        #r1: the whole dummyproj tree in one commit
        os.chdir('..')
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'add', 'dummyproj'])
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'ci', '-m', 'Empty dirs.'])
        os.chdir('dummyproj')
        #r2
        files = ['alpha', 'beta', 'delta']
        for f in files:
            open(os.path.join(wc_path, 'trunk', f), 'w').write('This is %s.\n' % f)
        os.chdir('trunk')
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'add']+files)
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'ci', '-m', 'Initial Files.'])
        os.chdir('..')
        #r3
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'cp', 'trunk', 'tags/rev1'])
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'ci', '-m', 'Tag rev 1.'])
        #r4
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'cp', 'trunk', 'branches/crazy'])
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'ci', '-m', 'Branch to crazy.'])

        #r5
        open(os.path.join(wc_path, 'trunk', 'gamma'), 'w').write('This is %s.\n'
                                                                 % 'gamma')
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'add', 'trunk/gamma', ])
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'ci', '-m', 'Add gamma'])

        #r6
        open(os.path.join(wc_path, 'branches', 'crazy', 'omega'),
             'w').write('This is %s.\n' % 'omega')
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'add', 'branches/crazy/omega', ])
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'ci', '-m', 'Add omega'])

        #r7
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'cp', 'trunk', 'branches/more_crazy'])
        os.spawnvp(os.P_WAIT, 'svn', ['svn', 'ci', '-m', 'Branch to more_crazy.'])

        self.repo = svnwrap.SubversionRepo('file://%s' % (self.repo_path))
|
35
util.py
Normal file
35
util.py
Normal file
@ -0,0 +1,35 @@
|
||||
import os
|
||||
import shutil
|
||||
|
||||
# Registry mapping subcommand name -> handler function; populated by
# the register_subcommand decorator below.
svn_subcommands = {}

def register_subcommand(name):
    """Return a decorator that registers a function as the handler
    for the svn subcommand `name`."""
    def decorator(fn):
        svn_subcommands[name] = fn
        return fn
    return decorator
|
||||
|
||||
|
||||
def wipe_all_files(hg_wc_path):
    """Delete everything inside a working copy except the .hg directory."""
    for name in os.listdir(hg_wc_path):
        if name == '.hg':
            continue
        full = os.path.join(hg_wc_path, name)
        if os.path.isdir(full):
            shutil.rmtree(full)
        else:
            os.remove(full)
|
||||
|
||||
|
||||
def remove_all_files_with_status(path, rev_paths, strip_path, status):
|
||||
for p in rev_paths:
|
||||
if rev_paths[p].action == status:
|
||||
if p.startswith(strip_path):
|
||||
fi = p[len(strip_path)+1:]
|
||||
if len(fi) > 0:
|
||||
fi = os.path.join(path, fi)
|
||||
if os.path.isfile(fi):
|
||||
os.remove(fi)
|
||||
print 'D %s' % fi
|
||||
elif os.path.isdir(fi):
|
||||
shutil.rmtree(fi)
|
||||
print 'D %s' % fi
|
106
utility_commands.py
Normal file
106
utility_commands.py
Normal file
@ -0,0 +1,106 @@
|
||||
from mercurial import cmdutil
|
||||
from mercurial import node
|
||||
from hgext import rebase
|
||||
|
||||
import util
|
||||
import hg_delta_editor
|
||||
|
||||
@util.register_subcommand('url')
def print_wc_url(ui, repo, hg_repo_path, **opts):
    """Print the svn URL this working copy tracks."""
    hge = hg_delta_editor.HgChangeReceiver(hg_repo_path, ui_=ui)
    ui.status(hge.url, '\n')
|
||||
|
||||
|
||||
@util.register_subcommand('parent')
def print_parent_revision(ui, repo, hg_repo_path, **opts):
    """Print the hg hash and svn revision info for the nearest svn
    parent of the current revision."""
    hge = hg_delta_editor.HgChangeReceiver(hg_repo_path, ui_=ui)
    # invert the revmap: hg node -> (svn revnum, branch)
    svn_commit_hashes = dict(zip(hge.revmap.itervalues(),
                                 hge.revmap.iterkeys()))
    parent_ctx = repo.parents()[0]
    outgoing = outgoing_revisions(ui, repo, hge, svn_commit_hashes)
    if outgoing:
        # skip over local-only commits to the last converted ancestor
        parent_ctx = repo[outgoing[-1]].parents()[0]
    if parent_ctx.node() == node.nullid:
        ui.status('Working copy seems to have no parent svn revision.\n')
    else:
        revnum, branch = svn_commit_hashes[parent_ctx.node()]
        ui.status('Working copy parent revision is %s: r%s on %s\n' %
                  (parent_ctx, revnum, branch or 'trunk'))
    return 0
|
||||
|
||||
|
||||
@util.register_subcommand('rebase')
def rebase_commits(ui, repo, hg_repo_path, **opts):
    """Rebases the current uncommitted revisions onto the top of the branch.

    Starting from the svn parent of the outgoing revisions, walks down
    children that are converted svn commits on the same svn branch to
    find the newest converted commit there, then rebases the local
    work onto it.
    """
    hge = hg_delta_editor.HgChangeReceiver(hg_repo_path,
                                           ui_=ui)
    # invert the revmap: hg node -> (svn revnum, branch)
    svn_commit_hashes = dict(zip(hge.revmap.itervalues(),
                                 hge.revmap.iterkeys()))
    o_r = outgoing_revisions(ui, repo, hge, svn_commit_hashes)
    if not o_r:
        ui.status('Nothing to rebase!\n')
        return 0
    if len(repo.parents()[0].children()):
        # only the head of local work may be rebased
        ui.status('Refusing to rebase non-head commit like a coward\n')
        return 0
    parent_rev = repo[o_r[-1]].parents()[0]
    target_rev = parent_rev
    p_n = parent_rev.node()
    exhausted_choices = False
    # walk toward the branch tip: follow any child that is a converted
    # svn commit on the same branch as the starting parent; stop when
    # no child qualifies
    while target_rev.children() and not exhausted_choices:
        for c in target_rev.children():
            exhausted_choices = True
            n = c.node()
            if (n in svn_commit_hashes and
                svn_commit_hashes[n][1] == svn_commit_hashes[p_n][1]):
                target_rev = c
                exhausted_choices = False
                break
    if parent_rev == target_rev:
        ui.status('Already up to date!\n')
        return 0
    # TODO this is really hacky, there must be a more direct way
    return rebase.rebase(ui, repo, dest=node.hex(target_rev.node()),
                         base=node.hex(repo.parents()[0].node()))
|
||||
|
||||
|
||||
@util.register_subcommand('outgoing')
def show_outgoing_to_svn(ui, repo, hg_repo_path, **opts):
    """Display the changesets not yet pushed back to the svn server."""
    hge = hg_delta_editor.HgChangeReceiver(hg_repo_path, ui_=ui)
    # invert the revmap: hg node -> (svn revnum, branch)
    svn_commit_hashes = dict(zip(hge.revmap.itervalues(),
                                 hge.revmap.iterkeys()))
    o_r = outgoing_revisions(ui, repo, hge, svn_commit_hashes)
    if not o_r:
        ui.status('No outgoing changes found.\n')
        return 0
    displayer = cmdutil.show_changeset(ui, repo, opts, buffered=False)
    # oldest first, like `hg outgoing`
    for rev in reversed(o_r):
        displayer.show(changenode=rev)
|
||||
|
||||
|
||||
def outgoing_revisions(ui, repo, hg_editor, reverse_map):
    """Return the hg nodes not yet converted to svn, newest first.

    Walks first parents from the working copy parent until reaching a
    node known to svn (present in reverse_map). Returns None when the
    working copy parent is itself an svn revision, or when the walk
    bottoms out at the null revision without finding one.
    """
    pending = []
    ctx = repo.parents()
    assert len(ctx) == 1
    ctx = ctx[0]
    if ctx.node() in reverse_map:
        return
    while (ctx.node() not in reverse_map
           and ctx.node() != node.nullid):
        pending.append(ctx.node())
        parents = ctx.parents()
        assert len(parents) == 1
        ctx = parents[0]
    if ctx.node() != node.nullid:
        return pending
|
Loading…
Reference in New Issue
Block a user