2009-09-16 06:34:53 +04:00
|
|
|
import StringIO
|
2010-02-06 19:57:06 +03:00
|
|
|
import difflib
|
2008-11-10 03:08:35 +03:00
|
|
|
import errno
|
2009-09-16 06:34:53 +04:00
|
|
|
import gettext
|
2009-05-22 17:12:31 +04:00
|
|
|
import imp
|
2008-10-08 03:42:43 +04:00
|
|
|
import os
|
2008-11-10 03:08:35 +03:00
|
|
|
import shutil
|
|
|
|
import stat
|
2009-09-16 06:34:53 +04:00
|
|
|
import subprocess
|
|
|
|
import sys
|
2008-11-15 01:18:24 +03:00
|
|
|
import tempfile
|
|
|
|
import unittest
|
2008-11-10 03:08:35 +03:00
|
|
|
import urllib
|
2008-10-08 03:42:43 +04:00
|
|
|
|
2010-07-14 17:39:21 +04:00
|
|
|
# Make the hgsubversion package under test importable ahead of any
# system-installed copy: prepend the repository root (the parent of the
# directory containing this file) to sys.path.
_rootdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, _rootdir)
|
2009-05-22 17:12:31 +04:00
|
|
|
|
2010-09-29 20:04:26 +04:00
|
|
|
from mercurial import cmdutil
|
2009-05-15 21:18:43 +04:00
|
|
|
from mercurial import commands
|
2009-09-16 06:34:53 +04:00
|
|
|
from mercurial import context
|
2011-06-15 16:44:14 +04:00
|
|
|
from mercurial import dispatch as dispatchmod
|
2008-10-09 05:09:28 +04:00
|
|
|
from mercurial import hg
|
2009-09-16 06:34:53 +04:00
|
|
|
from mercurial import i18n
|
2008-11-15 01:18:24 +03:00
|
|
|
from mercurial import node
|
|
|
|
from mercurial import ui
|
2010-11-30 04:54:11 +03:00
|
|
|
from mercurial import util
|
2010-01-30 01:36:14 +03:00
|
|
|
from mercurial import extensions
|
2008-10-09 05:09:28 +04:00
|
|
|
|
2010-10-03 01:44:37 +04:00
|
|
|
# Locate a usable SkipTest exception class.  Preference order: the stdlib
# unittest (Python >= 2.7), the unittest2 backport, then nose.  If none is
# available, SkipTest stays None and decorators erase tests instead.
SkipTest = getattr(unittest, 'SkipTest', None)
if SkipTest is None:
    try:
        from unittest2 import SkipTest
    except ImportError:
        try:
            from nose import SkipTest
        except ImportError:
            SkipTest = None
|
2010-10-03 01:44:37 +04:00
|
|
|
|
2009-05-14 06:39:39 +04:00
|
|
|
from hgsubversion import util
|
2009-05-12 22:12:32 +04:00
|
|
|
|
2009-07-29 18:26:29 +04:00
|
|
|
# Documentation for Subprocess.Popen() says:
#     "Note that on Windows, you cannot set close_fds to true and
#     also redirect the standard handles by setting stdin, stdout or
#     stderr."
# So close_fds may only be passed to Popen on non-Windows platforms.
canCloseFds = 'win32' not in sys.platform
|
2009-07-29 18:26:29 +04:00
|
|
|
|
2009-07-29 20:01:13 +04:00
|
|
|
if not 'win32' in sys.platform:
    def kill_process(popen_obj):
        """Forcibly terminate the child process (SIGKILL)."""
        os.kill(popen_obj.pid, 9)
else:
    # os.kill() is unavailable on Windows; go through the Win32 API with
    # ctypes instead.
    import ctypes
    from ctypes.wintypes import BOOL, DWORD, HANDLE, UINT

    def win_status_check(result, func, args):
        # Win32 convention: a zero/NULL result means failure; surface it as
        # an exception carrying GetLastError().
        if result == 0:
            raise ctypes.WinError()
        return args

    def WINAPI(returns, func, *params):
        """Annotate a kernel32 function with argument/return types and
        automatic error checking.

        params is a flat (type, name, type, name, ...) sequence; the name
        strings serve only as inline documentation.
        """
        assert len(params) % 2 == 0

        func.argtypes = tuple(params[0::2])
        # BUG FIX: the ctypes attribute is 'restype', not 'resvalue'.  The
        # old assignment was silently ignored, leaving the return type as
        # the default C int, which truncates 64-bit HANDLE values.
        func.restype = returns
        func.errcheck = win_status_check

        return func

    # dwDesiredAccess
    PROCESS_TERMINATE = 0x0001

    OpenProcess = WINAPI(HANDLE, ctypes.windll.kernel32.OpenProcess,
                         DWORD, 'dwDesiredAccess',
                         BOOL, 'bInheritHandle',
                         DWORD, 'dwProcessId',
                         )

    CloseHandle = WINAPI(BOOL, ctypes.windll.kernel32.CloseHandle,
                         HANDLE, 'hObject'
                         )

    TerminateProcess = WINAPI(BOOL, ctypes.windll.kernel32.TerminateProcess,
                              HANDLE, 'hProcess',
                              UINT, 'uExitCode'
                              )

    def kill_process(popen_obj):
        """Forcibly terminate the child process via TerminateProcess."""
        phnd = OpenProcess(PROCESS_TERMINATE, False, popen_obj.pid)
        TerminateProcess(phnd, 1)
        CloseHandle(phnd)
|
|
|
|
|
2009-01-22 05:27:51 +03:00
|
|
|
# Fixtures that need to be pulled at a subdirectory of the repo path
subdir = {'truncatedhistory.svndump': '/project2',
          'fetch_missing_files_subdir.svndump': '/foo',
          'empty_dir_in_trunk_not_repo_root.svndump': '/project',
          'project_root_not_repo_root.svndump': '/dummyproj',
          'project_name_with_space.svndump': '/project name',
          # The same non-ASCII path in unquoted (raw UTF-8) and quoted
          # (percent-encoded) form, so URL-quoting handling is exercised
          # both ways.
          'non_ascii_path_1.svndump': '/b\xC3\xB8b',
          'non_ascii_path_2.svndump': '/b%C3%B8b',
          }
|
|
|
|
|
2008-10-08 03:42:43 +04:00
|
|
|
# Absolute path to the directory holding the svnadmin dump test fixtures.
FIXTURES = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                        'fixtures')
|
|
|
|
|
2010-10-05 08:02:15 +04:00
|
|
|
|
|
|
|
def _makeskip(name, message):
|
2010-10-08 22:58:26 +04:00
|
|
|
if SkipTest:
|
|
|
|
def skip(*args, **kwargs):
|
|
|
|
raise SkipTest(message)
|
|
|
|
skip.__name__ = name
|
|
|
|
return skip
|
2010-10-05 08:02:15 +04:00
|
|
|
|
2010-10-05 08:03:12 +04:00
|
|
|
def requiresmodule(mod):
    """Skip a test if the specified module is not None."""
    def decorator(fn):
        # fn may already have been erased by an earlier decorator.
        if fn is None:
            return None
        if mod is None:
            return _makeskip(fn.__name__, 'missing required feature')
        return fn
    return decorator
|
|
|
|
|
|
|
|
|
2010-09-29 20:04:26 +04:00
|
|
|
def requiresoption(option):
    '''Skip a test if commands.clone does not take the specified option.'''
    if not isinstance(option, str):
        raise TypeError('requiresoption takes a string argument')

    def decorator(fn):
        cloneopts = cmdutil.findcmd('clone', commands.table)[1][1]
        if any(entry[1] == option for entry in cloneopts):
            return fn
        # no match found, so skip
        if SkipTest:
            return _makeskip(fn.__name__,
                             'test requires clone to accept %s' % option)
        # no skipping support, so erase decorated method
        return None
    return decorator
|
|
|
|
|
2009-10-17 07:33:41 +04:00
|
|
|
def filtermanifest(manifest):
    """Return manifest entries, minus the files hgsubversion manages."""
    ignored = util.ignoredfiles
    kept = []
    for entry in manifest:
        if entry not in ignored:
            kept.append(entry)
    return kept
|
2009-10-17 07:33:41 +04:00
|
|
|
|
2008-11-15 01:52:30 +03:00
|
|
|
def fileurl(path):
    """Convert a local filesystem path into a file:// URL.

    Handles Windows drive letters by prefixing them with a slash, so
    'C:\\x' becomes 'file:///C:/x'.
    """
    normalized = os.path.abspath(path).replace(os.sep, '/')
    drive, tail = os.path.splitdrive(normalized)
    prefix = '/' + drive if drive else ''
    return 'file://%s%s' % (prefix, tail)
|
|
|
|
|
2010-07-20 13:55:07 +04:00
|
|
|
def testui(stupid=False, layout='auto', startrev=0):
    """Return a quiet Mercurial ui preconfigured for hgsubversion tests."""
    asbool = {True: 'true', False: 'false'}.__getitem__
    u = ui.ui()
    u.setconfig('ui', 'quiet', asbool(True))
    u.setconfig('extensions', 'hgsubversion', '')
    u.setconfig('hgsubversion', 'stupid', asbool(stupid))
    u.setconfig('hgsubversion', 'layout', layout)
    u.setconfig('hgsubversion', 'startrev', startrev)
    return u
|
|
|
|
|
2008-10-08 03:42:43 +04:00
|
|
|
def load_svndump_fixture(path, fixture_name):
    '''Loads an svnadmin dump into a fresh repo at path, which should not
    already exist.
    '''
    if os.path.exists(path): rmtree(path)
    subprocess.call(['svnadmin', 'create', path, ],
                    stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    # FIX: the dump file handle was previously leaked; close it once the
    # load has finished (or failed).
    inp = open(os.path.join(FIXTURES, fixture_name))
    try:
        proc = subprocess.Popen(['svnadmin', 'load', path, ], stdin=inp,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        proc.communicate()
    finally:
        inp.close()
|
2008-10-09 05:09:28 +04:00
|
|
|
|
2011-06-15 16:44:14 +04:00
|
|
|
def dispatch(cmd):
    """Run a Mercurial command line, papering over the dispatch API change.

    Mercurial >= 1.9 wraps the argument list in a request object; older
    versions take the list directly.
    """
    try:
        req = dispatchmod.request(cmd)
        dispatchmod.dispatch(req)
    # FIX: 'except AttributeError, e:' is Python-2-only syntax and the
    # bound exception was never used; the bare form works on 2.x and 3.x.
    except AttributeError:
        # dispatch.request() does not exist before Mercurial 1.9.
        dispatchmod.dispatch(cmd)
|
|
|
|
|
2008-11-10 03:08:35 +03:00
|
|
|
def rmtree(path):
    """Remove the directory tree at path.

    Read-only files cannot be removed under Windows, so the write bit is
    set on every file first.  Files that vanish mid-walk are ignored.
    """
    for root, dirs, files in os.walk(path):
        for f in files:
            f = os.path.join(root, f)
            try:
                s = os.stat(f)
            # FIX: use the 'except ... as ...' form (valid on Python 2.6+
            # and 3.x) instead of the Python-2-only comma syntax.
            except OSError as e:
                if e.errno == errno.ENOENT:
                    # already gone; nothing to chmod
                    continue
                raise
            if (s.st_mode & stat.S_IWRITE) == 0:
                os.chmod(f, s.st_mode | stat.S_IWRITE)
    shutil.rmtree(path)
|
2008-11-15 01:18:24 +03:00
|
|
|
|
2010-07-14 17:39:24 +04:00
|
|
|
def _verify_our_modules():
    '''
    Verify that hgsubversion was imported from the correct location.

    The correct location is any location within the parent directory of the
    directory containing this file.
    '''

    here = os.path.abspath(__file__)
    prefix = 'hgsubversion.'
    for modname, module in sys.modules.iteritems():
        if not modname.startswith(prefix) or not module:
            continue

        modloc = module.__file__
        shared = os.path.commonprefix((here, modloc))
        assert shared.rstrip(os.sep) == _rootdir, (
            'Module location verification failed: hgsubversion was imported '
            'from the wrong path!'
        )
|
2009-09-16 06:33:41 +04:00
|
|
|
|
2011-06-15 16:44:14 +04:00
|
|
|
def hgclone(ui, source, dest, update=True):
    """Clone source into dest, adapting to the hg.clone() API change."""
    args = [ui, source, dest]
    if getattr(hg, 'peer', None):
        # Since 1.9 (d976542986d2) hg.clone() takes a peer-options dict.
        args.insert(1, {})
    src, dest = hg.clone(*args, update=update)
    return src, dest
|
|
|
|
|
2008-11-15 01:18:24 +03:00
|
|
|
class TestBase(unittest.TestCase):
    # Common fixture for hgsubversion tests: creates a scratch svn repo and
    # hg working copy per test, isolates the environment, and provides svn
    # and hg helpers.

    def setUp(self):
        # Fail fast if a system-wide hgsubversion shadowed the copy under
        # test.
        _verify_our_modules()

        # Save environment and translation state so tearDown can restore
        # them exactly.
        self.oldenv = dict([(k, os.environ.get(k, None),) for k in
                            ('LANG', 'LC_ALL', 'HGRCPATH',)])
        self.oldt = i18n.t
        # Force the C locale so command output is stable across machines.
        os.environ['LANG'] = os.environ['LC_ALL'] = 'C'
        i18n.t = gettext.translation('hg', i18n.localedir, fallback=True)

        self.oldwd = os.getcwd()
        self.tmpdir = tempfile.mkdtemp(
            'svnwrap_test', dir=os.environ.get('HGSUBVERSION_TEST_TEMP', None))
        # Point HGRCPATH at a throwaway hgrc that enables hgsubversion.
        self.hgrc = os.path.join(self.tmpdir, '.hgrc')
        os.environ['HGRCPATH'] = self.hgrc
        rc = open(self.hgrc, 'w')
        for l in '[extensions]', 'hgsubversion=':
            print >> rc, l

        self.repo_path = '%s/testrepo' % self.tmpdir
        self.wc_path = '%s/testrepo_wc' % self.tmpdir
        # Lazily-created svn checkout; see _add_svn_rev().
        self.svn_wc = None

        # Previously, we had a MockUI class that wrapped ui, and giving access
        # to the stream. The ui.pushbuffer() and ui.popbuffer() can be used
        # instead. Using the regular UI class, with all stderr redirected to
        # stdout ensures that the test setup is much more similar to usage
        # setups.
        self.patch = (ui.ui.write_err, ui.ui.write)
        setattr(ui.ui, self.patch[0].func_name, self.patch[1])

    def tearDown(self):
        # Restore environment variables saved in setUp.
        for var, val in self.oldenv.iteritems():
            if val is None:
                del os.environ[var]
            else:
                os.environ[var] = val
        i18n.t = self.oldt
        rmtree(self.tmpdir)
        os.chdir(self.oldwd)
        # Undo the write_err monkey-patch applied in setUp.
        setattr(ui.ui, self.patch[0].func_name, self.patch[0])

        _verify_our_modules()

    def ui(self, stupid=False, layout='auto'):
        # Convenience wrapper around the module-level testui().
        return testui(stupid, layout)

    def _load_fixture_and_fetch(self, fixture_name, subdir=None, stupid=False,
                                layout='auto', startrev=0, externals=None,
                                noupdate=True):
        # Load an svndump fixture and clone it into self.wc_path via the
        # hgsubversion clone command; returns the resulting hg repository.
        if layout == 'single':
            if subdir is None:
                subdir = 'trunk'
        elif subdir is None:
            subdir = ''
        load_svndump_fixture(self.repo_path, fixture_name)
        projectpath = self.repo_path
        if subdir:
            projectpath += '/' + subdir

        cmd = [
            'clone',
            '--layout=%s' % layout,
            '--startrev=%s' % startrev,
            fileurl(projectpath),
            self.wc_path,
            ]
        if stupid:
            cmd.append('--stupid')
        if noupdate:
            cmd.append('--noupdate')
        if externals:
            # --config must precede the command for dispatch to honor it.
            cmd[:0] = ['--config', 'hgsubversion.externals=%s' % externals]

        dispatch(cmd)

        return hg.repository(testui(), self.wc_path)

    def _add_svn_rev(self, changes):
        '''changes is a dict of filename -> contents'''
        # Check out the fixture repo on first use, then commit the given
        # file contents as a single svn revision.
        if self.svn_wc is None:
            self.svn_wc = os.path.join(self.tmpdir, 'testsvn_wc')
            subprocess.call([
                'svn', 'co', '-q', fileurl(self.repo_path),
                self.svn_wc
                ],
                stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

        for filename, contents in changes.iteritems():
            # filenames are / separated
            filename = filename.replace('/', os.path.sep)
            filename = os.path.join(self.svn_wc, filename)
            open(filename, 'w').write(contents)
            # may be redundant
            subprocess.call(['svn', 'add', '-q', filename],
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        subprocess.call([
            'svn', 'commit', '-q', self.svn_wc, '-m', 'test changes'],
            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    # define this as a property so that it reloads anytime we need it
    @property
    def repo(self):
        return hg.repository(testui(), self.wc_path)

    def pushrevisions(self, stupid=False, expected_extra_back=0):
        # Push outgoing hg changesets to svn and assert how many converted
        # revisions came back from the round-trip.
        before = len(self.repo)
        self.repo.ui.setconfig('hgsubversion', 'stupid', str(stupid))
        res = commands.push(self.repo.ui, self.repo)
        after = len(self.repo)
        self.assertEqual(expected_extra_back, after - before)
        return res

    def svnls(self, path, rev='HEAD'):
        # Recursively list path in the fixture repo at rev; returns sorted
        # entries with trailing slashes stripped.
        path = self.repo_path + '/' + path
        path = util.normalize_url(fileurl(path))
        args = ['svn', 'ls', '-r', rev, '-R', path]
        p = subprocess.Popen(args,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT)
        stdout, stderr = p.communicate()
        if p.returncode:
            raise Exception('svn ls failed on %s: %r' % (path, stderr))
        entries = [e.strip('/') for e in stdout.splitlines()]
        entries.sort()
        return entries

    def svnco(self, svnpath, rev, path):
        # Check out svnpath@rev from the fixture repo into wc_path/path,
        # creating intermediate directories as needed.
        path = os.path.join(self.wc_path, path)
        subpath = os.path.dirname(path)
        if not os.path.isdir(subpath):
            os.makedirs(subpath)
        svnpath = fileurl(self.repo_path + '/' + svnpath)
        args = ['svn', 'co', '-r', rev, svnpath, path]
        p = subprocess.Popen(args,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT)
        stdout, stderr = p.communicate()
        if p.returncode:
            raise Exception('svn co failed on %s: %r' % (svnpath, stderr))

    def svnpropget(self, path, prop, rev='HEAD'):
        # Return the value of svn property prop on path at rev, stripped.
        path = self.repo_path + '/' + path
        path = util.normalize_url(fileurl(path))
        args = ['svn', 'propget', '-r', str(rev), prop, path]
        p = subprocess.Popen(args,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT)
        stdout, stderr = p.communicate()
        if p.returncode:
            raise Exception('svn ls failed on %s: %r' % (path, stderr))
        return stdout.strip()

    def commitchanges(self, changes, parent='tip', message='automated test'):
        """Commit changes to mercurial directory

        'changes' is a sequence of tuples (source, dest, data). It can look
        like:
        - (source, source, data) to set source content to data
        - (source, dest, None) to set dest content to source one, and mark it as
        copied from source.
        - (source, dest, data) to set dest content to data, and mark it as copied
        from source.
        - (source, None, None) to remove source.
        """
        repo = self.repo
        parentctx = repo[parent]

        # Split the change list into modified/added paths and removals.
        changed, removed = [], []
        for source, dest, newdata in changes:
            if dest is None:
                removed.append(source)
            else:
                changed.append(dest)

        def filectxfn(repo, memctx, path):
            # memctx callback: produce the file context for each changed
            # path, or raise IOError(ENOENT) for removed ones.
            if path in removed:
                raise IOError(errno.ENOENT,
                              "File \"%s\" no longer exists" % path)
            entry = [e for e in changes if path == e[1]][0]
            source, dest, newdata = entry
            if newdata is None:
                # None means "copy content from the parent revision".
                newdata = parentctx[source].data()
            copied = None
            if source != dest:
                copied = source
            return context.memfilectx(path=dest,
                                      data=newdata,
                                      islink=False,
                                      isexec=False,
                                      copied=copied)

        ctx = context.memctx(repo,
                             (parentctx.node(), node.nullid),
                             message,
                             changed + removed,
                             filectxfn,
                             'an_author',
                             '2008-10-07 20:59:48 -0500')
        nodeid = repo.commitctx(ctx)
        # Reload the repo and update the working directory to the commit.
        repo = self.repo
        hg.clean(repo, nodeid)
        return nodeid

    def assertchanges(self, changes, ctx):
        """Assert that all 'changes' (as in defined in commitchanged())
        went into ctx.
        """
        for source, dest, data in changes:
            if dest is None:
                # removal: the file must be gone from ctx
                self.assertTrue(source not in ctx)
                continue
            self.assertTrue(dest in ctx)
            if data is None:
                data = ctx.parents()[0][source].data()
            self.assertEqual(ctx[dest].data(), data)
            if dest != source:
                # copies/renames must record their source
                copy = ctx[dest].renamed()
                self.assertEqual(copy[0], source)

    def assertMultiLineEqual(self, first, second, msg=None):
        """Assert that two multi-line strings are equal. (Based on Py3k code.)
        """
        # Prefer the stdlib implementation when available (Python >= 2.7).
        try:
            return super(TestBase, self).assertMultiLineEqual(first, second,
                                                              msg)
        except AttributeError:
            pass

        self.assert_(isinstance(first, str),
                     ('First argument is not a string'))
        self.assert_(isinstance(second, str),
                     ('Second argument is not a string'))

        if first != second:
            diff = ''.join(difflib.unified_diff(first.splitlines(True),
                                                second.splitlines(True),
                                                fromfile='a',
                                                tofile='b'))
            msg = '%s\n%s' % (msg or '', diff)
            raise self.failureException, msg

    def draw(self, repo):
        """Helper function displaying a repository graph, especially
        useful when debugging comprehensive tests.
        """
        # Could be more elegant, but it works with stock hg
        _ui = ui.ui()
        _ui.setconfig('extensions', 'graphlog', '')
        extensions.loadall(_ui)
        graphlog = extensions.find('graphlog')
        templ = """\
changeset: {rev}:{node|short}
branch: {branches}
tags: {tags}
summary: {desc|firstline}
files: {files}

"""
        graphlog.graphlog(_ui, repo, rev=None, template=templ)
|