do not attempt to translate ui.debug output

This commit is contained in:
Martin Geisler 2009-09-19 01:15:38 +02:00
parent b946928cc3
commit 9f1896c083
29 changed files with 120 additions and 120 deletions
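The rule the changeset applies: ui.debug() output is developer-facing and is left untranslated, while user-facing messages (ui.status, ui.note, util.Abort) keep the _() gettext wrapper from mercurial.i18n. A minimal sketch of the convention, using a toy stand-in for the ui object and the stdlib gettext alias rather than Mercurial's real classes:

import sys
from gettext import gettext as _  # stand-in for mercurial.i18n._ (assumption)

class FakeUI(object):
    """Toy stand-in for Mercurial's ui object, for illustration only."""
    debugflag = True

    def debug(self, msg):
        # Developer-facing diagnostics: plain strings, never wrapped in _().
        if self.debugflag:
            sys.stdout.write(msg)

    def status(self, msg):
        # User-facing output: callers pass gettext-wrapped strings.
        sys.stdout.write(msg)

ui = FakeUI()
ui.debug('acl: %s not enabled\n' % 'acl.allow')  # stays untranslated
ui.status(_('%d changesets found\n') % 3)        # marked for translation

In the hunks that follow, only the _() wrapper is removed from ui.debug() calls; the format strings and their arguments are unchanged.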

View File

@ -60,12 +60,12 @@ import getpass, urllib
def buildmatch(ui, repo, user, key):
'''return tuple of (match function, list enabled).'''
if not ui.has_section(key):
- ui.debug(_('acl: %s not enabled\n') % key)
+ ui.debug('acl: %s not enabled\n' % key)
return None
pats = [pat for pat, users in ui.configitems(key)
if user in users.replace(',', ' ').split()]
- ui.debug(_('acl: %s enabled, %d entries for user %s\n') %
+ ui.debug('acl: %s enabled, %d entries for user %s\n' %
(key, len(pats), user))
if pats:
return match.match(repo.root, '', pats)
@ -77,7 +77,7 @@ def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
raise util.Abort(_('config error - hook type "%s" cannot stop '
'incoming changesets') % hooktype)
if source not in ui.config('acl', 'sources', 'serve').split():
- ui.debug(_('acl: changes have source "%s" - skipping\n') % source)
+ ui.debug('acl: changes have source "%s" - skipping\n' % source)
return
user = None
@ -99,9 +99,9 @@ def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
ctx = repo[rev]
for f in ctx.files():
if deny and deny(f):
- ui.debug(_('acl: user %s denied on %s\n') % (user, f))
+ ui.debug('acl: user %s denied on %s\n' % (user, f))
raise util.Abort(_('acl: access denied for changeset %s') % ctx)
if allow and not allow(f):
- ui.debug(_('acl: user %s not allowed on %s\n') % (user, f))
+ ui.debug('acl: user %s not allowed on %s\n' % (user, f))
raise util.Abort(_('acl: access denied for changeset %s') % ctx)
- ui.debug(_('acl: allowing changeset %s\n') % ctx)
+ ui.debug('acl: allowing changeset %s\n' % ctx)

View File

@ -153,7 +153,7 @@ def churn(ui, repo, *pats, **opts):
maxname = max(len(k) for k, v in rate)
ttywidth = util.termwidth()
ui.debug(_("assuming %i character terminal\n") % ttywidth)
ui.debug("assuming %i character terminal\n" % ttywidth)
width = ttywidth - maxname - 2 - 6 - 2 - 2
for date, count in rate:

View File

@ -266,7 +266,7 @@ class commandline(object):
def _run(self, cmd, *args, **kwargs):
cmdline = self._cmdline(cmd, *args, **kwargs)
- self.ui.debug(_('running: %s\n') % (cmdline,))
+ self.ui.debug('running: %s\n' % (cmdline,))
self.prerun()
try:
return util.popen(cmdline)

View File

@ -199,7 +199,7 @@ def createlog(ui, directory=None, root="", rlog=True, cache=None):
cmd = [util.shellquote(arg) for arg in cmd]
ui.note(_("running %s\n") % (' '.join(cmd)))
ui.debug(_("prefix=%r directory=%r root=%r\n") % (prefix, directory, root))
ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))
pfp = util.popen(' '.join(cmd))
peek = pfp.readline()
@ -378,7 +378,7 @@ def createlog(ui, directory=None, root="", rlog=True, cache=None):
e.revision[-1] == 1 and # 1.1 or 1.1.x.1
len(e.comment) == 1 and
file_added_re.match(e.comment[0])):
- ui.debug(_('found synthetic revision in %s: %r\n')
+ ui.debug('found synthetic revision in %s: %r\n'
% (e.rcs, e.comment[0]))
e.synthetic = True

View File

@ -75,7 +75,7 @@ class darcs_source(converter_source, commandline):
self.parents[child] = []
def after(self):
- self.ui.debug(_('cleaning up %s\n') % self.tmppath)
+ self.ui.debug('cleaning up %s\n' % self.tmppath)
shutil.rmtree(self.tmppath, ignore_errors=True)
def xml(self, cmd, **kwargs):

View File

@ -125,7 +125,7 @@ class gnuarch_source(converter_source, commandline):
break
def after(self):
- self.ui.debug(_('cleaning up %s\n') % self.tmppath)
+ self.ui.debug('cleaning up %s\n' % self.tmppath)
shutil.rmtree(self.tmppath, ignore_errors=True)
def getheads(self):
@ -195,7 +195,7 @@ class gnuarch_source(converter_source, commandline):
return os.system(cmdline)
def _update(self, rev):
- self.ui.debug(_('applying revision %s...\n') % rev)
+ self.ui.debug('applying revision %s...\n' % rev)
changeset, status = self.runlines('replay', '-d', self.tmppath,
rev)
if status:
@ -205,7 +205,7 @@ class gnuarch_source(converter_source, commandline):
self._obtainrevision(rev)
else:
old_rev = self.parents[rev][0]
- self.ui.debug(_('computing changeset between %s and %s...\n')
+ self.ui.debug('computing changeset between %s and %s...\n'
% (old_rev, rev))
self._parsechangeset(changeset, rev)
@ -254,10 +254,10 @@ class gnuarch_source(converter_source, commandline):
return changes, copies
def _obtainrevision(self, rev):
- self.ui.debug(_('obtaining revision %s...\n') % rev)
+ self.ui.debug('obtaining revision %s...\n' % rev)
output = self._execute('get', rev, self.tmppath)
self.checkexit(output)
- self.ui.debug(_('analyzing revision %s...\n') % rev)
+ self.ui.debug('analyzing revision %s...\n' % rev)
files = self._readcontents(self.tmppath)
self.changes[rev].add_files += files

View File

@ -55,12 +55,12 @@ class mercurial_sink(converter_sink):
self.filemapmode = False
def before(self):
- self.ui.debug(_('run hg sink pre-conversion action\n'))
+ self.ui.debug('run hg sink pre-conversion action\n')
self.wlock = self.repo.wlock()
self.lock = self.repo.lock()
def after(self):
- self.ui.debug(_('run hg sink post-conversion action\n'))
+ self.ui.debug('run hg sink post-conversion action\n')
self.lock.release()
self.wlock.release()
@ -348,10 +348,10 @@ class mercurial_source(converter_source):
self.convertfp.flush()
def before(self):
- self.ui.debug(_('run hg source pre-conversion action\n'))
+ self.ui.debug('run hg source pre-conversion action\n')
def after(self):
- self.ui.debug(_('run hg source post-conversion action\n'))
+ self.ui.debug('run hg source post-conversion action\n')
def hasnativeorder(self):
return True

View File

@ -531,7 +531,7 @@ class svn_source(converter_source):
"""
if not path.startswith(self.rootmodule):
# Requests on foreign branches may be forbidden at server level
- self.ui.debug(_('ignoring foreign branch %r\n') % path)
+ self.ui.debug('ignoring foreign branch %r\n' % path)
return None
if not stop:
@ -559,7 +559,7 @@ class svn_source(converter_source):
if not path.startswith(p) or not paths[p].copyfrom_path:
continue
newpath = paths[p].copyfrom_path + path[len(p):]
self.ui.debug(_("branch renamed from %s to %s at %d\n") %
self.ui.debug("branch renamed from %s to %s at %d\n" %
(path, newpath, revnum))
path = newpath
break
@ -567,7 +567,7 @@ class svn_source(converter_source):
stream.close()
if not path.startswith(self.rootmodule):
- self.ui.debug(_('ignoring foreign branch %r\n') % path)
+ self.ui.debug('ignoring foreign branch %r\n' % path)
return None
return self.revid(dirent.created_rev, path)
@ -579,7 +579,7 @@ class svn_source(converter_source):
prevmodule = self.prevmodule
if prevmodule is None:
prevmodule = ''
self.ui.debug(_("reparent to %s\n") % svnurl)
self.ui.debug("reparent to %s\n" % svnurl)
svn.ra.reparent(self.ra, svnurl)
self.prevmodule = module
return prevmodule
@ -612,14 +612,14 @@ class svn_source(converter_source):
copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
if not copyfrom_path:
continue
self.ui.debug(_("copied to %s from %s@%s\n") %
self.ui.debug("copied to %s from %s@%s\n" %
(entrypath, copyfrom_path, ent.copyfrom_rev))
copies[self.recode(entrypath)] = self.recode(copyfrom_path)
elif kind == 0: # gone, but had better be a deleted *file*
self.ui.debug(_("gone from %s\n") % ent.copyfrom_rev)
self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
pmodule, prevnum = self.revsplit(parents[0])[1:]
parentpath = pmodule + "/" + entrypath
self.ui.debug(_("entry %s\n") % parentpath)
self.ui.debug("entry %s\n" % parentpath)
# We can avoid the reparent calls if the module has
# not changed but it probably does not worth the pain.
@ -646,7 +646,7 @@ class svn_source(converter_source):
del copies[childpath]
entries.append(childpath)
else:
- self.ui.debug(_('unknown path in revision %d: %s\n') % \
+ self.ui.debug('unknown path in revision %d: %s\n' % \
(revnum, path))
elif kind == svn.core.svn_node_dir:
# If the directory just had a prop change,
@ -679,7 +679,7 @@ class svn_source(converter_source):
if not copyfrompath:
continue
copyfrom[path] = ent
self.ui.debug(_("mark %s came from %s:%d\n")
self.ui.debug("mark %s came from %s:%d\n"
% (path, copyfrompath, ent.copyfrom_rev))
children = self._find_children(ent.copyfrom_path, ent.copyfrom_rev)
children.sort()
@ -703,7 +703,7 @@ class svn_source(converter_source):
"""Return the parsed commit object or None, and True if
the revision is a branch root.
"""
self.ui.debug(_("parsing revision %d (%d changes)\n") %
self.ui.debug("parsing revision %d (%d changes)\n" %
(revnum, len(orig_paths)))
branched = False
@ -732,7 +732,7 @@ class svn_source(converter_source):
self.ui.note(_('found parent of branch %s at %d: %s\n') %
(self.module, prevnum, prevmodule))
else:
self.ui.debug(_("no copyfrom path, don't know what to do.\n"))
self.ui.debug("no copyfrom path, don't know what to do.\n")
paths = []
# filter out unrelated paths
@ -785,7 +785,7 @@ class svn_source(converter_source):
lastonbranch = True
break
if not paths:
- self.ui.debug(_('revision %d has no entries\n') % revnum)
+ self.ui.debug('revision %d has no entries\n' % revnum)
continue
cset, lastonbranch = parselogentry(paths, revnum, author,
date, message)
@ -867,7 +867,7 @@ class svn_source(converter_source):
return relative
# The path is outside our tracked tree...
- self.ui.debug(_('%r is not under %r, ignoring\n') % (path, module))
+ self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
return None
def _checkpath(self, path, revnum):

View File

@ -142,13 +142,13 @@ def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
cmdline = ('%s %s %s %s' %
(util.shellquote(diffcmd), ' '.join(diffopts),
util.shellquote(dir1), util.shellquote(dir2)))
- ui.debug(_('running %r in %s\n') % (cmdline, tmproot))
+ ui.debug('running %r in %s\n' % (cmdline, tmproot))
util.system(cmdline, cwd=tmproot)
for copy_fn, working_fn, mtime in fns_and_mtime:
if os.path.getmtime(copy_fn) != mtime:
- ui.debug(_('file changed while diffing. '
- 'Overwriting: %s (src: %s)\n') % (working_fn, copy_fn))
+ ui.debug('file changed while diffing. '
+ 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
util.copyfile(copy_fn, working_fn)
return 1

View File

@ -229,10 +229,10 @@ def hook(ui, repo, hooktype, node=None, url=None, **kwargs):
n = bin(node)
cia = hgcia(ui, repo)
if not cia.user:
- ui.debug(_('cia: no user specified'))
+ ui.debug('cia: no user specified')
return
if not cia.project:
- ui.debug(_('cia: no project specified'))
+ ui.debug('cia: no project specified')
return
if hooktype == 'changegroup':
start = repo.changelog.rev(n)

View File

@ -308,7 +308,7 @@ def view(ui, repo, *etc, **opts):
os.chdir(repo.root)
optstr = ' '.join(['--%s %s' % (k, v) for k, v in opts.iteritems() if v])
cmd = ui.config("hgk", "path", "hgk") + " %s %s" % (optstr, " ".join(etc))
ui.debug(_("running %s\n") % cmd)
ui.debug("running %s\n" % cmd)
util.system(cmd)
cmdtable = {

View File

@ -31,7 +31,7 @@ def start_server(function):
'removing it)\n'))
os.unlink(os.path.join(self.root, '.hg', 'inotify.sock'))
if err[0] in (errno.ECONNREFUSED, errno.ENOENT) and autostart:
- self.ui.debug(_('(starting inotify server)\n'))
+ self.ui.debug('(starting inotify server)\n')
try:
try:
server.start(self.ui, self.dirstate, self.root)
@ -50,7 +50,7 @@ def start_server(function):
'server: %s\n') % err[-1])
elif err[0] in (errno.ECONNREFUSED, errno.ENOENT):
# silently ignore normal errors if autostart is False
- self.ui.debug(_('(inotify server not running)\n'))
+ self.ui.debug('(inotify server not running)\n')
else:
self.ui.warn(_('failed to contact inotify server: %s\n')
% err[-1])

View File

@ -354,7 +354,7 @@ def demo(ui, repo, *args, **opts):
repo.commit(text=msg)
ui.status(_('\n\tkeywords expanded\n'))
ui.write(repo.wread(fn))
- ui.debug(_('\nremoving temporary repository %s\n') % tmpdir)
+ ui.debug('\nremoving temporary repository %s\n' % tmpdir)
shutil.rmtree(tmpdir, ignore_errors=True)
def expand(ui, repo, *pats, **opts):

View File

@ -321,7 +321,7 @@ class queue(object):
if bad:
raise util.Abort(bad)
guards = sorted(set(guards))
- self.ui.debug(_('active guards: %s\n') % ' '.join(guards))
+ self.ui.debug('active guards: %s\n' % ' '.join(guards))
self.active_guards = guards
self.guards_dirty = True

View File

@ -276,10 +276,10 @@ def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
ctx = repo[node]
if not n.subs:
- ui.debug(_('notify: no subscribers to repository %s\n') % n.root)
+ ui.debug('notify: no subscribers to repository %s\n' % n.root)
return
if n.skipsource(source):
- ui.debug(_('notify: changes have source "%s" - skipping\n') % source)
+ ui.debug('notify: changes have source "%s" - skipping\n' % source)
return
ui.pushbuffer()

View File

@ -35,7 +35,7 @@ def rebasemerge(repo, rev, first=False):
if not first:
ancestor.ancestor = newancestor
else:
repo.ui.debug(_("first revision, do not change ancestor\n"))
repo.ui.debug("first revision, do not change ancestor\n")
stats = merge.update(repo, rev, True, True, False)
return stats
@ -149,7 +149,7 @@ def concludenode(repo, rev, p1, p2, state, collapse, last=False, skipped=None,
"""Skip commit if collapsing has been required and rev is not the last
revision, commit otherwise
"""
repo.ui.debug(_(" set parents\n"))
repo.ui.debug(" set parents\n")
if collapse and not last:
repo.dirstate.setparents(repo[p1].node())
return None
@ -187,23 +187,23 @@ def concludenode(repo, rev, p1, p2, state, collapse, last=False, skipped=None,
def rebasenode(repo, rev, target, state, skipped, targetancestors, collapse,
extrafn):
'Rebase a single revision'
repo.ui.debug(_("rebasing %d:%s\n") % (rev, repo[rev]))
repo.ui.debug("rebasing %d:%s\n" % (rev, repo[rev]))
p1, p2 = defineparents(repo, rev, target, state, targetancestors)
repo.ui.debug(_(" future parents are %d and %d\n") % (repo[p1].rev(),
repo.ui.debug(" future parents are %d and %d\n" % (repo[p1].rev(),
repo[p2].rev()))
# Merge phase
if len(repo.parents()) != 2:
# Update to target and merge it with local
if repo['.'].rev() != repo[p1].rev():
repo.ui.debug(_(" update to %d:%s\n") % (repo[p1].rev(), repo[p1]))
repo.ui.debug(" update to %d:%s\n" % (repo[p1].rev(), repo[p1]))
merge.update(repo, p1, False, True, False)
else:
repo.ui.debug(_(" already in target\n"))
repo.ui.debug(" already in target\n")
repo.dirstate.write()
repo.ui.debug(_(" merge against %d:%s\n") % (repo[rev].rev(), repo[rev]))
repo.ui.debug(" merge against %d:%s\n" % (repo[rev].rev(), repo[rev]))
first = repo[rev].rev() == repo[min(state)].rev()
stats = rebasemerge(repo, rev, first)
@ -211,7 +211,7 @@ def rebasenode(repo, rev, target, state, skipped, targetancestors, collapse,
raise util.Abort(_('fix unresolved conflicts with hg resolve then '
'run hg rebase --continue'))
else: # we have an interrupted rebase
- repo.ui.debug(_('resuming interrupted rebase\n'))
+ repo.ui.debug('resuming interrupted rebase\n')
# Keep track of renamed files in the revision that is going to be rebased
# Here we simulate the copies and renames in the source changeset
@ -234,7 +234,7 @@ def rebasenode(repo, rev, target, state, skipped, targetancestors, collapse,
else:
if not collapse:
repo.ui.note(_('no changes, revision %d skipped\n') % rev)
- repo.ui.debug(_('next revision set to %s\n') % p1)
+ repo.ui.debug('next revision set to %s\n' % p1)
skipped.add(rev)
state[rev] = p1
@ -280,7 +280,7 @@ def updatemq(repo, state, skipped, **opts):
mqrebase = {}
for p in repo.mq.applied:
if repo[p.rev].rev() in state:
- repo.ui.debug(_('revision %d is an mq patch (%s), finalize it.\n') %
+ repo.ui.debug('revision %d is an mq patch (%s), finalize it.\n' %
(repo[p.rev].rev(), p.name))
mqrebase[repo[p.rev].rev()] = (p.name, isagitpatch(repo, p.name))
@ -290,7 +290,7 @@ def updatemq(repo, state, skipped, **opts):
# We must start import from the newest revision
for rev in sorted(mqrebase, reverse=True):
if rev not in skipped:
- repo.ui.debug(_('import mq patch %d (%s)\n')
+ repo.ui.debug('import mq patch %d (%s)\n'
% (state[rev], mqrebase[rev][0]))
repo.mq.qimport(repo, (), patchname=mqrebase[rev][0],
git=mqrebase[rev][1],rev=[str(state[rev])])
@ -311,7 +311,7 @@ def storestatus(repo, originalwd, target, state, collapse, keep, keepbranches,
newrev = repo[v].hex()
f.write("%s:%s\n" % (oldrev, newrev))
f.close()
- repo.ui.debug(_('rebase status stored\n'))
+ repo.ui.debug('rebase status stored\n')
def clearstatus(repo):
'Remove the status files'
@ -342,7 +342,7 @@ def restorestatus(repo):
else:
oldrev, newrev = l.split(':')
state[repo[oldrev].rev()] = repo[newrev].rev()
- repo.ui.debug(_('rebase status resumed\n'))
+ repo.ui.debug('rebase status resumed\n')
return originalwd, target, state, collapse, keep, keepbranches, external
except IOError, err:
if err.errno != errno.ENOENT:
@ -392,12 +392,12 @@ def buildstate(repo, dest, src, base, collapse):
cwd = repo['.'].rev()
if cwd == dest:
- repo.ui.debug(_('already working on current\n'))
+ repo.ui.debug('already working on current\n')
return None
targetancestors = set(repo.changelog.ancestors(dest))
if cwd in targetancestors:
- repo.ui.debug(_('already working on the current branch\n'))
+ repo.ui.debug('already working on the current branch\n')
return None
cwdancestors = set(repo.changelog.ancestors(cwd))
@ -405,7 +405,7 @@ def buildstate(repo, dest, src, base, collapse):
rebasingbranch = cwdancestors - targetancestors
source = min(rebasingbranch)
- repo.ui.debug(_('rebase onto %d starting from %d\n') % (dest, source))
+ repo.ui.debug('rebase onto %d starting from %d\n' % (dest, source))
state = dict.fromkeys(repo.changelog.descendants(source), nullrev)
external = nullrev
if collapse:
@ -429,8 +429,8 @@ def pullrebase(orig, ui, repo, *args, **opts):
if opts.get('rebase'):
if opts.get('update'):
del opts['update']
- ui.debug(_('--update and --rebase are not compatible, ignoring '
- 'the update flag\n'))
+ ui.debug('--update and --rebase are not compatible, ignoring '
+ 'the update flag\n')
cmdutil.bail_if_changed(repo)
revsprepull = len(repo)

View File

@ -463,7 +463,7 @@ def dorecord(ui, repo, committer, *pats, **opts):
fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
dir=backupdir)
os.close(fd)
- ui.debug(_('backup %r as %r\n') % (f, tmpname))
+ ui.debug('backup %r as %r\n' % (f, tmpname))
util.copyfile(repo.wjoin(f), tmpname)
backups[f] = tmpname
@ -481,7 +481,7 @@ def dorecord(ui, repo, committer, *pats, **opts):
# 3b. (apply)
if dopatch:
try:
- ui.debug(_('applying patch\n'))
+ ui.debug('applying patch\n')
ui.debug(fp.getvalue())
pfiles = {}
patch.internalpatch(fp, ui, 1, repo.root, files=pfiles,
@ -512,7 +512,7 @@ def dorecord(ui, repo, committer, *pats, **opts):
# 5. finally restore backed-up files
try:
for realname, tmpname in backups.iteritems():
- ui.debug(_('restoring %r to %r\n') % (tmpname, realname))
+ ui.debug('restoring %r to %r\n' % (tmpname, realname))
util.copyfile(tmpname, repo.wjoin(realname))
os.unlink(tmpname)
os.rmdir(backupdir)

View File

@ -142,6 +142,6 @@ def reposetup(ui, repo):
for f in funcs.split():
wrapname(f, wrapper)
wrapname("mercurial.osutil.listdir", wrapperforlistdir)
ui.debug(_("[win32mbcs] activated with encoding: %s\n")
ui.debug("[win32mbcs] activated with encoding: %s\n"
% encoding.encoding)

View File

@ -1789,7 +1789,7 @@ def import_(ui, repo, patch1, *patches, **opts):
else:
# launch the editor
message = None
- ui.debug(_('message:\n%s\n') % message)
+ ui.debug('message:\n%s\n' % message)
wp = repo.parents()
if opts.get('exact'):

View File

@ -144,16 +144,16 @@ def copies(repo, c1, c2, ca, checkdirs=False):
elif of in ma:
diverge.setdefault(of, []).append(f)
repo.ui.debug(_(" searching for copies back to rev %d\n") % limit)
repo.ui.debug(" searching for copies back to rev %d\n" % limit)
u1 = _nonoverlap(m1, m2, ma)
u2 = _nonoverlap(m2, m1, ma)
if u1:
repo.ui.debug(_(" unmatched files in local:\n %s\n")
repo.ui.debug(" unmatched files in local:\n %s\n"
% "\n ".join(u1))
if u2:
repo.ui.debug(_(" unmatched files in other:\n %s\n")
repo.ui.debug(" unmatched files in other:\n %s\n"
% "\n ".join(u2))
for f in u1:
@ -169,7 +169,7 @@ def copies(repo, c1, c2, ca, checkdirs=False):
diverge2.update(fl) # reverse map for below
if fullcopy:
repo.ui.debug(_(" all copies found (* = to merge, ! = divergent):\n"))
repo.ui.debug(" all copies found (* = to merge, ! = divergent):\n")
for f in fullcopy:
note = ""
if f in copy: note += "*"
@ -180,7 +180,7 @@ def copies(repo, c1, c2, ca, checkdirs=False):
if not fullcopy or not checkdirs:
return copy, diverge
repo.ui.debug(_(" checking for directory renames\n"))
repo.ui.debug(" checking for directory renames\n")
# generate a directory move map
d1, d2 = _dirs(m1), _dirs(m2)
@ -216,7 +216,7 @@ def copies(repo, c1, c2, ca, checkdirs=False):
return copy, diverge
for d in dirmove:
repo.ui.debug(_(" dir %s -> %s\n") % (d, dirmove[d]))
repo.ui.debug(" dir %s -> %s\n" % (d, dirmove[d]))
# check unaccounted nonoverlapping files against directory moves
for f in u1 + u2:
@ -227,7 +227,7 @@ def copies(repo, c1, c2, ca, checkdirs=False):
df = dirmove[d] + f[len(d):]
if df not in copy:
copy[f] = df
repo.ui.debug(_(" file %s -> %s\n") % (f, copy[f]))
repo.ui.debug(" file %s -> %s\n" % (f, copy[f]))
break
return copy, diverge

View File

@ -214,7 +214,7 @@ class cmdalias(object):
def __call__(self, ui, *args, **opts):
if self.shadows:
ui.debug(_("alias '%s' shadows command\n") % self.name)
ui.debug("alias '%s' shadows command\n" % self.name)
return self.fn(ui, *args, **opts)

View File

@ -140,7 +140,7 @@ def filemerge(repo, mynode, orig, fcd, fco, fca):
binary = isbin(fcd) or isbin(fco) or isbin(fca)
symlink = 'l' in fcd.flags() + fco.flags()
tool, toolpath = _picktool(repo, ui, fd, binary, symlink)
ui.debug(_("picked tool '%s' for %s (binary %s symlink %s)\n") %
ui.debug("picked tool '%s' for %s (binary %s symlink %s)\n" %
(tool, fd, binary, symlink))
if not tool or tool == 'internal:prompt':
@ -170,13 +170,13 @@ def filemerge(repo, mynode, orig, fcd, fco, fca):
else:
ui.status(_("merging %s\n") % fd)
ui.debug(_("my %s other %s ancestor %s\n") % (fcd, fco, fca))
ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))
# do we attempt to simplemerge first?
if _toolbool(ui, tool, "premerge", not (binary or symlink)):
r = simplemerge.simplemerge(ui, a, b, c, quiet=True)
if not r:
ui.debug(_(" premerge successful\n"))
ui.debug(" premerge successful\n")
os.unlink(back)
os.unlink(b)
os.unlink(c)

View File

@ -35,7 +35,7 @@ class httprepository(repo.repository):
self._url, authinfo = url.getauthinfo(path)
self.ui = ui
- self.ui.debug(_('using %s\n') % self._url)
+ self.ui.debug('using %s\n' % self._url)
self.urlopener = url.opener(ui, authinfo)
@ -56,7 +56,7 @@ class httprepository(repo.repository):
self.caps = set(self.do_read('capabilities').split())
except error.RepoError:
self.caps = set()
- self.ui.debug(_('capabilities: %s\n') %
+ self.ui.debug('capabilities: %s\n' %
(' '.join(self.caps or ['none'])))
return self.caps
@ -68,21 +68,21 @@ class httprepository(repo.repository):
def do_cmd(self, cmd, **args):
data = args.pop('data', None)
headers = args.pop('headers', {})
self.ui.debug(_("sending %s command\n") % cmd)
self.ui.debug("sending %s command\n" % cmd)
q = {"cmd": cmd}
q.update(args)
qs = '?%s' % urllib.urlencode(q)
cu = "%s%s" % (self._url, qs)
try:
if data:
self.ui.debug(_("sending %s bytes\n") % len(data))
self.ui.debug("sending %s bytes\n" % len(data))
resp = self.urlopener.open(urllib2.Request(cu, data, headers))
except urllib2.HTTPError, inst:
if inst.code == 401:
raise util.Abort(_('authorization failed'))
raise
except httplib.HTTPException, inst:
- self.ui.debug(_('http error while sending %s command\n') % cmd)
+ self.ui.debug('http error while sending %s command\n' % cmd)
self.ui.traceback()
raise IOError(None, inst)
except IndexError:
@ -105,7 +105,7 @@ class httprepository(repo.repository):
if not (proto.startswith('application/mercurial-') or
proto.startswith('text/plain') or
proto.startswith('application/hg-changegroup')):
self.ui.debug(_("requested URL: '%s'\n") % url.hidepassword(cu))
self.ui.debug("requested URL: '%s'\n" % url.hidepassword(cu))
raise error.RepoError(_("'%s' does not appear to be an hg repository")
% safeurl)

View File

@ -527,7 +527,7 @@ class localrepository(repo.repository):
for mf, fn, cmd in self.filterpats[filter]:
if mf(filename):
self.ui.debug(_("filtering %s through %s\n") % (filename, cmd))
self.ui.debug("filtering %s through %s\n" % (filename, cmd))
data = fn(data, cmd, ui=self.ui, repo=self, filename=filename)
break
@ -724,14 +724,14 @@ class localrepository(repo.repository):
# find source in nearest ancestor if we've lost track
if not crev:
self.ui.debug(_(" %s: searching for copy revision for %s\n") %
self.ui.debug(" %s: searching for copy revision for %s\n" %
(fname, cfname))
for ancestor in self['.'].ancestors():
if cfname in ancestor:
crev = ancestor[cfname].filenode()
break
self.ui.debug(_(" %s: copy %s:%s\n") % (fname, cfname, hex(crev)))
self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
meta["copy"] = cfname
meta["copyrev"] = hex(crev)
fparent1, fparent2 = nullid, newfparent
@ -1287,22 +1287,22 @@ class localrepository(repo.repository):
if n[0] in seen:
continue
self.ui.debug(_("examining %s:%s\n")
self.ui.debug("examining %s:%s\n"
% (short(n[0]), short(n[1])))
if n[0] == nullid: # found the end of the branch
pass
elif n in seenbranch:
self.ui.debug(_("branch already found\n"))
self.ui.debug("branch already found\n")
continue
elif n[1] and n[1] in m: # do we know the base?
self.ui.debug(_("found incomplete branch %s:%s\n")
self.ui.debug("found incomplete branch %s:%s\n"
% (short(n[0]), short(n[1])))
search.append(n[0:2]) # schedule branch range for scanning
seenbranch.add(n)
else:
if n[1] not in seen and n[1] not in fetch:
if n[2] in m and n[3] in m:
self.ui.debug(_("found new changeset %s\n") %
self.ui.debug("found new changeset %s\n" %
short(n[1]))
fetch.add(n[1]) # earliest unknown
for p in n[2:4]:
@ -1317,11 +1317,11 @@ class localrepository(repo.repository):
if r:
reqcnt += 1
self.ui.debug(_("request %d: %s\n") %
self.ui.debug("request %d: %s\n" %
(reqcnt, " ".join(map(short, r))))
for p in xrange(0, len(r), 10):
for b in remote.branches(r[p:p+10]):
self.ui.debug(_("received %s:%s\n") %
self.ui.debug("received %s:%s\n" %
(short(b[0]), short(b[1])))
unknown.append(b)
@ -1334,15 +1334,15 @@ class localrepository(repo.repository):
p = n[0]
f = 1
for i in l:
self.ui.debug(_("narrowing %d:%d %s\n") % (f, len(l), short(i)))
self.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
if i in m:
if f <= 2:
self.ui.debug(_("found new branch changeset %s\n") %
self.ui.debug("found new branch changeset %s\n" %
short(p))
fetch.add(p)
base[i] = 1
else:
self.ui.debug(_("narrowed branch search to %s:%s\n")
self.ui.debug("narrowed branch search to %s:%s\n"
% (short(p), short(i)))
newsearch.append((p, i))
break
@ -1361,10 +1361,10 @@ class localrepository(repo.repository):
else:
raise util.Abort(_("repository is unrelated"))
self.ui.debug(_("found new changesets starting at ") +
self.ui.debug("found new changesets starting at " +
" ".join([short(f) for f in fetch]) + "\n")
self.ui.debug(_("%d total queries\n") % reqcnt)
self.ui.debug("%d total queries\n" % reqcnt)
return base.keys(), list(fetch), heads
@ -1381,7 +1381,7 @@ class localrepository(repo.repository):
base = {}
self.findincoming(remote, base, heads, force=force)
self.ui.debug(_("common changesets up to ")
self.ui.debug("common changesets up to "
+ " ".join(map(short, base.keys())) + "\n")
remain = set(self.changelog.nodemap)
@ -1602,7 +1602,7 @@ class localrepository(repo.repository):
if self.ui.verbose or source == 'bundle':
self.ui.status(_("%d changesets found\n") % len(nodes))
if self.ui.debugflag:
self.ui.debug(_("list of changesets:\n"))
self.ui.debug("list of changesets:\n")
for node in nodes:
self.ui.debug("%s\n" % hex(node))
@ -1988,7 +1988,7 @@ class localrepository(repo.repository):
- number of heads stays the same: 1
"""
def csmap(x):
self.ui.debug(_("add changeset %s\n") % short(x))
self.ui.debug("add changeset %s\n" % short(x))
return len(cl)
def revmap(x):
@ -2034,7 +2034,7 @@ class localrepository(repo.repository):
f = changegroup.getchunk(source)
if not f:
break
self.ui.debug(_("adding %s revisions\n") % f)
self.ui.debug("adding %s revisions\n" % f)
fl = self.file(f)
o = len(fl)
chunkiter = changegroup.chunkiter(source)
@ -2067,7 +2067,7 @@ class localrepository(repo.repository):
if changesets > 0:
# forcefully update the on-disk branch cache
self.ui.debug(_("updating the branch cache\n"))
self.ui.debug("updating the branch cache\n")
self.branchtags()
self.hook("changegroup", node=hex(cl.node(clstart)),
source=srctype, url=url)
@ -2116,7 +2116,7 @@ class localrepository(repo.repository):
except (ValueError, TypeError):
raise error.ResponseError(
_('Unexpected response from remote server:'), l)
- self.ui.debug(_('adding %s (%s)\n') % (name, util.bytecount(size)))
+ self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
# for backwards compat, name was partially encoded
ofp = self.sopener(store.decodedir(name), 'w')
for chunk in util.filechunkiter(fp, limit=size):

View File

@ -161,8 +161,8 @@ def manifestmerge(repo, p1, p2, pa, overwrite, partial):
act("divergent renames", "dr", of, fl)
repo.ui.note(_("resolving manifests\n"))
repo.ui.debug(_(" overwrite %s partial %s\n") % (overwrite, bool(partial)))
repo.ui.debug(_(" ancestor %s local %s remote %s\n") % (pa, p1, p2))
repo.ui.debug(" overwrite %s partial %s\n" % (overwrite, bool(partial)))
repo.ui.debug(" ancestor %s local %s remote %s\n" % (pa, p1, p2))
m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
copied = set(copy.values())
@ -252,7 +252,7 @@ def applyupdates(repo, action, wctx, mctx):
f2, fd, flags, move = a[2:]
if f == '.hgsubstate': # merged internally
continue
repo.ui.debug(_("preserving %s for resolve of %s\n") % (f, fd))
repo.ui.debug("preserving %s for resolve of %s\n" % (f, fd))
fcl = wctx[f]
fco = mctx[f2]
fca = fcl.ancestor(fco) or repo.filectx(f, fileid=nullrev)
@ -263,7 +263,7 @@ def applyupdates(repo, action, wctx, mctx):
# remove renamed files after safely stored
for f in moves:
if util.lexists(repo.wjoin(f)):
repo.ui.debug(_("removing %s\n") % f)
repo.ui.debug("removing %s\n" % f)
os.unlink(repo.wjoin(f))
audit_path = util.path_auditor(repo.root)
@ -299,7 +299,7 @@ def applyupdates(repo, action, wctx, mctx):
merged += 1
util.set_flags(repo.wjoin(fd), 'l' in flags, 'x' in flags)
if f != fd and move and util.lexists(repo.wjoin(f)):
repo.ui.debug(_("removing %s\n") % f)
repo.ui.debug("removing %s\n" % f)
os.unlink(repo.wjoin(f))
elif m == "g": # get
flags = a[2]

View File

@ -93,12 +93,12 @@ def extract(ui, fileobj):
hgpatch = False
ignoretext = False
- ui.debug(_('found patch at byte %d\n') % m.start(0))
+ ui.debug('found patch at byte %d\n' % m.start(0))
diffs_seen += 1
cfp = cStringIO.StringIO()
for line in payload[:m.start(0)].splitlines():
if line.startswith('# HG changeset patch'):
- ui.debug(_('patch generated by hg export\n'))
+ ui.debug('patch generated by hg export\n')
hgpatch = True
# drop earlier commit message content
cfp.seek(0)
@ -1155,7 +1155,7 @@ def patch(patchname, ui, strip=1, cwd=None, files={}, eolmode='strict'):
return internalpatch(patchname, ui, strip, cwd, files, eolmode)
except NoHunks:
patcher = util.find_exe('gpatch') or util.find_exe('patch') or 'patch'
- ui.debug(_('no valid hunks found; trying with %r instead\n') %
+ ui.debug('no valid hunks found; trying with %r instead\n' %
patcher)
if util.needbinarypatch():
args.append('--binary')

View File

@ -75,7 +75,7 @@ class sshrepository(repo.repository):
if lines[-1] == "1\n" and l == "\n":
break
if l:
ui.debug(_("remote: "), l)
ui.debug("remote: ", l)
lines.append(l)
max_noise -= 1
else:
@ -113,7 +113,7 @@ class sshrepository(repo.repository):
__del__ = cleanup
def do_cmd(self, cmd, **args):
self.ui.debug(_("sending %s command\n") % cmd)
self.ui.debug("sending %s command\n" % cmd)
self.pipeo.write("%s\n" % cmd)
for k, v in args.iteritems():
self.pipeo.write("%s %d\n" % (k, len(v)))

View File

@ -46,7 +46,7 @@ def stream_out(repo, untrusted=False):
# get consistent snapshot of repo, lock during scan
lock = repo.lock()
try:
- repo.ui.debug(_('scanning\n'))
+ repo.ui.debug('scanning\n')
for name, ename, size in repo.store.walk():
# for backwards compat, name was partially encoded
entries.append((store.encodedir(name), size))
@ -57,11 +57,11 @@ def stream_out(repo, untrusted=False):
raise StreamException(2)
yield '0\n'
- repo.ui.debug(_('%d files, %d bytes to transfer\n') %
+ repo.ui.debug('%d files, %d bytes to transfer\n' %
(len(entries), total_bytes))
yield '%d %d\n' % (len(entries), total_bytes)
for name, size in entries:
- repo.ui.debug(_('sending %s (%d bytes)\n') % (name, size))
+ repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
yield '%s\0%d\n' % (name, size)
for chunk in util.filechunkiter(repo.sopener(name), limit=size):
yield chunk

View File

@ -197,7 +197,7 @@ class proxyhandler(urllib2.ProxyHandler):
proxyuser, proxypasswd or ''),
proxypath, proxyquery, proxyfrag))
proxies = {'http': proxyurl, 'https': proxyurl}
- ui.debug(_('proxying through http://%s:%s\n') %
+ ui.debug('proxying through http://%s:%s\n' %
(proxyhost, proxyport))
else:
proxies = {}
@ -504,7 +504,7 @@ def opener(ui, authinfo=None):
if authinfo is not None:
passmgr.add_password(*authinfo)
user, passwd = authinfo[2:4]
- ui.debug(_('http auth: user %s, password %s\n') %
+ ui.debug('http auth: user %s, password %s\n' %
(user, passwd and '*' * len(passwd) or 'not set'))
handlers.extend((urllib2.HTTPBasicAuthHandler(passmgr),