Mirror of https://github.com/facebook/sapling.git (synced 2024-12-26 06:21:48 +03:00)
py3: iter{keys,values,items} -> pycompat.iter{keys,values,items}
Reviewed By: quark-zju
Differential Revision: D19608323
fbshipit-source-id: dd186ef16d6422a56af41fcaa850d9838ae9a240
parent ec51946a13
commit 82715fd2ea
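For readers skimming the hunks below, every change applies the same mechanical rewrite: a Python 2-only call like d.iteritems() becomes pycompat.iteritems(d). The edenscm.mercurial.pycompat module itself is not part of this diff; the following is only a hypothetical sketch of the assumed shape of those helpers, not the actual implementation.

    # Hypothetical sketch of the compatibility helpers this commit switches to.
    # The real definitions live in edenscm.mercurial.pycompat; this only
    # illustrates the assumed behaviour: use the Python 2 iter* methods when
    # running on py2, otherwise fall back to the Python 3 view methods.
    import sys

    if sys.version_info[0] >= 3:

        def iteritems(d):
            return iter(d.items())

        def itervalues(d):
            return iter(d.values())

        def iterkeys(d):
            return iter(d.keys())

    else:

        def iteritems(d):
            return d.iteritems()

        def itervalues(d):
            return d.itervalues()

        def iterkeys(d):
            return d.iterkeys()

With helpers like these, call sites can be written once and run under both interpreters, e.g. for path, state in pycompat.iteritems(self.fixupmap): instead of the py2-only self.fixupmap.iteritems().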
@@ -52,7 +52,16 @@ import random
 import sys
 import time
 
-from edenscm.mercurial import context, error, hg, patch, registrar, scmutil, util
+from edenscm.mercurial import (
+    context,
+    error,
+    hg,
+    patch,
+    pycompat,
+    registrar,
+    scmutil,
+    util,
+)
 from edenscm.mercurial.i18n import _
 from edenscm.mercurial.node import nullid, nullrev, short
 
@@ -208,7 +217,7 @@ def analyze(ui, repo, *revs, **opts):
         for filename, mar, lineadd, lineremove, isbin in parsegitdiff(diff):
             if isbin:
                 continue
-            added = sum(lineadd.itervalues(), 0)
+            added = sum(pycompat.itervalues(lineadd), 0)
             if mar == "m":
                 if added and lineremove:
                     lineschanged[roundto(added, 5), roundto(lineremove, 5)] += 1
@@ -398,7 +407,7 @@ def synthesize(ui, repo, descpath, **opts):
             repo,
             [pctx.node(), nullid],
             message,
-            files.iterkeys(),
+            pycompat.iterkeys(files),
             filectxfn,
             ui.username(),
             "%d %d" % util.makedate(),
@@ -40,6 +40,7 @@ from edenscm.mercurial import (
     node,
     patch,
     phases,
+    pycompat,
     registrar,
     scmutil,
     util,
@@ -249,7 +250,7 @@ def overlaycontext(
     date = ctx.date()
     desc = ctx.description()
     user = ctx.user()
-    files = set(ctx.files()).union(memworkingcopy.iterkeys())
+    files = set(ctx.files()).union(pycompat.iterkeys(memworkingcopy))
     store = overlaystore(ctx, memworkingcopy)
     return context.memctx(
         repo=ctx.repo(),
@@ -697,7 +698,7 @@ class fixupstate(object):
 
     def apply(self):
        """apply fixups to individual filefixupstates"""
-        for path, state in self.fixupmap.iteritems():
+        for path, state in pycompat.iteritems(self.fixupmap):
            if self.ui.debugflag:
                self.ui.write(_("applying fixups to %s\n") % path)
            state.apply()
@@ -706,7 +707,8 @@ class fixupstate(object):
     def chunkstats(self):
        """-> {path: chunkstats}. collect chunkstats from filefixupstates"""
        return dict(
-            (path, state.chunkstats) for path, state in self.fixupmap.iteritems()
+            (path, state.chunkstats)
+            for path, state in pycompat.iteritems(self.fixupmap)
        )
 
     def commit(self):
@@ -729,7 +731,7 @@ class fixupstate(object):
        chunkstats = self.chunkstats
        if ui.verbose:
            # chunkstats for each file
-            for path, stat in chunkstats.iteritems():
+            for path, stat in pycompat.iteritems(chunkstats):
                if stat[0]:
                    ui.write(
                        _n(
@@ -831,9 +833,9 @@ class fixupstate(object):
            return False
        pctx = parents[0]
        # ctx changes more files (not a subset of memworkingcopy)
-        if not set(ctx.files()).issubset(set(memworkingcopy.iterkeys())):
+        if not set(ctx.files()).issubset(set(pycompat.iterkeys(memworkingcopy))):
            return False
-        for path, content in memworkingcopy.iteritems():
+        for path, content in pycompat.iteritems(memworkingcopy):
            if path not in pctx or path not in ctx:
                return False
            fctx = ctx[path]
@@ -900,7 +902,7 @@ def overlaydiffcontext(ctx, chunks):
        if not path or not info:
            continue
        patchmap[path].append(info)
-    for path, patches in patchmap.iteritems():
+    for path, patches in pycompat.iteritems(patchmap):
        if path not in ctx or not patches:
            continue
        patches.sort(reverse=True)
@@ -1074,7 +1076,7 @@ def _amendcmd(flag, orig, ui, repo, *pats, **opts):
    if not opts.get(flag):
        return orig(ui, repo, *pats, **opts)
    # use absorb
-    for k, v in opts.iteritems(): # check unsupported flags
+    for k, v in pycompat.iteritems(opts): # check unsupported flags
        if v and k not in ["interactive", flag]:
            raise error.Abort(
                _("--%s does not support --%s") % (flag, k.replace("_", "-"))
@@ -1087,7 +1089,7 @@ def _amendcmd(flag, orig, ui, repo, *pats, **opts):
    # what's going on and is less verbose.
    adoptedsum = 0
    messages = []
-    for path, (adopted, total) in state.chunkstats.iteritems():
+    for path, (adopted, total) in pycompat.iteritems(state.chunkstats):
        adoptedsum += adopted
        if adopted == total:
            continue
@@ -23,6 +23,7 @@ from edenscm.mercurial import (
     mutation,
     node as nodemod,
     phases,
+    pycompat,
     registrar,
     scmutil,
 )
@@ -248,7 +249,7 @@ def metaedit(ui, repo, templ, *revs, **opts):
        mapping = dict(
            map(
                lambda oldnew: (oldnew[0], [oldnew[1]]),
-                replacemap.iteritems(),
+                pycompat.iteritems(replacemap),
            )
        )
        templ.setprop("nodereplacements", mapping)
@@ -14,6 +14,7 @@ from edenscm.mercurial import (
     node as nodemod,
     obsolete,
     obsutil,
+    pycompat,
     registrar,
     visibility,
 )
@@ -93,14 +94,14 @@ def unamend(ui, repo, **opts):
        cm = predctx.manifest()
        dirstate = repo.dirstate
        diff = cm.diff(wm)
-        changedfiles.extend(diff.iterkeys())
+        changedfiles.extend(pycompat.iterkeys(diff))
 
        tr = repo.transaction("unamend")
        with dirstate.parentchange():
            dirstate.rebuild(prednode, cm, changedfiles)
            # we want added and removed files to be shown
            # properly, not with ? and ! prefixes
-            for filename, data in diff.iteritems():
+            for filename, data in pycompat.iteritems(diff):
                if data[0][0] is None:
                    dirstate.add(filename)
                if data[1][0] is None:
@@ -30,7 +30,7 @@ import datetime
 import errno
 import os
 
-from edenscm.mercurial import (
+from edenscm.mercurial import (pycompat,
     changelog,
     error,
     extensions,
@@ -309,7 +309,7 @@ cdef class nodemap(object):
        # index. Ideally we can keep changelog always up-to-date with the
        # index. But that requires more changes (ex. removing index.insert API
        # and index takes care of data writes).
-        candidates.update(k for k in self._overrides.iterkeys()
+        candidates.update(k for k in pycompat.iterkeys(self._overrides)
                          if hex(k).startswith(hexprefix))
        if len(candidates) == 1:
            return list(candidates)[0]
@@ -11,7 +11,15 @@ import os
 import re
 import socket
 
-from edenscm.mercurial import encoding, error, node as nodemod, perftrace, phases, util
+from edenscm.mercurial import (
+    encoding,
+    error,
+    node as nodemod,
+    perftrace,
+    phases,
+    pycompat,
+    util,
+)
 from edenscm.mercurial.i18n import _
 
 from . import dependencies
@@ -135,7 +143,7 @@ def pushbackupbookmarks(repo, remotepath, getconnection, backupstate):
    # to commits that are public.
    with perftrace.trace("Compute Bookmarks"):
        bookmarks = {}
-        for name, node in repo._bookmarks.iteritems():
+        for name, node in pycompat.iteritems(repo._bookmarks):
            ctx = repo[node]
            if ctx.rev() in ancestors or ctx.phase() == phases.public:
                bookmarks[name] = ctx.hex()
@@ -250,7 +258,7 @@ def downloadbackupbookmarks(
    bookmarks = conn.peer.listkeyspatterns("bookmarks", patterns=[pattern])
 
    backupinfo = util.sortdict()
-    for name, hexnode in bookmarks.iteritems():
+    for name, hexnode in pycompat.iteritems(bookmarks):
 
        match = _backupbookmarkre.match(name)
        if match:
@ -18,6 +18,7 @@ from edenscm.mercurial import (
|
||||
lock as lockmod,
|
||||
node as nodemod,
|
||||
progress,
|
||||
pycompat,
|
||||
registrar,
|
||||
scmutil,
|
||||
util,
|
||||
@ -521,7 +522,7 @@ def cloudrestorebackup(ui, repo, dest=None, **opts):
|
||||
)
|
||||
|
||||
changes = []
|
||||
for name, hexnode in bookmarks.iteritems():
|
||||
for name, hexnode in pycompat.iteritems(bookmarks):
|
||||
if hexnode in repo:
|
||||
changes.append((name, nodemod.bin(hexnode)))
|
||||
else:
|
||||
|
@ -13,7 +13,7 @@ import socket
|
||||
import ssl
|
||||
import time
|
||||
|
||||
from edenscm.mercurial import error, perftrace, util
|
||||
from edenscm.mercurial import error, perftrace, pycompat, util
|
||||
from edenscm.mercurial.i18n import _
|
||||
|
||||
from . import baseservice, error as ccerror
|
||||
@ -35,7 +35,7 @@ def cleandict(d):
|
||||
return d
|
||||
return dict(
|
||||
(k, cleandict(v))
|
||||
for k, v in d.iteritems()
|
||||
for k, v in pycompat.iteritems(d)
|
||||
if (v is not None and not (util.safehasattr(v, "__len__") and len(v) == 0))
|
||||
)
|
||||
|
||||
|
@ -330,7 +330,7 @@ class commandline(object):
|
||||
|
||||
def _cmdline(self, cmd, *args, **kwargs):
|
||||
cmdline = [self.command, cmd] + list(args)
|
||||
for k, v in kwargs.iteritems():
|
||||
for k, v in pycompat.iteritems(kwargs):
|
||||
if len(k) == 1:
|
||||
cmdline.append("-" + k)
|
||||
else:
|
||||
|
@ -14,7 +14,7 @@ from __future__ import absolute_import, print_function
|
||||
import posixpath
|
||||
import shlex
|
||||
|
||||
from edenscm.mercurial import error
|
||||
from edenscm.mercurial import error, pycompat
|
||||
from edenscm.mercurial.i18n import _
|
||||
|
||||
from . import common
|
||||
@ -129,7 +129,7 @@ class filemapper(object):
|
||||
repo belong to the source repo and what parts don't."""
|
||||
if self.targetprefixes is None:
|
||||
self.targetprefixes = set()
|
||||
for before, after in self.rename.iteritems():
|
||||
for before, after in pycompat.iteritems(self.rename):
|
||||
self.targetprefixes.add(after)
|
||||
|
||||
# If "." is a target, then all target files are considered from the
|
||||
|
@ -37,6 +37,7 @@ from edenscm.mercurial import (
|
||||
merge as mergemod,
|
||||
node as nodemod,
|
||||
phases,
|
||||
pycompat,
|
||||
scmutil,
|
||||
util,
|
||||
)
|
||||
@ -135,7 +136,7 @@ class mercurial_sink(common.converter_sink):
|
||||
|
||||
if missings:
|
||||
self.after()
|
||||
for pbranch, heads in sorted(missings.iteritems()):
|
||||
for pbranch, heads in sorted(pycompat.iteritems(missings)):
|
||||
pbranchpath = os.path.join(self.path, pbranch)
|
||||
prepo = hg.peer(self.ui, {}, pbranchpath)
|
||||
self.ui.note(_("pulling from %s into %s\n") % (pbranch, branch))
|
||||
@ -163,7 +164,7 @@ class mercurial_sink(common.converter_sink):
|
||||
False, # followcopies
|
||||
)
|
||||
|
||||
for file, (action, info, msg) in actions.iteritems():
|
||||
for file, (action, info, msg) in pycompat.iteritems(actions):
|
||||
if source.targetfilebelongstosource(file):
|
||||
# If the file belongs to the source repo, ignore the p2
|
||||
# since it will be covered by the existing fileset.
|
||||
@ -447,7 +448,7 @@ class mercurial_source(common.converter_source):
|
||||
maappend = ma.append
|
||||
rappend = r.append
|
||||
d = ctx1.manifest().diff(ctx2.manifest())
|
||||
for f, ((node1, flag1), (node2, flag2)) in d.iteritems():
|
||||
for f, ((node1, flag1), (node2, flag2)) in pycompat.iteritems(d):
|
||||
if node2 is None:
|
||||
rappend(f)
|
||||
else:
|
||||
|
@ -765,7 +765,7 @@ class repo_source(common.converter_source):
|
||||
|
||||
self.pathprojectindex = self.repo._buildprojectmap()
|
||||
self.projectpathindex = {
|
||||
project: path for path, project in self.pathprojectindex.iteritems()
|
||||
project: path for path, project in pycompat.iteritems(self.pathprojectindex)
|
||||
}
|
||||
self.commitprojectindex = self._buildcommitprojectmap()
|
||||
self.objecthashprojectindex = {}
|
||||
|
@ -147,7 +147,7 @@ def get_log_child(
|
||||
def receiver(orig_paths, revnum, author, date, message, pool):
|
||||
paths = {}
|
||||
if orig_paths is not None:
|
||||
for k, v in orig_paths.iteritems():
|
||||
for k, v in pycompat.iteritems(orig_paths):
|
||||
paths[k] = changedpath(v)
|
||||
pickle.dump((paths, revnum, author, date, message), fp, protocol)
|
||||
|
||||
@ -239,7 +239,7 @@ class directlogstream(list):
|
||||
def receiver(orig_paths, revnum, author, date, message, pool):
|
||||
paths = {}
|
||||
if orig_paths is not None:
|
||||
for k, v in orig_paths.iteritems():
|
||||
for k, v in pycompat.iteritems(orig_paths):
|
||||
paths[k] = changedpath(v)
|
||||
self.append((paths, revnum, author, date, message))
|
||||
|
||||
@ -455,7 +455,7 @@ class svn_source(converter_source):
|
||||
|
||||
def setrevmap(self, revmap):
|
||||
lastrevs = {}
|
||||
for revid in revmap.iterkeys():
|
||||
for revid in pycompat.iterkeys(revmap):
|
||||
uuid, module, revnum = revsplit(revid)
|
||||
lastrevnum = lastrevs.setdefault(module, revnum)
|
||||
if revnum > lastrevnum:
|
||||
@ -557,7 +557,9 @@ class svn_source(converter_source):
|
||||
self.baseurl + quote(module), optrev(revnum), True, self.ctx
|
||||
)
|
||||
files = [
|
||||
n for n, e in entries.iteritems() if e.kind == svn.core.svn_node_file
|
||||
n
|
||||
for n, e in pycompat.iteritems(entries)
|
||||
if e.kind == svn.core.svn_node_file
|
||||
]
|
||||
self.removed = set()
|
||||
|
||||
@ -850,7 +852,7 @@ class svn_source(converter_source):
|
||||
parents = []
|
||||
# check whether this revision is the start of a branch or part
|
||||
# of a branch renaming
|
||||
orig_paths = sorted(orig_paths.iteritems())
|
||||
orig_paths = sorted(pycompat.iteritems(orig_paths))
|
||||
root_paths = [(p, e) for p, e in orig_paths if self.module.startswith(p)]
|
||||
if root_paths:
|
||||
path, ent = root_paths[-1]
|
||||
@ -1012,7 +1014,7 @@ class svn_source(converter_source):
|
||||
path += "/"
|
||||
return (
|
||||
(path + p)
|
||||
for p, e in entries.iteritems()
|
||||
for p, e in pycompat.iteritems(entries)
|
||||
if e.kind == svn.core.svn_node_file
|
||||
)
|
||||
|
||||
|
@ -50,6 +50,7 @@ from edenscm.mercurial import (
|
||||
filemerge,
|
||||
node,
|
||||
phases,
|
||||
pycompat,
|
||||
registrar,
|
||||
scmutil,
|
||||
util,
|
||||
@ -257,7 +258,7 @@ def _amend(orig, ui, repo, old, extra, pats, opts):
|
||||
orig_encoded = json.loads(orig_data)
|
||||
orig_amend_copies = dict(
|
||||
(k.decode("base64"), v.decode("base64"))
|
||||
for (k, v) in orig_encoded.iteritems()
|
||||
for (k, v) in pycompat.iteritems(orig_encoded)
|
||||
)
|
||||
|
||||
# Copytrace information is not valid if it refers to a file that
|
||||
@ -267,21 +268,21 @@ def _amend(orig, ui, repo, old, extra, pats, opts):
|
||||
#
|
||||
# Find chained copies and renames (a -> b -> c) and collapse them to
|
||||
# (a -> c). Delete the entry for b if this was a rename.
|
||||
for dst, src in amend_copies.iteritems():
|
||||
for dst, src in pycompat.iteritems(amend_copies):
|
||||
if src in orig_amend_copies:
|
||||
amend_copies[dst] = orig_amend_copies[src]
|
||||
if src not in amended_ctx:
|
||||
del orig_amend_copies[src]
|
||||
|
||||
# Copy any left over copies from the previous context.
|
||||
for dst, src in orig_amend_copies.iteritems():
|
||||
for dst, src in pycompat.iteritems(orig_amend_copies):
|
||||
if dst not in amend_copies:
|
||||
amend_copies[dst] = src
|
||||
|
||||
# Write out the entry for the new amend commit.
|
||||
encoded = dict(
|
||||
(k.encode("base64"), v.encode("base64"))
|
||||
for (k, v) in amend_copies.iteritems()
|
||||
for (k, v) in pycompat.iteritems(amend_copies)
|
||||
)
|
||||
db[node] = json.dumps(encoded)
|
||||
try:
|
||||
@ -315,7 +316,8 @@ def _getamendcopies(repo, dest, ancestor):
|
||||
# Load the amend copytrace data from this commit.
|
||||
encoded = json.loads(db[ctx.node()])
|
||||
return dict(
|
||||
(k.decode("base64"), v.decode("base64")) for (k, v) in encoded.iteritems()
|
||||
(k.decode("base64"), v.decode("base64"))
|
||||
for (k, v) in pycompat.iteritems(encoded)
|
||||
)
|
||||
except Exception:
|
||||
repo.ui.log("copytrace", "Failed to load amend copytrace for %s" % dest.hex())
|
||||
@ -448,7 +450,7 @@ def _domergecopies(orig, repo, cdst, csrc, base):
|
||||
return orig(repo, cdst, csrc, base)
|
||||
|
||||
cp = copiesmod._forwardcopies(base, csrc)
|
||||
for dst, src in cp.iteritems():
|
||||
for dst, src in pycompat.iteritems(cp):
|
||||
if src in mdst:
|
||||
copies[dst] = src
|
||||
|
||||
@ -502,7 +504,7 @@ def _domergecopies(orig, repo, cdst, csrc, base):
|
||||
amend_copies = _getamendcopies(repo, cdst, base.p1())
|
||||
if amend_copies:
|
||||
repo.ui.debug("Loaded amend copytrace for %s" % cdst)
|
||||
for dst, src in amend_copies.iteritems():
|
||||
for dst, src in pycompat.iteritems(amend_copies):
|
||||
if dst not in copies:
|
||||
copies[dst] = src
|
||||
|
||||
|
@ -51,6 +51,7 @@ from edenscm.mercurial import (
|
||||
extensions,
|
||||
localrepo,
|
||||
match as matchmod,
|
||||
pycompat,
|
||||
scmutil,
|
||||
util,
|
||||
)
|
||||
@ -114,7 +115,7 @@ def getmirrors(maps, filename):
|
||||
if filename.startswith(subdir):
|
||||
return []
|
||||
|
||||
for key, mirrordirs in maps.iteritems():
|
||||
for key, mirrordirs in pycompat.iteritems(maps):
|
||||
for subdir in mirrordirs:
|
||||
if filename.startswith(subdir):
|
||||
return mirrordirs
|
||||
|
@ -3,7 +3,13 @@
|
||||
# This software may be used and distributed according to the terms of the
|
||||
# GNU General Public License version 2.
|
||||
|
||||
from edenscm.mercurial import cmdutil, crecord as crecordmod, patch as patchmod, util
|
||||
from edenscm.mercurial import (
|
||||
cmdutil,
|
||||
crecord as crecordmod,
|
||||
patch as patchmod,
|
||||
pycompat,
|
||||
util,
|
||||
)
|
||||
from edenscm.mercurial.i18n import _
|
||||
|
||||
|
||||
@ -113,6 +119,9 @@ def recordfilter(ui, headers, operation=None):
|
||||
applied[h.filename()] = [h] + h.hunks
|
||||
|
||||
return (
|
||||
sum([i for i in applied.itervalues() if i[0].special() or len(i) > 1], []),
|
||||
sum(
|
||||
[i for i in pycompat.itervalues(applied) if i[0].special() or len(i) > 1],
|
||||
[],
|
||||
),
|
||||
{},
|
||||
)
|
||||
|
@ -347,7 +347,7 @@ def reposetup(ui, repo):
|
||||
|
||||
if not repo.local():
|
||||
return
|
||||
for name, fn in filters.iteritems():
|
||||
for name, fn in pycompat.iteritems(filters):
|
||||
repo.adddatafilter(name, fn)
|
||||
|
||||
ui.setconfig("patch", "eol", "auto", "eol")
|
||||
|
@ -160,7 +160,7 @@ class annotateopts(object):
|
||||
defaults = {"diffopts": None, "followrename": True, "followmerge": True}
|
||||
|
||||
def __init__(self, **opts):
|
||||
for k, v in self.defaults.iteritems():
|
||||
for k, v in pycompat.iteritems(self.defaults):
|
||||
setattr(self, k, opts.get(k, v))
|
||||
|
||||
@util.propertycache
|
||||
@ -554,7 +554,7 @@ class _annotatecontext(object):
|
||||
# find an unresolved line and its linelog rev to annotate
|
||||
hsh = None
|
||||
try:
|
||||
for (rev, _linenum), idxs in key2idxs.iteritems():
|
||||
for (rev, _linenum), idxs in pycompat.iteritems(key2idxs):
|
||||
if revmap.rev2flag(rev) & revmapmod.sidebranchflag:
|
||||
continue
|
||||
hsh = annotateresult[idxs[0]][0]
|
||||
@ -565,7 +565,7 @@ class _annotatecontext(object):
|
||||
# the remaining key2idxs are not in main branch, resolving them
|
||||
# using the hard way...
|
||||
revlines = {}
|
||||
for (rev, linenum), idxs in key2idxs.iteritems():
|
||||
for (rev, linenum), idxs in pycompat.iteritems(key2idxs):
|
||||
if rev not in revlines:
|
||||
hsh = annotateresult[idxs[0]][0]
|
||||
if self.ui.debugflag:
|
||||
|
@ -8,7 +8,15 @@
|
||||
import contextlib
|
||||
import os
|
||||
|
||||
from edenscm.mercurial import error, extensions, hg, localrepo, scmutil, wireproto
|
||||
from edenscm.mercurial import (
|
||||
error,
|
||||
extensions,
|
||||
hg,
|
||||
localrepo,
|
||||
pycompat,
|
||||
scmutil,
|
||||
wireproto,
|
||||
)
|
||||
from edenscm.mercurial.i18n import _
|
||||
|
||||
from . import context
|
||||
@ -199,7 +207,7 @@ def clientfetch(repo, paths, lastnodemap=None, peer=None):
|
||||
|
||||
ui.debug("fastannotate: server returned\n")
|
||||
for result in results:
|
||||
for path, content in result.iteritems():
|
||||
for path, content in pycompat.iteritems(result):
|
||||
# ignore malicious paths
|
||||
if not path.startswith("fastannotate/") or "/../" in (path + "/"):
|
||||
ui.debug("fastannotate: ignored malicious path %s\n" % path)
|
||||
|
@ -626,7 +626,7 @@ def _walk(self, match, event):
|
||||
state.setignorelist(ignorelist)
|
||||
|
||||
results.pop(".hg", None)
|
||||
return results.iteritems()
|
||||
return pycompat.iteritems(results)
|
||||
|
||||
|
||||
def overridestatus(
|
||||
|
@ -23,7 +23,7 @@ maintainers if the command is legitimate. To customize this footer, set:
|
||||
import getopt
|
||||
import re
|
||||
|
||||
from edenscm.mercurial import error, extensions, fancyopts, registrar, util
|
||||
from edenscm.mercurial import error, extensions, fancyopts, pycompat, registrar, util
|
||||
from edenscm.mercurial.i18n import _
|
||||
|
||||
|
||||
@ -112,7 +112,10 @@ def parseoptions(ui, cmdoptions, args):
|
||||
|
||||
args = list([convert(x) for x in args])
|
||||
opts = dict(
|
||||
[(k, convert(v)) if isinstance(v, str) else (k, v) for k, v in opts.iteritems()]
|
||||
[
|
||||
(k, convert(v)) if isinstance(v, str) else (k, v)
|
||||
for k, v in pycompat.iteritems(opts)
|
||||
]
|
||||
)
|
||||
|
||||
return args, opts
|
||||
@ -127,7 +130,7 @@ class Command(object):
|
||||
def __str__(self):
|
||||
cmd = "hg " + self.name
|
||||
if self.opts:
|
||||
for k, values in sorted(self.opts.iteritems()):
|
||||
for k, values in sorted(pycompat.iteritems(self.opts)):
|
||||
for v in values:
|
||||
if v:
|
||||
cmd += " %s %s" % (k, v)
|
||||
|
@ -73,6 +73,9 @@ from git_handler import GitHandler
|
||||
# Disable DeprecationWarning from newer dulwich since hggit also supports older
|
||||
# dulwich.
|
||||
warnings.filterwarnings(r"ignore", r"", DeprecationWarning, r"edenscm.hgext.hggit")
|
||||
warnings.filterwarnings(
|
||||
r"ignore", r"", DeprecationWarning, r"edenscm.mercurial.pycompat"
|
||||
)
|
||||
|
||||
try:
|
||||
from edenscm.mercurial import exchange
|
||||
|
@ -88,8 +88,8 @@ hasconfigitems = False
|
||||
def registerconfigs(configitem):
|
||||
global hasconfigitems
|
||||
hasconfigitems = True
|
||||
for section, items in CONFIG_DEFAULTS.iteritems():
|
||||
for item, default in items.iteritems():
|
||||
for section, items in CONFIG_DEFAULTS.items():
|
||||
for item, default in items.items():
|
||||
configitem(section, item, default=default)
|
||||
|
||||
|
||||
|
@ -1,7 +1,7 @@
|
||||
# git2hg.py - convert Git repositories and commits to Mercurial ones
|
||||
|
||||
from dulwich.objects import Commit, Tag
|
||||
from edenscm.mercurial import util
|
||||
from edenscm.mercurial import pycompat, util
|
||||
from edenscm.mercurial.node import bin
|
||||
|
||||
|
||||
@ -24,7 +24,7 @@ def find_incoming(git_object_store, git_map, refs):
|
||||
def get_heads(refs):
|
||||
todo = []
|
||||
seenheads = set()
|
||||
for ref, sha in refs.iteritems():
|
||||
for ref, sha in pycompat.iteritems(refs):
|
||||
# refs could contain refs on the server that we haven't pulled down
|
||||
# the objects for; also make sure it's a sha and not a symref
|
||||
if ref != "HEAD" and sha in git_object_store:
|
||||
|
@ -256,7 +256,7 @@ class GitHandler(object):
|
||||
|
||||
def save_tags(self):
|
||||
file = self.vfs(self.tags_file, "w+", atomictemp=True)
|
||||
for name, sha in sorted(self.tags.iteritems()):
|
||||
for name, sha in sorted(pycompat.iteritems(self.tags)):
|
||||
if not self.repo.tagtype(name) == "global":
|
||||
file.write("%s %s\n" % (sha, name))
|
||||
# If this complains, atomictempfile no longer has close
|
||||
@ -417,7 +417,9 @@ class GitHandler(object):
|
||||
client.send_pack(path, changed, lambda have, want: [])
|
||||
|
||||
changed_refs = [
|
||||
ref for ref, sha in new_refs.iteritems() if sha != old_refs.get(ref)
|
||||
ref
|
||||
for ref, sha in pycompat.iteritems(new_refs)
|
||||
if sha != old_refs.get(ref)
|
||||
]
|
||||
new = [bin(self.map_hg_get(new_refs[ref])) for ref in changed_refs]
|
||||
old = {}
|
||||
@ -436,7 +438,7 @@ class GitHandler(object):
|
||||
remote_name = self.remote_name(remote)
|
||||
|
||||
if remote_name and new_refs:
|
||||
for ref, new_sha in sorted(new_refs.iteritems()):
|
||||
for ref, new_sha in sorted(pycompat.iteritems(new_refs)):
|
||||
old_sha = old_refs.get(ref)
|
||||
if old_sha is None:
|
||||
if self.ui.verbose:
|
||||
@ -997,7 +999,7 @@ class GitHandler(object):
|
||||
manifest2 = self.repo.changectx(p2).manifest()
|
||||
return [
|
||||
path
|
||||
for path, node1 in manifest1.iteritems()
|
||||
for path, node1 in pycompat.iteritems(manifest1)
|
||||
if path not in files and manifest2.get(path, node1) != node1
|
||||
]
|
||||
|
||||
@ -1237,7 +1239,7 @@ class GitHandler(object):
|
||||
|
||||
# mapped nodes might be hidden
|
||||
unfiltered = self.repo.unfiltered()
|
||||
for rev, rev_refs in exportable.iteritems():
|
||||
for rev, rev_refs in pycompat.iteritems(exportable):
|
||||
ctx = self.repo[rev]
|
||||
if not rev_refs:
|
||||
raise error.Abort(
|
||||
@ -1304,7 +1306,7 @@ class GitHandler(object):
|
||||
if refs is None:
|
||||
return None
|
||||
filteredrefs = self.filter_refs(refs, heads)
|
||||
return [x for x in filteredrefs.itervalues() if x not in self.git]
|
||||
return [x for x in pycompat.itervalues(filteredrefs) if x not in self.git]
|
||||
|
||||
try:
|
||||
with progress.bar(self.ui, "") as prog:
|
||||
@ -1357,7 +1359,7 @@ class GitHandler(object):
|
||||
else:
|
||||
raise error.Abort("ambiguous reference %s: %r" % (h, r))
|
||||
else:
|
||||
for ref, sha in refs.iteritems():
|
||||
for ref, sha in pycompat.iteritems(refs):
|
||||
if not ref.endswith("^{}") and (
|
||||
ref.startswith("refs/heads/") or ref.startswith("refs/tags/")
|
||||
):
|
||||
@ -1387,7 +1389,9 @@ class GitHandler(object):
|
||||
return obj.commit_time >= min_timestamp
|
||||
|
||||
return util.OrderedDict(
|
||||
(ref, sha) for ref, sha in refs.iteritems() if check_min_time(self.git[sha])
|
||||
(ref, sha)
|
||||
for ref, sha in pycompat.iteritems(refs)
|
||||
if check_min_time(self.git[sha])
|
||||
)
|
||||
|
||||
def update_references(self):
|
||||
@ -1395,7 +1399,7 @@ class GitHandler(object):
|
||||
|
||||
# Create a local Git branch name for each
|
||||
# Mercurial bookmark.
|
||||
for hg_sha, refs in exportable.iteritems():
|
||||
for hg_sha, refs in pycompat.iteritems(exportable):
|
||||
for git_ref in refs.heads:
|
||||
git_sha = self.map_git_get(hg_sha)
|
||||
if git_sha:
|
||||
@ -1433,7 +1437,7 @@ class GitHandler(object):
|
||||
bms = self.repo._bookmarks
|
||||
for filtered_bm, bm in self._filter_for_bookmarks(bms):
|
||||
res[hex(bms[bm])].heads.add("refs/heads/" + filtered_bm)
|
||||
for tag, sha in self.tags.iteritems():
|
||||
for tag, sha in pycompat.iteritems(self.tags):
|
||||
res[sha].tags.add("refs/tags/" + tag)
|
||||
return res
|
||||
|
||||
@ -1447,7 +1451,7 @@ class GitHandler(object):
|
||||
|
||||
suffix = self.branch_bookmark_suffix or ""
|
||||
changes = []
|
||||
for head, sha in heads.iteritems():
|
||||
for head, sha in pycompat.iteritems(heads):
|
||||
# refs contains all the refs in the server, not just
|
||||
# the ones we are pulling
|
||||
hgsha = self.map_hg_get(sha)
|
||||
@ -1479,7 +1483,7 @@ class GitHandler(object):
|
||||
for t in list(remote_refs):
|
||||
if t.startswith(remote_name + "/"):
|
||||
del remote_refs[t]
|
||||
for ref_name, sha in refs.iteritems():
|
||||
for ref_name, sha in pycompat.iteritems(refs):
|
||||
if ref_name.startswith("refs/heads"):
|
||||
hgsha = self.map_hg_get(sha)
|
||||
if hgsha is None or hgsha not in self.repo:
|
||||
|
@ -47,12 +47,12 @@ def generate_repo_subclass(baseclass):
|
||||
def _findtags(self):
|
||||
(tags, tagtypes) = super(hgrepo, self)._findtags()
|
||||
|
||||
for tag, rev in self.githandler.tags.iteritems():
|
||||
for tag, rev in pycompat.iteritems(self.githandler.tags):
|
||||
if isinstance(tag, pycompat.unicode):
|
||||
tag = tag.encode("utf-8")
|
||||
tags[tag] = bin(rev)
|
||||
tagtypes[tag] = "git"
|
||||
for tag, rev in self.githandler.remote_refs.iteritems():
|
||||
for tag, rev in pycompat.iteritems(self.githandler.remote_refs):
|
||||
if isinstance(tag, pycompat.unicode):
|
||||
tag = tag.encode("utf-8")
|
||||
tags[tag] = rev
|
||||
|
@ -10,6 +10,7 @@ from edenscm.mercurial import (
|
||||
context,
|
||||
manifest,
|
||||
match as matchmod,
|
||||
pycompat,
|
||||
util,
|
||||
)
|
||||
from edenscm.mercurial.node import bin, hex, nullid
|
||||
@ -31,7 +32,9 @@ class overlaymanifest(object):
|
||||
|
||||
def withflags(self):
|
||||
self.load()
|
||||
return set([path for path, flag in self._flags.iteritems() if flag != ""])
|
||||
return set(
|
||||
[path for path, flag in pycompat.iteritems(self._flags) if flag != ""]
|
||||
)
|
||||
|
||||
def copy(self):
|
||||
return overlaymanifest(self.repo, self.tree.id)
|
||||
@ -59,7 +62,7 @@ class overlaymanifest(object):
|
||||
return ""
|
||||
|
||||
def addtree(tree, dirname):
|
||||
for entry in tree.iteritems():
|
||||
for entry in pycompat.iteritems(tree):
|
||||
if entry.mode & 0o40000:
|
||||
# expand directory
|
||||
subtree = self.repo.handler.git.get_object(entry.sha)
|
||||
@ -84,7 +87,9 @@ class overlaymanifest(object):
|
||||
|
||||
def iteritems(self):
|
||||
self.load()
|
||||
return self._map.iteritems()
|
||||
return pycompat.iteritems(self._map)
|
||||
|
||||
items = iteritems
|
||||
|
||||
def __iter__(self):
|
||||
self.load()
|
||||
@ -122,7 +127,7 @@ class overlaymanifest(object):
|
||||
|
||||
if matcher is None:
|
||||
matcher = matchmod.always("", "")
|
||||
for fn, n1 in self.iteritems():
|
||||
for fn, n1 in pycompat.iteritems(self):
|
||||
if not matcher(fn):
|
||||
continue
|
||||
fl1 = self._flags.get(fn, "")
|
||||
@ -133,7 +138,7 @@ class overlaymanifest(object):
|
||||
if n1 != n2 or fl1 != fl2:
|
||||
diff[fn] = ((n1, fl1), (n2, fl2))
|
||||
|
||||
for fn, n2 in m2.iteritems():
|
||||
for fn, n2 in pycompat.iteritems(m2):
|
||||
if fn not in self:
|
||||
if not matcher(fn):
|
||||
continue
|
||||
|
@ -3,7 +3,7 @@ functions."""
|
||||
import re
|
||||
|
||||
from dulwich import errors
|
||||
from edenscm.mercurial import error, lock as lockmod, util as hgutil
|
||||
from edenscm.mercurial import error, lock as lockmod, pycompat, util as hgutil
|
||||
from edenscm.mercurial.i18n import _
|
||||
|
||||
|
||||
@ -19,7 +19,7 @@ gitschemes = ("git", "git+ssh", "git+http", "git+https")
|
||||
|
||||
def serialize_hgsub(data):
|
||||
"""Produces a string from OrderedDict hgsub content"""
|
||||
return "".join(["%s = %s\n" % (n, v) for n, v in data.iteritems()])
|
||||
return "".join(["%s = %s\n" % (n, v) for n, v in pycompat.iteritems(data)])
|
||||
|
||||
|
||||
def transform_notgit(f):
|
||||
|
@ -890,7 +890,7 @@ def wraprepo(repo):
|
||||
refs = dict(self._bookmarks)
|
||||
refs["tip"] = self["tip"].rev()
|
||||
sha = ""
|
||||
for k, v in sorted(refs.iteritems()):
|
||||
for k, v in sorted(pycompat.iteritems(refs)):
|
||||
if k != "tip":
|
||||
v = hex(v)
|
||||
sha = hashlib.sha1("%s%s%s" % (sha, k, v)).hexdigest()
|
||||
@ -1131,7 +1131,7 @@ def wraprepo(repo):
|
||||
# the old cached ctx, since the old ctx contains a reference to
|
||||
# the old revlog, which is now out of date.
|
||||
mfl = self.manifestlog
|
||||
for dirname, lrucache in oldmancache.iteritems():
|
||||
for dirname, lrucache in pycompat.iteritems(oldmancache):
|
||||
if dirname == "":
|
||||
for oldmfnode in lrucache:
|
||||
oldmfctx = lrucache[oldmfnode]
|
||||
@ -1236,7 +1236,7 @@ def wraprepo(repo):
|
||||
break
|
||||
|
||||
fullrevisions = []
|
||||
for chunks in groupedrevdata.itervalues():
|
||||
for chunks in pycompat.itervalues(groupedrevdata):
|
||||
chunkcount = chunks[0][2]
|
||||
if chunkcount == 1:
|
||||
fullrevisions.append(chunks[0])
|
||||
@ -1305,7 +1305,9 @@ def wraprepo(repo):
|
||||
)
|
||||
|
||||
# Compute new bookmarks, and delete old bookmarks
|
||||
newbookmarks = dict((k, hex(v)) for k, v in self._bookmarks.iteritems())
|
||||
newbookmarks = dict(
|
||||
(k, hex(v)) for k, v in pycompat.iteritems(self._bookmarks)
|
||||
)
|
||||
oldbookmarks = []
|
||||
cursor.execute(
|
||||
"SELECT name, value FROM revision_references "
|
||||
@ -1338,7 +1340,7 @@ def wraprepo(repo):
|
||||
values.append(reponame)
|
||||
values.append(head)
|
||||
|
||||
for k, v in newbookmarks.iteritems():
|
||||
for k, v in pycompat.iteritems(newbookmarks):
|
||||
tmpl.append("(%s, 'bookmarks', %s, %s)")
|
||||
values.append(repo.sqlreponame)
|
||||
values.append(k)
|
||||
@ -1794,7 +1796,7 @@ def addentries(repo, queue, transaction, ignoreexisting=False):
|
||||
if revlog.dfh and not revlog.dfh.closed:
|
||||
revlog.dfh.flush()
|
||||
|
||||
for filelog in revlogs.itervalues():
|
||||
for filelog in pycompat.itervalues(revlogs):
|
||||
flushrevlog(filelog)
|
||||
|
||||
if manifest:
|
||||
|
@ -1260,7 +1260,7 @@ def _finishhistedit(ui, repo, state, fm):
|
||||
|
||||
mapping, tmpnodes, created, ntm = processreplacement(state)
|
||||
if mapping:
|
||||
for prec, succs in mapping.iteritems():
|
||||
for prec, succs in pycompat.iteritems(mapping):
|
||||
if not succs:
|
||||
ui.debug("histedit: %s is dropped\n" % node.short(prec))
|
||||
else:
|
||||
@ -1296,7 +1296,7 @@ def _finishhistedit(ui, repo, state, fm):
|
||||
nodechanges = fd(
|
||||
{
|
||||
hf(oldn): fl([hf(n) for n in newn], name="node")
|
||||
for oldn, newn in mapping.iteritems()
|
||||
for oldn, newn in pycompat.iteritems(mapping)
|
||||
},
|
||||
key="oldnode",
|
||||
value="newnodes",
|
||||
@ -1505,7 +1505,7 @@ def ruleeditor(repo, ui, actions, editcomment=""):
|
||||
tsum = summary[len(fword) + 1 :].lstrip()
|
||||
# safe but slow: reverse iterate over the actions so we
|
||||
# don't clash on two commits having the same summary
|
||||
for na, l in reversed(list(newact.iteritems())):
|
||||
for na, l in reversed(list(pycompat.iteritems(newact))):
|
||||
actx = repo[na.node]
|
||||
asum = _getsummary(actx)
|
||||
if asum == tsum:
|
||||
@ -1518,7 +1518,7 @@ def ruleeditor(repo, ui, actions, editcomment=""):
|
||||
|
||||
# copy over and flatten the new list
|
||||
actions = []
|
||||
for na, l in newact.iteritems():
|
||||
for na, l in pycompat.iteritems(newact):
|
||||
actions.append(na)
|
||||
actions += l
|
||||
|
||||
|
@ -6,7 +6,7 @@
|
||||
import json
|
||||
import struct
|
||||
|
||||
from edenscm.mercurial import error, extensions, node as nodemod
|
||||
from edenscm.mercurial import error, extensions, node as nodemod, pycompat
|
||||
from edenscm.mercurial.i18n import _
|
||||
|
||||
|
||||
@ -53,7 +53,7 @@ def saveremotebookmarks(repo, newbookmarks, remote):
|
||||
del newbookmarks[rname]
|
||||
bookmarks[rname] = hexnode
|
||||
|
||||
for bookmark, hexnode in newbookmarks.iteritems():
|
||||
for bookmark, hexnode in pycompat.iteritems(newbookmarks):
|
||||
bookmarks[bookmark] = hexnode
|
||||
remotenamesext.saveremotenames(repo, {remotepath: bookmarks})
|
||||
|
||||
@ -63,7 +63,7 @@ def savelocalbookmarks(repo, bookmarks):
|
||||
return
|
||||
with repo.wlock(), repo.lock(), repo.transaction("bookmark") as tr:
|
||||
changes = []
|
||||
for scratchbook, node in bookmarks.iteritems():
|
||||
for scratchbook, node in pycompat.iteritems(bookmarks):
|
||||
changectx = repo[node]
|
||||
changes.append((scratchbook, changectx.node()))
|
||||
repo._bookmarks.applychanges(repo, tr, changes)
|
||||
@ -109,7 +109,7 @@ def deleteremotebookmarks(ui, repo, path, names):
|
||||
|
||||
def encodebookmarks(bookmarks):
|
||||
encoded = {}
|
||||
for bookmark, node in bookmarks.iteritems():
|
||||
for bookmark, node in pycompat.iteritems(bookmarks):
|
||||
encoded[bookmark] = node
|
||||
dumped = json.dumps(encoded)
|
||||
result = struct.pack(">i", len(dumped)) + dumped
|
||||
@ -123,7 +123,7 @@ def decodebookmarks(stream):
|
||||
# python json module always returns unicode strings. We need to convert
|
||||
# it back to bytes string
|
||||
result = {}
|
||||
for bookmark, node in unicodedict.iteritems():
|
||||
for bookmark, node in pycompat.iteritems(unicodedict):
|
||||
bookmark = bookmark.encode("ascii")
|
||||
node = node.encode("ascii")
|
||||
result[bookmark] = node
|
||||
|
@ -14,6 +14,7 @@ from edenscm.mercurial import (
|
||||
exchange,
|
||||
extensions,
|
||||
mutation,
|
||||
pycompat,
|
||||
revsetlang,
|
||||
util,
|
||||
)
|
||||
@ -117,7 +118,7 @@ def getscratchbranchparts(
|
||||
parts.append(
|
||||
bundle2.bundlepart(
|
||||
constants.scratchbranchparttype.upper(),
|
||||
advisoryparams=params.iteritems(),
|
||||
advisoryparams=pycompat.iteritems(params),
|
||||
data=cg,
|
||||
)
|
||||
)
|
||||
@ -244,7 +245,7 @@ def _bundlesetup():
|
||||
decodedbookmarks = bookmarks.decodebookmarks(part)
|
||||
toinsert = {}
|
||||
todelete = []
|
||||
for bookmark, node in decodedbookmarks.iteritems():
|
||||
for bookmark, node in pycompat.iteritems(decodedbookmarks):
|
||||
if node:
|
||||
toinsert[bookmark] = node
|
||||
else:
|
||||
|
@ -23,6 +23,7 @@ from edenscm.mercurial import (
|
||||
peer,
|
||||
phases,
|
||||
pushkey,
|
||||
pycompat,
|
||||
scmutil,
|
||||
ui as uimod,
|
||||
visibility,
|
||||
@ -316,7 +317,7 @@ def _bookmarks(orig, ui, repo, *names, **opts):
|
||||
def _showbookmarks(ui, remotebookmarks, **opts):
|
||||
# Copy-paste from commands.py
|
||||
fm = ui.formatter("bookmarks", opts)
|
||||
for bmark, n in sorted(remotebookmarks.iteritems()):
|
||||
for bmark, n in sorted(pycompat.iteritems(remotebookmarks)):
|
||||
fm.startitem()
|
||||
if not ui.quiet:
|
||||
fm.plain(" ")
|
||||
@ -520,7 +521,7 @@ def _update(orig, ui, repo, node=None, rev=None, **opts):
|
||||
"commitcloud.meta": ("date:", "summary:", "author:"),
|
||||
"commitcloud.commitcloud": ("#commitcloud",),
|
||||
}
|
||||
for label, keywords in replacements.iteritems():
|
||||
for label, keywords in pycompat.iteritems(replacements):
|
||||
for kw in keywords:
|
||||
remoteerror = remoteerror.replace(kw, ui.label(kw, label))
|
||||
|
||||
|
@ -14,6 +14,7 @@ from edenscm.mercurial import (
|
||||
mutation,
|
||||
node as nodemod,
|
||||
pushkey,
|
||||
pycompat,
|
||||
util,
|
||||
wireproto,
|
||||
)
|
||||
@ -44,7 +45,7 @@ def extsetup(ui):
|
||||
|
||||
def wireprotolistkeyspatterns(repo, proto, namespace, patterns):
|
||||
patterns = wireproto.decodelist(patterns)
|
||||
d = repo.listkeys(encoding.tolocal(namespace), patterns).iteritems()
|
||||
d = pycompat.iteritems(repo.listkeys(encoding.tolocal(namespace), patterns))
|
||||
return pushkey.encodekeys(d)
|
||||
|
||||
|
||||
|
@ -22,6 +22,7 @@ from edenscm.mercurial import (
|
||||
error,
|
||||
hg,
|
||||
patch,
|
||||
pycompat,
|
||||
registrar,
|
||||
scmutil,
|
||||
util,
|
||||
@ -99,7 +100,7 @@ def debugfillinfinitepushmetadata(ui, repo, **opts):
|
||||
nodesmetadata[node] = output
|
||||
|
||||
with index:
|
||||
for node, metadata in nodesmetadata.iteritems():
|
||||
for node, metadata in pycompat.iteritems(nodesmetadata):
|
||||
dumped = json.dumps(metadata, sort_keys=True)
|
||||
index.saveoptionaljsonmetadata(node, dumped)
|
||||
|
||||
|
@ -25,6 +25,7 @@ from edenscm.mercurial import (
|
||||
mutation,
|
||||
node as nodemod,
|
||||
phases,
|
||||
pycompat,
|
||||
util,
|
||||
wireproto,
|
||||
)
|
||||
@ -113,7 +114,7 @@ def localrepolistkeys(orig, self, namespace, patterns=None):
|
||||
if pattern.endswith("*"):
|
||||
pattern = "re:^" + pattern[:-1] + ".*"
|
||||
kind, pat, matcher = util.stringmatcher(pattern)
|
||||
for bookmark, node in bookmarks.iteritems():
|
||||
for bookmark, node in pycompat.iteritems(bookmarks):
|
||||
if matcher(bookmark):
|
||||
results[bookmark] = node
|
||||
return results
|
||||
@ -376,7 +377,7 @@ def _generateoutputparts(
|
||||
if part.type == "changegroup":
|
||||
haschangegroup = True
|
||||
newpart = bundle2.bundlepart(part.type, data=part.read())
|
||||
for key, value in part.params.iteritems():
|
||||
for key, value in pycompat.iteritems(part.params):
|
||||
newpart.addparam(key, value)
|
||||
parts.append(newpart)
|
||||
|
||||
@ -525,7 +526,7 @@ def processparts(orig, repo, op, unbundler):
|
||||
# differs from previous behavior, we need to put it behind a
|
||||
# config flag for incremental rollout.
|
||||
bundlepart = bundle2.bundlepart(part.type, data=part.read())
|
||||
for key, value in part.params.iteritems():
|
||||
for key, value in pycompat.iteritems(part.params):
|
||||
bundlepart.addparam(key, value)
|
||||
|
||||
# Certain parts require a response
|
||||
|
@ -10,7 +10,7 @@ import time
|
||||
import warnings
|
||||
|
||||
import mysql.connector
|
||||
from edenscm.mercurial import error, util
|
||||
from edenscm.mercurial import error, pycompat, util
|
||||
from edenscm.mercurial.i18n import _
|
||||
|
||||
|
||||
@ -208,7 +208,8 @@ class sqlindex(object):
|
||||
self.sqlconnect()
|
||||
|
||||
data = [
|
||||
(bookmark, node, self.reponame) for bookmark, node in bookmarks.iteritems()
|
||||
(bookmark, node, self.reponame)
|
||||
for bookmark, node in pycompat.iteritems(bookmarks)
|
||||
]
|
||||
|
||||
self.sqlcursor.executemany(
|
||||
@ -420,7 +421,7 @@ class sqlindex(object):
|
||||
|
||||
data = [
|
||||
(bookmark, node, hashlib.sha1(bookmark).hexdigest(), self.reponame)
|
||||
for (bookmark, node) in bookmarks.iteritems()
|
||||
for (bookmark, node) in pycompat.iteritems(bookmarks)
|
||||
]
|
||||
|
||||
self.sqlcursor.executemany(
|
||||
|
@ -112,7 +112,7 @@ def recordbookmarks(orig, store, fp):
|
||||
repo = store._repo
|
||||
if util.safehasattr(repo, "journal"):
|
||||
oldmarks = bookmarks.bmstore(repo)
|
||||
for mark, value in store.iteritems():
|
||||
for mark, value in pycompat.iteritems(store):
|
||||
oldvalue = oldmarks.get(mark, node.nullid)
|
||||
if value != oldvalue:
|
||||
repo.journal.record(bookmarktype, mark, oldvalue, value)
|
||||
@ -139,7 +139,7 @@ def _mergeentriesiter(*iterables, **kwargs):
|
||||
pass
|
||||
|
||||
while iterable_map:
|
||||
value, key, it = order(iterable_map.itervalues())
|
||||
value, key, it = order(pycompat.itervalues(iterable_map))
|
||||
yield value
|
||||
try:
|
||||
iterable_map[key][0] = next(it)
|
||||
|
@ -9,7 +9,7 @@ from __future__ import absolute_import
|
||||
|
||||
import re
|
||||
|
||||
from edenscm.mercurial import error
|
||||
from edenscm.mercurial import error, pycompat
|
||||
from edenscm.mercurial.i18n import _
|
||||
|
||||
|
||||
@ -33,7 +33,7 @@ class gitlfspointer(dict):
|
||||
|
||||
def serialize(self):
|
||||
sortkeyfunc = lambda x: (x[0] != "version", x)
|
||||
items = sorted(self.validate().iteritems(), key=sortkeyfunc)
|
||||
items = sorted(pycompat.iteritems(self.validate()), key=sortkeyfunc)
|
||||
return "".join("%s %s\n" % (k, v) for k, v in items)
|
||||
|
||||
def oid(self):
|
||||
@ -45,7 +45,7 @@ class gitlfspointer(dict):
|
||||
def hgmeta(self):
|
||||
"""Translate LFS metadata to hg filelog metadata dictionary"""
|
||||
hgmeta = {}
|
||||
for k, v in self.iteritems():
|
||||
for k, v in pycompat.iteritems(self):
|
||||
if k.startswith("x-hg-"):
|
||||
name = k[len("x-hg-") :]
|
||||
hgmeta[name] = v
|
||||
@ -64,7 +64,7 @@ class gitlfspointer(dict):
|
||||
def validate(self):
|
||||
"""raise InvalidPointer on error. return self if there is no error"""
|
||||
requiredcount = 0
|
||||
for k, v in self.iteritems():
|
||||
for k, v in pycompat.iteritems(self):
|
||||
if k in self._requiredre:
|
||||
if not self._requiredre[k].match(v):
|
||||
raise InvalidPointer(_("unexpected value: %s=%r") % (k, v))
|
||||
|
@ -9,7 +9,7 @@ from __future__ import absolute_import
|
||||
|
||||
import hashlib
|
||||
|
||||
from edenscm.mercurial import error, filelog, revlog, util
|
||||
from edenscm.mercurial import error, filelog, pycompat, revlog, util
|
||||
from edenscm.mercurial.i18n import _
|
||||
from edenscm.mercurial.node import bin, nullid, short
|
||||
|
||||
@ -81,7 +81,7 @@ def writetostore(self, text):
|
||||
|
||||
# translate hg filelog metadata to lfs metadata with "x-hg-" prefix
|
||||
if hgmeta is not None:
|
||||
for k, v in hgmeta.iteritems():
|
||||
for k, v in pycompat.iteritems(hgmeta):
|
||||
metadata["x-hg-%s" % k] = v
|
||||
|
||||
rawtext = metadata.serialize()
|
||||
|
@ -207,7 +207,7 @@ class linkrevdbreadonly(object):
|
||||
# "close" in a "finally" block and it probably does not want close() to
|
||||
# raise an exception there.
|
||||
if util.safehasattr(self, "_dbs"):
|
||||
for db in self._dbs.itervalues():
|
||||
for db in pycompat.itervalues(self._dbs):
|
||||
db.close()
|
||||
self._dbs.clear()
|
||||
|
||||
@ -451,15 +451,15 @@ def debugverifylinkrevcache(ui, repo, *pats, **opts):
|
||||
db = repo._linkrevcache
|
||||
paths = dict(db._getdb(db._pathdbname))
|
||||
nodes = dict(db._getdb(db._nodedbname))
|
||||
pathsrev = dict((v, k) for k, v in paths.iteritems())
|
||||
nodesrev = dict((v, k) for k, v in nodes.iteritems())
|
||||
pathsrev = dict((v, k) for k, v in pycompat.iteritems(paths))
|
||||
nodesrev = dict((v, k) for k, v in pycompat.iteritems(nodes))
|
||||
lrevs = dict(db._getdb(db._linkrevdbname))
|
||||
|
||||
readfilelog = ui.configbool("linkrevcache", "readfilelog", True)
|
||||
|
||||
total = len(lrevs)
|
||||
with progress.bar(ui, _("verifying"), total=total) as prog:
|
||||
for i, (k, v) in enumerate(lrevs.iteritems()):
|
||||
for i, (k, v) in enumerate(pycompat.iteritems(lrevs)):
|
||||
prog.value = i
|
||||
pathid, nodeid = k.split("\0")
|
||||
path = pathsrev[pathid]
|
||||
|
@ -39,7 +39,7 @@ from __future__ import absolute_import
|
||||
|
||||
import time
|
||||
|
||||
from edenscm.mercurial import context, error, mutation
|
||||
from edenscm.mercurial import context, error, mutation, pycompat
|
||||
from edenscm.mercurial.i18n import _
|
||||
from edenscm.mercurial.node import hex, nullid, nullrev
|
||||
|
||||
@ -150,7 +150,7 @@ class pushrequest(object):
|
||||
filechanges = {}
|
||||
examinepaths = set(files.keys())
|
||||
|
||||
for path, info in files.iteritems():
|
||||
for path, info in pycompat.iteritems(files):
|
||||
if info.deleted:
|
||||
filechanges[path] = None
|
||||
else:
|
||||
@ -215,7 +215,7 @@ class pushrequest(object):
|
||||
"""
|
||||
mctx = ctx.manifestctx()
|
||||
conflicts = []
|
||||
for path, expected in self.fileconditions.iteritems():
|
||||
for path, expected in pycompat.iteritems(self.fileconditions):
|
||||
try:
|
||||
actual = mctx.find(path)
|
||||
except KeyError:
|
||||
|
@ -361,7 +361,7 @@ def _makerage(ui, repo, **opts):
|
||||
"shared": lambda category: shallowutil.getcachepackpath(repo, category),
|
||||
}
|
||||
|
||||
for loc, getpath in packlocs.iteritems():
|
||||
for loc, getpath in pycompat.iteritems(packlocs):
|
||||
for category in constants.ALL_CATEGORIES:
|
||||
path = getpath(category)
|
||||
detailed.append(
|
||||
|
@ -146,7 +146,7 @@ def _ctxdesc(ctx):
|
||||
desc = '%s "%s"' % (ctx, ctx.description().split("\n", 1)[0])
|
||||
repo = ctx.repo()
|
||||
names = []
|
||||
for nsname, ns in repo.names.iteritems():
|
||||
for nsname, ns in pycompat.iteritems(repo.names):
|
||||
if nsname == "branches":
|
||||
continue
|
||||
names.extend(ns.names(repo, ctx.node()))
|
||||
@ -230,7 +230,7 @@ class rebaseruntime(object):
|
||||
f.write("0\n") # used to be the "keepbranches" flag.
|
||||
f.write("%s\n" % (self.activebookmark or ""))
|
||||
destmap = self.destmap
|
||||
for d, v in self.state.iteritems():
|
||||
for d, v in pycompat.iteritems(self.state):
|
||||
oldrev = repo[d].hex()
|
||||
if v >= 0:
|
||||
newrev = repo[v].hex()
|
||||
@ -486,7 +486,7 @@ class rebaseruntime(object):
|
||||
# if we fail before the transaction closes.
|
||||
self.storestatus()
|
||||
|
||||
cands = [k for k, v in self.state.iteritems() if v == revtodo]
|
||||
cands = [k for k, v in pycompat.iteritems(self.state) if v == revtodo]
|
||||
total = len(cands)
|
||||
pos = 0
|
||||
with progress.bar(ui, _("rebasing"), _("changesets"), total) as prog:
|
||||
@ -763,7 +763,7 @@ class rebaseruntime(object):
|
||||
)
|
||||
if newnode is not None:
|
||||
newrev = repo[newnode].rev()
|
||||
for oldrev in self.state.iterkeys():
|
||||
for oldrev in pycompat.iterkeys(self.state):
|
||||
self.state[oldrev] = newrev
|
||||
|
||||
# restore original working directory
|
||||
@ -1180,7 +1180,7 @@ def _definedestmap(
|
||||
# emulate the old behavior, showing "nothing to rebase" (a better
|
||||
# behavior may be abort with "cannot find branching point" error)
|
||||
bpbase.clear()
|
||||
for bp, bs in bpbase.iteritems(): # calculate roots
|
||||
for bp, bs in pycompat.iteritems(bpbase): # calculate roots
|
||||
roots += list(repo.revs("children(%d) & ancestors(%ld)", bp, bs))
|
||||
|
||||
rebaseset = repo.revs("%ld::", roots)
|
||||
@ -1853,7 +1853,9 @@ def needupdate(repo, state):
|
||||
return False
|
||||
|
||||
# We should be standing on the first as-of-yet unrebased commit.
|
||||
firstunrebased = min([old for old, new in state.iteritems() if new == nullrev])
|
||||
firstunrebased = min(
|
||||
[old for old, new in pycompat.iteritems(state) if new == nullrev]
|
||||
)
|
||||
if firstunrebased in parents:
|
||||
return True
|
||||
|
||||
@ -2172,7 +2174,7 @@ def summaryhook(ui, repo):
|
||||
msg = _('rebase: (use "hg rebase --abort" to clear broken state)\n')
|
||||
ui.write(msg)
|
||||
return
|
||||
numrebased = len([i for i in state.itervalues() if i >= 0])
|
||||
numrebased = len([i for i in pycompat.itervalues(state) if i >= 0])
|
||||
# i18n: column positioning for "hg summary"
|
||||
ui.write(
|
||||
_("rebase: %s, %s (rebase --continue)\n")
|
||||
|
@ -199,6 +199,7 @@ from edenscm.mercurial import (
|
||||
merge,
|
||||
patch,
|
||||
progress,
|
||||
pycompat,
|
||||
registrar,
|
||||
repair,
|
||||
repoview,
|
||||
@ -538,7 +539,7 @@ def onetimeclientsetup(ui):
|
||||
if shallowrepo.requirement in repo.requirements:
|
||||
files = []
|
||||
sparsematch = repo.maybesparsematch(mctx.rev())
|
||||
for f, (m, actionargs, msg) in actions.iteritems():
|
||||
for f, (m, actionargs, msg) in pycompat.iteritems(actions):
|
||||
if sparsematch and not sparsematch(f):
|
||||
continue
|
||||
if m in ("c", "dc", "cm"):
|
||||
|
@ -13,7 +13,7 @@ import sys
|
||||
# pyre-fixme[21]: Could not find `bindings`.
|
||||
from bindings import revisionstore
|
||||
from edenscm.hgext import extutil
|
||||
from edenscm.mercurial import error, filelog, progress, revlog, util
|
||||
from edenscm.mercurial import error, filelog, progress, pycompat, revlog, util
|
||||
from edenscm.mercurial.i18n import _
|
||||
from edenscm.mercurial.node import bin, hex, nullid, short
|
||||
|
||||
@ -213,7 +213,7 @@ def verifyremotefilelog(ui, path, **opts):
|
||||
continue
|
||||
filepath = os.path.join(root, file)
|
||||
size, firstnode, mapping = parsefileblob(filepath, decompress)
|
||||
for p1, p2, linknode, copyfrom in mapping.itervalues():
|
||||
for p1, p2, linknode, copyfrom in pycompat.itervalues(mapping):
|
||||
if linknode == nullid:
|
||||
actualpath = os.path.relpath(root, path)
|
||||
key = fileserverclient.getcachekey("reponame", actualpath, file)
|
||||
|
@ -24,6 +24,7 @@ from edenscm.mercurial import (
|
||||
httppeer,
|
||||
perftrace,
|
||||
progress,
|
||||
pycompat,
|
||||
revlog,
|
||||
sshpeer,
|
||||
util,
|
||||
@ -289,7 +290,7 @@ class getpackclient(object):
|
||||
|
||||
# Issue request
|
||||
pipeo = shallowutil.trygetattr(remote, ("_pipeo", "pipeo"))
|
||||
for filename, nodes in grouped.iteritems():
|
||||
for filename, nodes in pycompat.iteritems(grouped):
|
||||
filenamelen = struct.pack(constants.FILENAMESTRUCT, len(filename))
|
||||
countlen = struct.pack(constants.PACKREQUESTCOUNTSTRUCT, len(nodes))
|
||||
rawnodes = "".join(n for n in nodes)
|
||||
|
@ -12,7 +12,7 @@ import os
|
||||
|
||||
# pyre-fixme[21]: Could not find `bindings`.
|
||||
from bindings import revisionstore
|
||||
from edenscm.mercurial import ancestor, error, filelog, mdiff, revlog, util
|
||||
from edenscm.mercurial import ancestor, error, filelog, mdiff, pycompat, revlog, util
|
||||
from edenscm.mercurial.i18n import _
|
||||
from edenscm.mercurial.node import bin, nullid
|
||||
|
||||
@ -192,7 +192,6 @@ class remotefilelog(object):
|
||||
|
||||
__bool__ = __nonzero__
|
||||
|
||||
|
||||
def __len__(self):
|
||||
if self.filename == ".hgtags":
|
||||
# The length of .hgtags is used to fast path tag checking.
|
||||
@ -383,7 +382,7 @@ class remotefilelog(object):
|
||||
return nullid
|
||||
|
||||
revmap, parentfunc = self._buildrevgraph(a, b)
|
||||
nodemap = dict(((v, k) for (k, v) in revmap.iteritems()))
|
||||
nodemap = dict(((v, k) for (k, v) in pycompat.iteritems(revmap)))
|
||||
|
||||
ancs = ancestor.ancestors(parentfunc, revmap[a], revmap[b])
|
||||
if ancs:
|
||||
@ -398,7 +397,7 @@ class remotefilelog(object):
|
||||
return nullid
|
||||
|
||||
revmap, parentfunc = self._buildrevgraph(a, b)
|
||||
nodemap = dict(((v, k) for (k, v) in revmap.iteritems()))
|
||||
nodemap = dict(((v, k) for (k, v) in pycompat.iteritems(revmap)))
|
||||
|
||||
ancs = ancestor.commonancestorsheads(parentfunc, revmap[a], revmap[b])
|
||||
return map(nodemap.__getitem__, ancs)
|
||||
@ -414,7 +413,7 @@ class remotefilelog(object):
|
||||
parentsmap = collections.defaultdict(list)
|
||||
allparents = set()
|
||||
for mapping in (amap, bmap):
|
||||
for node, pdata in mapping.iteritems():
|
||||
for node, pdata in pycompat.iteritems(mapping):
|
||||
parents = parentsmap[node]
|
||||
p1, p2, linknode, copyfrom = pdata
|
||||
# Don't follow renames (copyfrom).
|
||||
@ -430,7 +429,7 @@ class remotefilelog(object):
|
||||
parentrevs = collections.defaultdict(list)
|
||||
revmap = {}
|
||||
queue = collections.deque(
|
||||
((None, n) for n in parentsmap.iterkeys() if n not in allparents)
|
||||
((None, n) for n in pycompat.iterkeys(parentsmap) if n not in allparents)
|
||||
)
|
||||
while queue:
|
||||
prevrev, current = queue.pop()
|
||||
|
@ -496,7 +496,7 @@ def getpack(repo, proto, args, version=1):
|
||||
starttime = time.time()
|
||||
|
||||
# Sort the files by name, so we provide deterministic results
|
||||
for filename, nodes in sorted(files.iteritems()):
|
||||
for filename, nodes in sorted(pycompat.iteritems(files)):
|
||||
args.append([filename, [hex(n) for n in nodes]])
|
||||
fl = repo.file(filename)
|
||||
|
||||
|
@ -170,7 +170,7 @@ class shallowcg1packer(changegroup.cg1packer):
|
||||
if filestosend is not NoFiles:
|
||||
mflog = repo.manifestlog
|
||||
with progress.bar(repo.ui, _("manifests"), total=len(mfs)) as prog:
|
||||
for mfnode, clnode in mfs.iteritems():
|
||||
for mfnode, clnode in pycompat.iteritems(mfs):
|
||||
prog.value += 1
|
||||
if filestosend == LocalFiles and not containslocalfiles(
|
||||
mfnode
|
||||
@ -190,7 +190,7 @@ class shallowcg1packer(changegroup.cg1packer):
|
||||
p1node = tmfl[mfnode].parents[0]
|
||||
p1ctx = tmfl[p1node]
|
||||
|
||||
diff = p1ctx.read().diff(mfctx.read()).iteritems()
|
||||
diff = pycompat.iteritems(p1ctx.read().diff(mfctx.read()))
|
||||
for filename, ((anode, aflag), (bnode, bflag)) in diff:
|
||||
if bnode is not None:
|
||||
fclnodes = fnodes.setdefault(filename, {})
|
||||
@ -252,7 +252,7 @@ class shallowcg1packer(changegroup.cg1packer):
|
||||
linkrevnodes = linknodes(filerevlog, fname)
|
||||
# Normally we'd prune the linkrevnodes first,
|
||||
# but that would perform the server fetches one by one.
|
||||
for fnode, cnode in list(linkrevnodes.iteritems()):
|
||||
for fnode, cnode in list(pycompat.iteritems(linkrevnodes)):
|
||||
# Adjust linknodes so remote file revisions aren't sent
|
||||
if filestosend == LocalFiles:
|
||||
if phasecache.phase(
|
||||
|
@ -140,7 +140,7 @@ def sumdicts(*dicts):
|
||||
"""
|
||||
result = defaultdict(lambda: 0)
|
||||
for dict in dicts:
|
||||
for k, v in dict.iteritems():
|
||||
for k, v in pycompat.iteritems(dict):
|
||||
result[k] += v
|
||||
return result
|
||||
|
||||
@ -148,7 +148,7 @@ def sumdicts(*dicts):
|
||||
def prefixkeys(dict, prefix):
|
||||
"""Returns ``dict`` with ``prefix`` prepended to all its keys."""
|
||||
result = {}
|
||||
for k, v in dict.iteritems():
|
||||
for k, v in pycompat.iteritems(dict):
|
||||
result[prefix + k] = v
|
||||
return result
|
||||
|
||||
@ -197,7 +197,7 @@ def _buildpackmeta(metadict):
|
||||
length limit is exceeded
|
||||
"""
|
||||
metabuf = ""
|
||||
for k, v in sorted((metadict or {}).iteritems()):
|
||||
for k, v in sorted(pycompat.iteritems((metadict or {}))):
|
||||
if len(k) != 1:
|
||||
raise error.ProgrammingError("packmeta: illegal key: %s" % k)
|
||||
if len(v) > 0xFFFE:
|
||||
@ -227,7 +227,7 @@ def buildpackmeta(metadict):
and METAKEYFLAG will be dropped if its value is 0.
"""
newmeta = {}
for k, v in (metadict or {}).iteritems():
for k, v in pycompat.iteritems((metadict or {})):
expectedtype = _metaitemtypes.get(k, (bytes,))
if not isinstance(v, expectedtype):
raise error.ProgrammingError("packmeta: wrong type of key %s" % k)
@ -248,7 +248,7 @@ def parsepackmeta(metabuf):
integers.
"""
metadict = _parsepackmeta(metabuf)
for k, v in metadict.iteritems():
for k, v in pycompat.iteritems(metadict):
if k in _metaitemtypes and int in _metaitemtypes[k]:
metadict[k] = bin2int(v)
return metadict
@ -45,6 +45,7 @@ from edenscm.mercurial import (
mutation,
namespaces,
obsutil,
pycompat,
registrar,
repair,
repoview,
@ -255,7 +256,7 @@ def _trypullremotebookmark(mayberemotebookmark, repo, ui):
_("`%s` not found: assuming it is a remote bookmark " "and trying to pull it\n")
% mayberemotebookmark
)
sourcerenames = dict((v, k) for k, v in _getrenames(ui).iteritems())
sourcerenames = dict((v, k) for k, v in pycompat.iteritems(_getrenames(ui)))
remote, bookmarkname = splitremotename(mayberemotebookmark)
paths = dict((path, url) for path, url in ui.configitems("paths"))
if remote in sourcerenames:
@ -314,12 +315,12 @@ def updateaccessedbookmarks(repo, remotepath, bookmarks):
newbookmarks[rname] = node

nodemap = repo.unfiltered().changelog.nodemap
for rname, node in bookmarks.iteritems():
for rname, node in pycompat.iteritems(bookmarks):
# if the node is known locally, update the old value or add new
if bin(node) in nodemap:
newbookmarks[rname] = node

for rname, node in newbookmarks.iteritems():
for rname, node in pycompat.iteritems(newbookmarks):
totalaccessednames += 1
_writesingleremotename(f, remotepath, "bookmarks", rname, node)
@ -710,7 +711,7 @@ class lazyremotenamedict(Mapping):
self._load()
if resolvenodes is None:
resolvenodes = self._repo.ui.configbool("remotenames", "resolvenodes")
for k, vtup in self.potentialentries.iteritems():
for k, vtup in pycompat.iteritems(self.potentialentries):
if resolvenodes:
self._fetchandcache(k)
yield (k, [bin(vtup[0])])
@ -751,7 +752,7 @@ class remotenames(dict):
# Only supported for bookmarks
bmchanges = changes.get("bookmarks", {})
remotepathbooks = {}
for remotename, node in bmchanges.iteritems():
for remotename, node in pycompat.iteritems(bmchanges):
path, name = splitremotename(remotename)
remotepathbooks.setdefault(path, {})[name] = node
@ -764,7 +765,7 @@ class remotenames(dict):
if not self._node2marks:
mark2nodes = self.mark2nodes()
self._node2marks = {}
for name, node in mark2nodes.iteritems():
for name, node in pycompat.iteritems(mark2nodes):
self._node2marks.setdefault(node[0], []).append(name)
return self._node2marks
@ -773,7 +774,7 @@ class remotenames(dict):
mark2nodes = self.mark2nodes()
self._hoist2nodes = {}
hoist += "/"
for name, node in mark2nodes.iteritems():
for name, node in pycompat.iteritems(mark2nodes):
if name.startswith(hoist):
name = name[len(hoist) :]
self._hoist2nodes[name] = node
@ -784,7 +785,7 @@ class remotenames(dict):
mark2nodes = self.mark2nodes()
self._node2hoists = {}
hoist += "/"
for name, node in mark2nodes.iteritems():
for name, node in pycompat.iteritems(mark2nodes):
if name.startswith(hoist):
name = name[len(hoist) :]
self._node2hoists.setdefault(node[0], []).append(name)
@ -797,7 +798,7 @@ class remotenames(dict):
if not self._node2branch:
branch2nodes = self.branch2nodes()
self._node2branch = {}
for name, nodes in branch2nodes.iteritems():
for name, nodes in pycompat.iteritems(branch2nodes):
for node in nodes:
self._node2branch[node] = [name]
return self._node2branch
@ -1255,7 +1256,7 @@ def _pushrevs(repo, ui, rev):


def expullcmd(orig, ui, repo, source="default", **opts):
revrenames = dict((v, k) for k, v in _getrenames(ui).iteritems())
revrenames = dict((v, k) for k, v in pycompat.iteritems(_getrenames(ui)))
source = revrenames.get(source, source)

if opts.get("update") and opts.get("rebase"):
@ -1328,7 +1329,7 @@ def expushcmd(orig, ui, repo, dest=None, **opts):
paths = dict((path, url) for path, url in ui.configitems("paths"))
# XXX T58629567: The following line triggers an infinite loop in pyre, let's disable it for now.
if not typing.TYPE_CHECKING:
revrenames = dict((v, k) for k, v in _getrenames(ui).iteritems())
revrenames = dict((v, k) for k, v in pycompat.iteritems(_getrenames(ui)))

origdest = dest
defaultpush = ui.paths.get("default-push") or ui.paths.get("default")
@ -1477,7 +1478,7 @@ def _readtracking(repo):
def _writetracking(repo, tracking):
with repo.wlock():
data = ""
for book, track in tracking.iteritems():
for book, track in pycompat.iteritems(tracking):
data += "%s %s\n" % (book, track)
vfs = repo.sharedvfs
vfs.write("bookmarks.tracking", data)
@ -1588,7 +1589,7 @@ def displaylocalbookmarks(ui, repo, opts, fm):
distances = readdistancecache(repo)
nq = not ui.quiet

for bmark, n in sorted(marks.iteritems()):
for bmark, n in sorted(pycompat.iteritems(marks)):
current = repo._activebookmark
if bmark == current:
prefix, label = "*", "bookmarks.current bookmarks.active"
@ -1679,7 +1680,7 @@ def _getremotepeer(ui, repo, opts):

def _showfetchedbookmarks(ui, remote, bookmarks, opts, fm):
remotepath = activepath(ui, remote)
for bmark, n in sorted(bookmarks.iteritems()):
for bmark, n in sorted(pycompat.iteritems(bookmarks)):
fm.startitem()
if not ui.quiet:
fm.plain(" ")
@ -1783,7 +1784,9 @@ def _getrenames(ui):

def expandscheme(ui, uri):
"""For a given uri, expand the scheme for it"""
urischemes = [s for s in schemes.schemes.iterkeys() if uri.startswith("%s://" % s)]
urischemes = [
s for s in pycompat.iterkeys(schemes.schemes) if uri.startswith("%s://" % s)
]
for s in urischemes:
# TODO: refactor schemes so we don't
# duplicate this logic
@ -1956,9 +1959,9 @@ def saveremotenames(repo, remotebookmarks, override=True):

journal = []
nm = repo.unfiltered().changelog.nodemap
for remote, rmbookmarks in remotebookmarks.iteritems():
for remote, rmbookmarks in pycompat.iteritems(remotebookmarks):
rmbookmarks = {} if rmbookmarks is None else rmbookmarks
for name, node in rmbookmarks.iteritems():
for name, node in pycompat.iteritems(rmbookmarks):
oldnode = oldbooks.get((remote, name), hex(nullid))
newnode = node
if not bin(newnode) in nm:
@ -2016,7 +2019,7 @@ def writedistancecache(repo, distance):
try:
cachevfs = shareawarecachevfs(repo)
f = cachevfs("distance", "w", atomictemp=True)
for k, v in distance.iteritems():
for k, v in pycompat.iteritems(distance):
f.write("%s %d %d\n" % (k, v[0], v[1]))
except (IOError, OSError):
pass
@ -2093,7 +2096,7 @@ def precachedistance(repo):
distances = {}
if repo.ui.configbool("remotenames", "precachedistance"):
distances = {}
for bmark, tracked in _readtracking(repo).iteritems():
for bmark, tracked in pycompat.iteritems(_readtracking(repo)):
distance = calculatenamedistance(repo, bmark, tracked)
if distance != (None, None):
distances[bmark] = distance
@ -190,7 +190,7 @@ def _moveto(repo, bookmark, ctx, clean=False):
diff = m1.diff(m2)

changedfiles = []
changedfiles.extend(diff.iterkeys())
changedfiles.extend(pycompat.iterkeys(diff))

dirstate = repo.dirstate
dirchanges = [f for f in dirstate if dirstate[f] != "n"]
@ -23,7 +23,7 @@ import sys
import time
import traceback

from edenscm.mercurial import registrar, util
from edenscm.mercurial import pycompat, registrar, util


pathformat = "/tmp/trace-%(pid)s-%(time)s.log"
@ -40,7 +40,7 @@ configitem("sigtrace", "memsignal", default="USR2")

def printstacks(sig, currentframe):
content = ""
for tid, frame in sys._current_frames().iteritems():
for tid, frame in pycompat.iteritems(sys._current_frames()):
content += "Thread %s:\n%s\n" % (tid, util.smarttraceback(frame))

path = pathformat % {"time": time.time(), "pid": os.getpid()}
@ -181,7 +181,8 @@ class pathcopiesserializer(jsonserializer):
@classmethod
def serialize(cls, copydict):
encoded = dict(
(k.encode("base64"), v.encode("base64")) for (k, v) in copydict.iteritems()
(k.encode("base64"), v.encode("base64"))
for (k, v) in pycompat.iteritems(copydict)
)
return super(pathcopiesserializer, cls).serialize(encoded)
@ -189,7 +190,8 @@ class pathcopiesserializer(jsonserializer):
def deserialize(cls, string):
encoded = super(pathcopiesserializer, cls).deserialize(string)
return dict(
(k.decode("base64"), v.decode("base64")) for k, v in encoded.iteritems()
(k.decode("base64"), v.decode("base64"))
for k, v in pycompat.iteritems(encoded)
)
@ -89,7 +89,7 @@ def _drawendinglines(orig, lines, extra, edgemap, seen):
# undo the wrapfunction
extensions.unwrapfunction(graphmod, "_drawendinglines", _drawendinglines)
# restore the space to '|'
for k, v in edgemap.iteritems():
for k, v in pycompat.iteritems(edgemap):
if v == " ":
edgemap[k] = "|"
orig(lines, extra, edgemap, seen)
@ -125,7 +125,7 @@ def uisetup(ui):
# these are very hacky but it seems to work well and it seems there
# is no other easy choice for now.
edgemap = state["edges"]
for k in edgemap.iterkeys():
for k in pycompat.iterkeys(edgemap):
edgemap[k] = " "
# also we need to hack _drawendinglines to draw the missing '|'s:
# (before) (after)
@ -5,7 +5,7 @@

# bundleparts.py - utilies to pack/unpack the snapshot metadata into bundles

from edenscm.mercurial import bundle2, error
from edenscm.mercurial import bundle2, error, pycompat
from edenscm.mercurial.i18n import _
from edenscm.mercurial.utils import cborutil
@ -112,7 +112,7 @@ def binarydecode(stream):
raise error.Abort(_("invalid bundlepart stream"))
try:
for section in ("metadatafiles", "auxfiles"):
for oid, content in bundlepartdict[section].iteritems():
for oid, content in pycompat.iteritems(bundlepartdict[section]):
yield oid, content
except (KeyError, ValueError):
raise error.Abort(_("invalid bundlepart dict: %s") % (bundlepartdict,))
@ -287,7 +287,7 @@ def _setupupdates(ui):
sparsematch = repo.sparsematch(mctx.rev())

temporaryfiles = []
for file, action in actions.iteritems():
for file, action in pycompat.iteritems(actions):
type, args, msg = action
files.add(file)
if sparsematch(file):
@ -2358,7 +2358,7 @@ def _refresh(ui, repo, origstatus, origsparsematch, force):
)

# Check for files that were only in the dirstate.
for file, state in dirstate.iteritems():
for file, state in pycompat.iteritems(dirstate):
if not file in files:
old = origsparsematch(file)
new = sparsematch(file)
@ -2371,7 +2371,7 @@ def _refresh(ui, repo, origstatus, origsparsematch, force):
typeactions = dict((m, []) for m in "a f g am cd dc r dm dg m e k p pr".split())

with progress.bar(ui, _("applying"), total=len(actions)) as prog:
for f, (m, args, msg) in actions.iteritems():
for f, (m, args, msg) in pycompat.iteritems(actions):
prog.value += 1
if m not in typeactions:
typeactions[m] = []
@ -1156,7 +1156,7 @@ class treemanifestctx(object):
raise NotImplemented("native trees don't support shallow " "readdelta yet")
else:
md = _buildtree(self._manifestlog)
for f, ((n1, fl1), (n2, fl2)) in parentmf.diff(mf).iteritems():
for f, ((n1, fl1), (n2, fl2)) in pycompat.iteritems(parentmf.diff(mf)):
if n2:
md[f] = n2
if fl2:
@ -1522,7 +1522,7 @@ def _converttotree(tr, mfl, tmfl, mfctx, linkrev=None, torevlog=False):
def _difftoaddremove(diff):
added = []
removed = []
for filename, (old, new) in diff.iteritems():
for filename, (old, new) in pycompat.iteritems(diff):
if new is not None and new[0] is not None:
added.append((filename, new[0], new[1]))
else:
@ -1556,7 +1556,7 @@ def _getflatdiff(mfl, mfctx):
diff = mfl[p1node].read().diff(mfctx.read())
deletes = []
adds = []
for filename, ((anode, aflag), (bnode, bflag)) in diff.iteritems():
for filename, ((anode, aflag), (bnode, bflag)) in pycompat.iteritems(diff):
if bnode is None:
deletes.append(filename)
else:
@ -305,7 +305,8 @@ def _logcommand(repo, tr, command):
def _logbookmarks(repo, tr):
revstring = "\n".join(
sorted(
"%s %s" % (name, hex(node)) for name, node in repo._bookmarks.iteritems()
"%s %s" % (name, hex(node))
for name, node in pycompat.iteritems(repo._bookmarks)
)
)
return writelog(repo, tr, "bookmarks.i", revstring)
@ -1019,7 +1020,7 @@ def _undoto(ui, repo, reverseindex, keep=False, branch=None):

# copy implementation for bookmarks
itercopy = []
for mark in repo._bookmarks.iteritems():
for mark in pycompat.iteritems(repo._bookmarks):
itercopy.append(mark)
bmremove = []
for mark in itercopy:
@ -1051,13 +1052,13 @@ def _undoto(ui, repo, reverseindex, keep=False, branch=None):
predctxmanifest = predctx.manifest()
dirstate = repo.dirstate
diff = predctxmanifest.diff(wctxmanifest)
changedfiles.extend(diff.iterkeys())
changedfiles.extend(pycompat.iterkeys(diff))

with dirstate.parentchange():
dirstate.rebuild(prednode, predctxmanifest, changedfiles)
# we want added and removed files to be shown
# properly, not with ? and ! prefixes
for filename, data in diff.iteritems():
for filename, data in pycompat.iteritems(diff):
if data[0][0] is None:
dirstate.add(filename)
if data[1][0] is None:
@ -50,7 +50,7 @@ from __future__ import absolute_import

import re

from edenscm.mercurial import registrar, util
from edenscm.mercurial import pycompat, registrar, util
from edenscm.mercurial.i18n import _
from edenscm.mercurial.node import short
from edenscm.mercurial.pycompat import range
@ -209,7 +209,7 @@ def forbidcr(ui, repo, hooktype, node, **kwargs):
def reposetup(ui, repo):
if not repo.local():
return
for name, fn in _filters.iteritems():
for name, fn in pycompat.iteritems(_filters):
repo.adddatafilter(name, fn)
@ -20,7 +20,15 @@ import time
|
||||
import zipfile
|
||||
import zlib
|
||||
|
||||
from . import error, formatter, match as matchmod, progress, util, vfs as vfsmod
|
||||
from . import (
|
||||
error,
|
||||
formatter,
|
||||
match as matchmod,
|
||||
progress,
|
||||
pycompat,
|
||||
util,
|
||||
vfs as vfsmod,
|
||||
)
|
||||
from .i18n import _
|
||||
|
||||
|
||||
@ -68,7 +76,7 @@ exts = {
|
||||
|
||||
|
||||
def guesskind(dest):
|
||||
for kind, extensions in exts.iteritems():
|
||||
for kind, extensions in pycompat.iteritems(exts):
|
||||
if any(dest.endswith(ext) for ext in extensions):
|
||||
return kind
|
||||
return None
|
||||
|
@ -172,7 +172,7 @@ class bmstore(dict):
|
||||
self._aclean = True
|
||||
|
||||
def _write(self, fp):
|
||||
for name, node in self.iteritems():
|
||||
for name, node in pycompat.iteritems(self):
|
||||
fp.write("%s %s\n" % (hex(node), encoding.fromlocal(name)))
|
||||
self._clean = True
|
||||
self._repo.invalidatevolatilesets()
|
||||
@ -346,7 +346,7 @@ def headsforactive(repo):
|
||||
raise ValueError("headsforactive() only makes sense with an active bookmark")
|
||||
name = repo._activebookmark.split("@", 1)[0]
|
||||
heads = []
|
||||
for mark, n in repo._bookmarks.iteritems():
|
||||
for mark, n in pycompat.iteritems(repo._bookmarks):
|
||||
if mark.split("@", 1)[0] == name:
|
||||
heads.append(n)
|
||||
return heads
|
||||
@ -403,7 +403,7 @@ def listbinbookmarks(repo):
|
||||
marks = getattr(repo, "_bookmarks", {})
|
||||
|
||||
hasnode = repo.changelog.hasnode
|
||||
for k, v in marks.iteritems():
|
||||
for k, v in pycompat.iteritems(marks):
|
||||
# don't expose local divergent bookmarks
|
||||
if hasnode(v) and ("@" not in k or k.endswith("@")):
|
||||
yield k, v
|
||||
@ -907,7 +907,7 @@ def _printbookmarks(ui, repo, bmarks, **opts):
|
||||
hexfn = fm.hexfunc
|
||||
if len(bmarks) == 0 and fm.isplain():
|
||||
ui.status(_("no bookmarks set\n"))
|
||||
for bmark, (n, prefix, label) in sorted(bmarks.iteritems()):
|
||||
for bmark, (n, prefix, label) in sorted(pycompat.iteritems(bmarks)):
|
||||
fm.startitem()
|
||||
if not ui.quiet:
|
||||
fm.plain(" %s " % prefix, label=label)
|
||||
@ -933,7 +933,7 @@ def printbookmarks(ui, repo, **opts):
|
||||
"""
|
||||
marks = repo._bookmarks
|
||||
bmarks = {}
|
||||
for bmark, n in sorted(marks.iteritems()):
|
||||
for bmark, n in sorted(pycompat.iteritems(marks)):
|
||||
active = repo._activebookmark
|
||||
if bmark == active:
|
||||
prefix, label = "*", activebookmarklabel
|
||||
@ -975,7 +975,7 @@ def reachablerevs(repo, bookmarks):
|
||||
# both bookmarks we are deleting and other bookmarks.
|
||||
othernodes = [
|
||||
node
|
||||
for bookmark, node in repobookmarks.iteritems()
|
||||
for bookmark, node in pycompat.iteritems(repobookmarks)
|
||||
if bookmark not in bookmarks
|
||||
]
|
||||
|
||||
|
@ -12,7 +12,7 @@
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from . import scmutil
|
||||
from . import pycompat, scmutil
|
||||
from .node import nullid, nullrev
|
||||
|
||||
|
||||
@ -112,7 +112,7 @@ class branchcache(dict):
|
||||
return heads
|
||||
|
||||
def iterbranches(self):
|
||||
for bn, heads in self.iteritems():
|
||||
for bn, heads in pycompat.iteritems(self):
|
||||
yield (bn, heads) + self._branchtip(heads)
|
||||
|
||||
def copy(self):
|
||||
|
@ -357,7 +357,7 @@ class cg1unpacker(object):
|
||||
mfnode = cl.changelogrevision(cset).manifest
|
||||
mfest = ml[mfnode].readnew()
|
||||
# store file cgnodes we must see
|
||||
for f, n in mfest.iteritems():
|
||||
for f, n in pycompat.iteritems(mfest):
|
||||
needfiles.setdefault(f, set()).add(n)
|
||||
|
||||
# process the files
|
||||
@ -1080,7 +1080,7 @@ def _addchangegroupfiles(repo, source, revmap, trp, expectedfiles, needfiles):
|
||||
if not needs:
|
||||
del needfiles[f]
|
||||
|
||||
for f, needs in needfiles.iteritems():
|
||||
for f, needs in pycompat.iteritems(needfiles):
|
||||
fl = repo.file(f)
|
||||
for n in needs:
|
||||
try:
|
||||
|
@ -439,7 +439,7 @@ def dorecord(ui, repo, commitfunc, cmdsuggest, backupall, filterfn, *pats, **opt
|
||||
# 5. finally restore backed-up files
|
||||
try:
|
||||
dirstate = repo.dirstate
|
||||
for realname, tmpname in backups.iteritems():
|
||||
for realname, tmpname in pycompat.iteritems(backups):
|
||||
ui.debug("restoring %r to %r\n" % (tmpname, realname))
|
||||
|
||||
if dirstate[realname] == "n":
|
||||
@ -1890,7 +1890,7 @@ class changeset_printer(object):
|
||||
if branch != "default":
|
||||
self.ui.write(columns["branch"] % branch, label="log.branch")
|
||||
|
||||
for nsname, ns in self.repo.names.iteritems():
|
||||
for nsname, ns in pycompat.iteritems(self.repo.names):
|
||||
# branches has special logic already handled above, so here we just
|
||||
# skip it
|
||||
if nsname == "branches":
|
||||
@ -2828,7 +2828,7 @@ def _makelogrevset(repo, pats, opts, revs):
|
||||
filematcher = lambda rev: match
|
||||
|
||||
expr = []
|
||||
for op, val in sorted(opts.iteritems()):
|
||||
for op, val in sorted(pycompat.iteritems(opts)):
|
||||
if not val:
|
||||
continue
|
||||
if op not in opt2revset:
|
||||
|
@ -1953,7 +1953,7 @@ def copy(ui, repo, *pats, **opts):
|
||||
@command("debugcommands", [], _("[COMMAND]"), norepo=True)
|
||||
def debugcommands(ui, cmd="", *args):
|
||||
"""list all available commands and options"""
|
||||
for cmd, vals in sorted(table.iteritems()):
|
||||
for cmd, vals in sorted(pycompat.iteritems(table)):
|
||||
cmd = cmd.split("|")[0].strip("^")
|
||||
opts = ", ".join([i[1] for i in vals[1]])
|
||||
ui.write("%s: %s\n" % (cmd, opts))
|
||||
@ -3472,7 +3472,7 @@ def identify(
|
||||
hexremoterev = hex(remoterev)
|
||||
bms = [
|
||||
bm
|
||||
for bm, bmr in peer.listkeys("bookmarks").iteritems()
|
||||
for bm, bmr in pycompat.iteritems(peer.listkeys("bookmarks"))
|
||||
if bmr == hexremoterev
|
||||
]
|
||||
|
||||
@ -4539,10 +4539,12 @@ def paths(ui, repo, search=None, **opts):
|
||||
ui.pager("paths")
|
||||
if search:
|
||||
pathitems = [
|
||||
(name, path) for name, path in ui.paths.iteritems() if name == search
|
||||
(name, path)
|
||||
for name, path in pycompat.iteritems(ui.paths)
|
||||
if name == search
|
||||
]
|
||||
else:
|
||||
pathitems = sorted(ui.paths.iteritems())
|
||||
pathitems = sorted(pycompat.iteritems(ui.paths))
|
||||
|
||||
fm = ui.formatter("paths", opts)
|
||||
if fm.isplain():
|
||||
@ -5985,7 +5987,7 @@ def summary(ui, repo, **opts):
|
||||
|
||||
c = repo.dirstate.copies()
|
||||
copied, renamed = [], []
|
||||
for d, s in c.iteritems():
|
||||
for d, s in pycompat.iteritems(c):
|
||||
if s in status.removed:
|
||||
status.removed.remove(s)
|
||||
renamed.append(d)
|
||||
|
@ -443,7 +443,7 @@ def debugcapabilities(ui, path, **opts):
|
||||
b2caps = bundle2.bundle2caps(peer)
|
||||
if b2caps:
|
||||
ui.write(("Bundle2 capabilities:\n"))
|
||||
for key, values in sorted(b2caps.iteritems()):
|
||||
for key, values in sorted(pycompat.iteritems(b2caps)):
|
||||
ui.write((" %s\n") % key)
|
||||
for v in values:
|
||||
ui.write((" %s\n") % v)
|
||||
@ -870,7 +870,7 @@ def debugstate(ui, repo, **opts):
|
||||
ui.write("\n")
|
||||
return
|
||||
|
||||
for path, dirstate_tuple in sorted(repo.dirstate._map._map.iteritems()):
|
||||
for path, dirstate_tuple in sorted(pycompat.iteritems(repo.dirstate._map._map)):
|
||||
status, mode, merge_state = dirstate_tuple
|
||||
if mode & 0o20000:
|
||||
display_mode = "lnk"
|
||||
@ -891,7 +891,7 @@ def debugstate(ui, repo, **opts):
|
||||
keyfunc = None # sort by filename
|
||||
ds = repo.dirstate
|
||||
dmap = ds._map
|
||||
for path, ent in sorted(dmap.iteritems(), key=keyfunc):
|
||||
for path, ent in sorted(pycompat.iteritems(dmap), key=keyfunc):
|
||||
if ent[3] == -1:
|
||||
timestr = "unset "
|
||||
elif nodates:
|
||||
@ -1902,7 +1902,7 @@ def debugnamecomplete(ui, repo, *args):
|
||||
names = set()
|
||||
# since we previously only listed open branches, we will handle that
|
||||
# specially (after this for loop)
|
||||
for name, ns in repo.names.iteritems():
|
||||
for name, ns in pycompat.iteritems(repo.names):
|
||||
if name != "branches":
|
||||
names.update(ns.listnames(repo))
|
||||
names.update(
|
||||
@ -2139,7 +2139,7 @@ def debugpathcomplete(ui, repo, *specs, **opts):
|
||||
def complete(spec, acceptable, matches, fullpaths):
|
||||
addmatch = matches.add
|
||||
speclen = len(spec)
|
||||
for f, st in repo.dirstate.iteritems():
|
||||
for f, st in pycompat.iteritems(repo.dirstate):
|
||||
if f.startswith(spec) and st[0] in acceptable:
|
||||
if fullpaths:
|
||||
addmatch(f)
|
||||
@ -2373,7 +2373,7 @@ def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
|
||||
ui.status(str(r) + "\n")
|
||||
return not r
|
||||
else:
|
||||
for k, v in sorted(target.listkeys(namespace).iteritems()):
|
||||
for k, v in sorted(pycompat.iteritems(target.listkeys(namespace))):
|
||||
ui.write("%s\t%s\n" % (util.escapestr(k), util.escapestr(v)))
|
||||
|
||||
|
||||
@ -3140,7 +3140,7 @@ def debugwireargs(ui, repopath, *vals, **opts):
|
||||
for opt in cmdutil.remoteopts:
|
||||
del opts[opt[1]]
|
||||
args = {}
|
||||
for k, v in opts.iteritems():
|
||||
for k, v in pycompat.iteritems(opts):
|
||||
if v:
|
||||
args[k] = v
|
||||
args = pycompat.strkwargs(args)
|
||||
@ -3325,7 +3325,7 @@ def debugcheckcasecollisions(ui, repo, *testfiles, **opts):
|
||||
res = 1
|
||||
else:
|
||||
seen = set()
|
||||
for mfnf in ctx.manifest().iterkeys():
|
||||
for mfnf in pycompat.iterkeys(ctx.manifest()):
|
||||
for mfn in [mfnf] + list(util.finddirs(mfnf)):
|
||||
if mfn in seen:
|
||||
continue
|
||||
@ -3361,7 +3361,7 @@ def debugexistingcasecollisions(ui, repo, *basepaths, **opts):
|
||||
dirlistmap = {}
|
||||
for entry in dirlist:
|
||||
dirlistmap.setdefault(entry.lower(), []).append(entry)
|
||||
for _lowername, entries in sorted(dirlistmap.iteritems()):
|
||||
for _lowername, entries in sorted(pycompat.iteritems(dirlistmap)):
|
||||
if len(entries) > 1:
|
||||
ui.write(
|
||||
_("%s contains collisions: %s\n")
|
||||
@ -3444,7 +3444,7 @@ def debugreadauthforuri(ui, _repo, uri, user=None):
|
||||
auth = httpconnection.readauthforuri(ui, uri, user)
|
||||
if auth is not None:
|
||||
auth, items = auth
|
||||
for k, v in sorted(items.iteritems()):
|
||||
for k, v in sorted(pycompat.iteritems(items)):
|
||||
ui.write(("auth.%s.%s=%s\n") % (auth, k, v))
|
||||
else:
|
||||
ui.warn(_("no match found\n"))
|
||||
|
@ -5,7 +5,7 @@
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
from .. import scmutil, util
|
||||
from .. import pycompat, scmutil, util
|
||||
from ..i18n import _
|
||||
from . import debug
|
||||
from .cmdtable import command
|
||||
@ -40,7 +40,7 @@ def debugdirs(ui, repo, *dirs, **opts):
|
||||
else:
|
||||
candidates = {d.strip("/"): d for d in dirs}
|
||||
matches = set()
|
||||
for f in ctx.manifest().iterkeys():
|
||||
for f in pycompat.iterkeys(ctx.manifest()):
|
||||
for p in util.finddirs(f):
|
||||
if p in candidates:
|
||||
matches.add(candidates.pop(p))
|
||||
|
@ -125,7 +125,7 @@ def commitfilteredctx(repo, ctx, match, allowempty):
|
||||
|
||||
# Filter copies
|
||||
copied = copies.pathcopies(base, ctx)
|
||||
copied = dict((dst, src) for dst, src in copied.iteritems() if dst in files)
|
||||
copied = dict((dst, src) for dst, src in pycompat.iteritems(copied) if dst in files)
|
||||
|
||||
def filectxfn(repo, memctx, path, contentctx=ctx, redirect=()):
|
||||
if path not in contentctx:
|
||||
@ -210,9 +210,11 @@ def fixdirstate(repo, oldctx, newctx, status):
|
||||
if src:
|
||||
oldcopies[f] = src[0]
|
||||
oldcopies.update(copies)
|
||||
copies = dict((dst, oldcopies.get(src, src)) for dst, src in oldcopies.iteritems())
|
||||
copies = dict(
|
||||
(dst, oldcopies.get(src, src)) for dst, src in pycompat.iteritems(oldcopies)
|
||||
)
|
||||
# Adjust the dirstate copies
|
||||
for dst, src in copies.iteritems():
|
||||
for dst, src in pycompat.iteritems(copies):
|
||||
if src not in newctx or dst in newctx or ds[dst] != "a":
|
||||
src = None
|
||||
ds.copy(src, dst)
|
||||
|
@ -85,7 +85,7 @@ class config(object):
|
||||
return sorted(self._data.keys())
|
||||
|
||||
def items(self, section):
|
||||
return list(self._data.get(section, {}).iteritems())
|
||||
return list(self._data.get(section, {}).items())
|
||||
|
||||
def set(self, section, item, value, source=""):
|
||||
if sys.version_info[0] >= 3:
|
||||
|
@ -9,7 +9,7 @@ from __future__ import absolute_import
|
||||
|
||||
import time
|
||||
|
||||
from . import extensions, sshpeer, util
|
||||
from . import extensions, pycompat, sshpeer, util
|
||||
|
||||
|
||||
class connectionpool(object):
|
||||
@ -77,7 +77,7 @@ class connectionpool(object):
|
||||
return conn
|
||||
|
||||
def close(self):
|
||||
for pathpool in self._pool.itervalues():
|
||||
for pathpool in pycompat.itervalues(self._pool):
|
||||
for conn in pathpool:
|
||||
conn.close()
|
||||
del pathpool[:]
|
||||
|
@ -148,7 +148,7 @@ class basectx(object):
|
||||
deleted, unknown, ignored = s.deleted, s.unknown, s.ignored
|
||||
deletedset = set(deleted)
|
||||
d = mf1.diff(mf2, matcher=match)
|
||||
for fn, value in d.iteritems():
|
||||
for fn, value in pycompat.iteritems(d):
|
||||
if listclean:
|
||||
cleanset.discard(fn)
|
||||
if fn in deletedset:
|
||||
|
@ -16,7 +16,7 @@ import collections
|
||||
import heapq
|
||||
import os
|
||||
|
||||
from . import match as matchmod, node, pathutil, scmutil, util
|
||||
from . import match as matchmod, node, pathutil, pycompat, scmutil, util
|
||||
from .i18n import _
|
||||
|
||||
|
||||
@ -109,7 +109,7 @@ def _findlimit(repo, a, b):
|
||||
def _chain(src, dst, a, b):
|
||||
"""chain two sets of copies a->b"""
|
||||
t = a.copy()
|
||||
for k, v in b.iteritems():
|
||||
for k, v in pycompat.iteritems(b):
|
||||
if v in t:
|
||||
# found a chain
|
||||
if t[v] != k:
|
||||
@ -220,7 +220,7 @@ def _backwardrenames(a, b):
|
||||
# arbitrarily pick one of the renames.
|
||||
f = _forwardcopies(b, a)
|
||||
r = {}
|
||||
for k, v in sorted(f.iteritems()):
|
||||
for k, v in sorted(pycompat.iteritems(f)):
|
||||
# remove copies
|
||||
if v in a:
|
||||
continue
|
||||
@ -607,7 +607,7 @@ def _fullcopytracing(repo, c1, c2, base):
|
||||
|
||||
# examine each file copy for a potential directory move, which is
|
||||
# when all the files in a directory are moved to a new directory
|
||||
for dst, src in fullcopy.iteritems():
|
||||
for dst, src in pycompat.iteritems(fullcopy):
|
||||
dsrc, ddst = pathutil.dirname(src), pathutil.dirname(dst)
|
||||
if dsrc in invalid:
|
||||
# already seen to be uninteresting
|
||||
@ -705,7 +705,7 @@ def _heuristicscopytracing(repo, c1, c2, base):
|
||||
ctx = ctx.p1()
|
||||
|
||||
cp = _forwardcopies(base, c2)
|
||||
for dst, src in cp.iteritems():
|
||||
for dst, src in pycompat.iteritems(cp):
|
||||
if src in m1:
|
||||
copies[dst] = src
|
||||
|
||||
@ -909,7 +909,7 @@ def duplicatecopies(repo, wctx, rev, fromrev, skiprev=None):
|
||||
# of the function is much faster (and is required for carrying copy
|
||||
# metadata across the rebase anyway).
|
||||
exclude = pathcopies(repo[fromrev], repo[skiprev])
|
||||
for dst, src in pathcopies(repo[fromrev], repo[rev]).iteritems():
|
||||
for dst, src in pycompat.iteritems(pathcopies(repo[fromrev], repo[rev])):
|
||||
# copies.pathcopies returns backward renames, so dst might not
|
||||
# actually be in the dirstate
|
||||
if dst in exclude:
|
||||
|
@ -318,7 +318,7 @@ class dirstate(object):
|
||||
return iter(sorted(self._map))
|
||||
|
||||
def items(self):
|
||||
return self._map.iteritems()
|
||||
return pycompat.iteritems(self._map)
|
||||
|
||||
iteritems = items
|
||||
|
||||
@ -774,7 +774,7 @@ class dirstate(object):
|
||||
def _writedirstate(self, st):
|
||||
# notify callbacks about parents change
|
||||
if self._origpl is not None and self._origpl != self._pl:
|
||||
for c, callback in sorted(self._plchangecallbacks.iteritems()):
|
||||
for c, callback in sorted(pycompat.iteritems(self._plchangecallbacks)):
|
||||
callback(self, self._origpl, self._pl)
|
||||
# if the first parent has changed then consider this a new checkout
|
||||
if self._origpl[0] != self._pl[0]:
|
||||
@ -791,7 +791,7 @@ class dirstate(object):
|
||||
delaywrite = self._ui.configint("debug", "dirstate.delaywrite")
|
||||
if delaywrite > 0:
|
||||
# do we have any files to delay for?
|
||||
for f, e in self._map.iteritems():
|
||||
for f, e in pycompat.iteritems(self._map):
|
||||
if e[0] == "n" and e[3] == now:
|
||||
import time # to avoid useless import
|
||||
|
||||
@ -1290,7 +1290,7 @@ class dirstatemap(object):
|
||||
util.clearcachedproperty(self, "otherparentset")
|
||||
|
||||
def iteritems(self):
|
||||
return self._map.iteritems()
|
||||
return pycompat.iteritems(self._map)
|
||||
|
||||
def __len__(self):
|
||||
return len(self._map)
|
||||
@ -1387,7 +1387,7 @@ class dirstatemap(object):
|
||||
except AttributeError:
|
||||
nonnorm = set()
|
||||
otherparent = set()
|
||||
for fname, e in self._map.iteritems():
|
||||
for fname, e in pycompat.iteritems(self._map):
|
||||
if e[0] != "n" or e[3] == -1:
|
||||
nonnorm.add(fname)
|
||||
if e[0] == "n" and e[2] == -2:
|
||||
@ -1408,7 +1408,7 @@ class dirstatemap(object):
|
||||
|
||||
f = {}
|
||||
normcase = util.normcase
|
||||
for name, s in self._map.iteritems():
|
||||
for name, s in pycompat.iteritems(self._map):
|
||||
if s[0] != "r":
|
||||
f[normcase(name)] = name
|
||||
f["."] = "." # prevents useless util.fspath() invocation
|
||||
|
@ -14,7 +14,7 @@ from __future__ import absolute_import
|
||||
|
||||
import functools
|
||||
|
||||
from . import bookmarks, branchmap, phases, setdiscovery, treediscovery, util
|
||||
from . import bookmarks, branchmap, phases, pycompat, setdiscovery, treediscovery, util
|
||||
from .node import hex, nullid
|
||||
|
||||
|
||||
@ -221,7 +221,7 @@ def _headssummary(pushop):
|
||||
|
||||
# A. register remote heads
|
||||
remotebranches = set()
|
||||
for branch, heads in remote.branchmap().iteritems():
|
||||
for branch, heads in pycompat.iteritems(remote.branchmap()):
|
||||
remotebranches.add(branch)
|
||||
known = []
|
||||
unsynced = []
|
||||
@ -249,13 +249,13 @@ def _headssummary(pushop):
|
||||
# This will possibly add new heads and remove existing ones.
|
||||
newmap = branchmap.branchcache(
|
||||
(branch, heads[1])
|
||||
for branch, heads in headssum.iteritems()
|
||||
for branch, heads in pycompat.iteritems(headssum)
|
||||
if heads[0] is not None
|
||||
)
|
||||
newmap.update(repo, (ctx.rev() for ctx in missingctx))
|
||||
for branch, newheads in newmap.iteritems():
|
||||
for branch, newheads in pycompat.iteritems(newmap):
|
||||
headssum[branch][1][:] = newheads
|
||||
for branch, items in headssum.iteritems():
|
||||
for branch, items in pycompat.iteritems(headssum):
|
||||
for l in items:
|
||||
if l is not None:
|
||||
l.sort()
|
||||
@ -267,7 +267,7 @@ def _headssummary(pushop):
|
||||
futureheads = set(torev(h) for h in outgoing.missingheads)
|
||||
futureheads |= set(torev(h) for h in outgoing.commonheads)
|
||||
allfuturecommon = repo.changelog.ancestors(futureheads, inclusive=True)
|
||||
for branch, heads in sorted(headssum.iteritems()):
|
||||
for branch, heads in sorted(pycompat.iteritems(headssum)):
|
||||
remoteheads, newheads, unsyncedheads, placeholder = heads
|
||||
result = _postprocessobsolete(pushop, allfuturecommon, newheads)
|
||||
headssum[branch] = (
|
||||
|
@ -17,6 +17,7 @@ from . import (
|
||||
match as matchmod,
|
||||
perftrace,
|
||||
policy,
|
||||
pycompat,
|
||||
scmutil,
|
||||
util,
|
||||
)
|
||||
@ -69,9 +70,9 @@ class eden_dirstate(dirstate.dirstate):
|
||||
# allowed. Or if it is, it should be through a separate, explicit
|
||||
# codepath.
|
||||
#
|
||||
# We do provide edeniteritems() for users to iterate through only the
|
||||
# We do provide pycompat.iteritems(ede) for users to iterate through only the
|
||||
# files explicitly tracked in the eden dirstate.
|
||||
raise NotImplementedError("eden_dirstate.iteritems()")
|
||||
raise NotImplementedError("pycompat.iteritems(eden_dirstate)")
|
||||
|
||||
def dirs(self): # override
|
||||
raise NotImplementedError("eden_dirstate.dirs()")
|
||||
@ -83,7 +84,7 @@ class eden_dirstate(dirstate.dirstate):
|
||||
This includes non-normal files (e.g., files marked for addition or
|
||||
removal), as well as normal files that have merge state information.
|
||||
"""
|
||||
return self._map._map.iteritems()
|
||||
return pycompat.iteritems(self._map._map)
|
||||
|
||||
def _p1_ctx(self):
|
||||
"""Return the context object for the first parent commit."""
|
||||
|
@ -5,7 +5,7 @@
|
||||
|
||||
"""Eden implementation for the dirstate filesystem class."""
|
||||
|
||||
from . import filesystem, perftrace, util
|
||||
from . import filesystem, perftrace, pycompat, util
|
||||
from .EdenThriftClient import ScmFileStatus
|
||||
|
||||
|
||||
@ -25,7 +25,7 @@ class eden_filesystem(filesystem.physicalfilesystem):
|
||||
ADDED = ScmFileStatus.ADDED
|
||||
IGNORED = ScmFileStatus.IGNORED
|
||||
|
||||
for path, code in edenstatus.iteritems():
|
||||
for path, code in pycompat.iteritems(edenstatus):
|
||||
if not match(path):
|
||||
continue
|
||||
|
||||
|
@ -42,7 +42,7 @@ class eden_dirstate_map(dirstate.dirstatemap):
|
||||
# Remove all "clean" entries before writing. (It's possible we should
|
||||
# never allow these to be inserted into self._map in the first place.)
|
||||
to_remove = []
|
||||
for path, v in self._map.iteritems():
|
||||
for path, v in pycompat.iteritems(self._map):
|
||||
if v[0] == "n" and v[2] == MERGE_STATE_NOT_APPLICABLE:
|
||||
to_remove.append(path)
|
||||
for path in to_remove:
|
||||
@ -85,7 +85,9 @@ class eden_dirstate_map(dirstate.dirstatemap):
|
||||
self.copymap = copymap
|
||||
|
||||
def iteritems(self):
|
||||
raise RuntimeError("Should not invoke iteritems() on eden_dirstate_map!")
|
||||
raise RuntimeError(
|
||||
"Should not pycompat.iteritems(invoke) on eden_dirstate_map!"
|
||||
)
|
||||
|
||||
def __len__(self):
|
||||
raise RuntimeError("Should not invoke __len__ on eden_dirstate_map!")
|
||||
@ -140,7 +142,7 @@ class eden_dirstate_map(dirstate.dirstatemap):
|
||||
# type() -> Tuple[Set[str], Set[str]]
|
||||
nonnorm = set()
|
||||
otherparent = set()
|
||||
for path, entry in self._map.iteritems():
|
||||
for path, entry in pycompat.iteritems(self._map):
|
||||
if entry[0] != "n":
|
||||
nonnorm.add(path)
|
||||
elif entry[2] == MERGE_STATE_OTHER_PARENT:
|
||||
|
@ -9,7 +9,14 @@ This overrides the dirstate to check with the eden daemon for modifications,
|
||||
instead of doing a normal scan of the filesystem.
|
||||
"""
|
||||
|
||||
from . import EdenThriftClient as thrift, error, localrepo, merge as mergemod, util
|
||||
from . import (
|
||||
EdenThriftClient as thrift,
|
||||
error,
|
||||
localrepo,
|
||||
merge as mergemod,
|
||||
pycompat,
|
||||
util,
|
||||
)
|
||||
from .i18n import _
|
||||
|
||||
|
||||
@ -235,7 +242,7 @@ def _determine_actions_for_conflicts(repo, src, conflicts):
|
||||
def _check_actions_and_raise_if_there_are_conflicts(actions):
|
||||
# In stock Hg, update() performs this check once it gets the set of actions.
|
||||
conflict_paths = []
|
||||
for action_type, list_of_tuples in actions.iteritems():
|
||||
for action_type, list_of_tuples in pycompat.iteritems(actions):
|
||||
if len(list_of_tuples) == 0:
|
||||
continue # Note `actions` defaults to [] for all keys.
|
||||
if action_type not in ("g", "k", "e", "r", "pr"):
|
||||
|
@ -1006,7 +1006,7 @@ def _getbundlesendvars(pushop, bundler):
|
||||
|
||||
part = bundler.newpart("pushvars")
|
||||
|
||||
for key, value in shellvars.iteritems():
|
||||
for key, value in pycompat.iteritems(shellvars):
|
||||
part.addparam(key, value, mandatory=False)
|
||||
|
||||
|
||||
|
@ -121,7 +121,7 @@ def find(name):
|
||||
try:
|
||||
mod = _extensions[name]
|
||||
except KeyError:
|
||||
for k, v in _extensions.iteritems():
|
||||
for k, v in pycompat.iteritems(_extensions):
|
||||
if k.endswith("." + name) or k.endswith("/" + name):
|
||||
mod = v
|
||||
break
|
||||
@ -258,7 +258,7 @@ _cmdfuncattrs = ("norepo", "optionalrepo", "inferrepo")
|
||||
|
||||
def _validatecmdtable(ui, cmdtable):
|
||||
"""Check if extension commands have required attributes"""
|
||||
for c, e in cmdtable.iteritems():
|
||||
for c, e in pycompat.iteritems(cmdtable):
|
||||
f = e[0]
|
||||
if getattr(f, "_deprecatedregistrar", False):
|
||||
ui.deprecwarn(
|
||||
@ -601,7 +601,7 @@ def wrapcommand(table, command, wrapper, synopsis=None, docstring=None):
|
||||
'''
|
||||
assert callable(wrapper)
|
||||
aliases, entry = cmdutil.findcmd(command, table)
|
||||
for alias, e in table.iteritems():
|
||||
for alias, e in pycompat.iteritems(table):
|
||||
if e is entry:
|
||||
key = alias
|
||||
break
|
||||
@ -772,7 +772,7 @@ def _disabledpaths(strip_init=False):
|
||||
if name in exts or name in _order or name == "__init__":
|
||||
continue
|
||||
exts[name] = path
|
||||
for name, path in _disabledextensions.iteritems():
|
||||
for name, path in pycompat.iteritems(_disabledextensions):
|
||||
# If no path was provided for a disabled extension (e.g. "color=!"),
|
||||
# don't replace the path we already found by the scan above.
|
||||
if path:
|
||||
@ -836,7 +836,7 @@ def disabled():
|
||||
|
||||
return dict(
|
||||
(name, gettext(desc))
|
||||
for name, desc in __index__.docs.iteritems()
|
||||
for name, desc in pycompat.iteritems(__index__.docs)
|
||||
if name not in _order and name not in _blacklist
|
||||
)
|
||||
except (ImportError, AttributeError):
|
||||
@ -847,7 +847,7 @@ def disabled():
|
||||
return {}
|
||||
|
||||
exts = {}
|
||||
for name, path in paths.iteritems():
|
||||
for name, path in pycompat.iteritems(paths):
|
||||
doc = _disabledhelp(path)
|
||||
if doc and name not in _blacklist:
|
||||
exts[name] = doc.splitlines()[0]
|
||||
@ -910,7 +910,7 @@ def disabledcmd(ui, cmd):
|
||||
ext = findcmd(cmd, cmd, path)
|
||||
if not ext:
|
||||
# otherwise, interrogate each extension until there's a match
|
||||
for name, path in paths.iteritems():
|
||||
for name, path in pycompat.iteritems(paths):
|
||||
ext = findcmd(cmd, name, path)
|
||||
if ext:
|
||||
break
|
||||
@ -934,7 +934,7 @@ def enabled(shortname=True):
|
||||
|
||||
def notloaded():
|
||||
"""return short names of extensions that failed to load"""
|
||||
return [name for name, mod in _extensions.iteritems() if mod is None]
|
||||
return [name for name, mod in pycompat.iteritems(_extensions) if mod is None]
|
||||
|
||||
|
||||
def moduleversion(module):
|
||||
|
@ -251,7 +251,7 @@ class _nestedformatter(baseformatter):
|
||||
def _iteritems(data):
|
||||
"""iterate key-value pairs in stable order"""
|
||||
if isinstance(data, dict):
|
||||
return sorted(data.iteritems())
|
||||
return sorted(pycompat.iteritems(data))
|
||||
return data
|
||||
|
||||
|
||||
|
@ -57,7 +57,7 @@ def listexts(header, exts, indent=1, showdeprecated=False):
|
||||
"""return a text listing of the given extensions"""
|
||||
rst = []
|
||||
if exts:
|
||||
for name, desc in sorted(exts.iteritems()):
|
||||
for name, desc in sorted(pycompat.iteritems(exts)):
|
||||
if not showdeprecated and any(w in desc for w in _exclkeywords):
|
||||
continue
|
||||
rst.append("%s:%s: %s\n" % (" " * indent, name, desc))
|
||||
@ -154,7 +154,7 @@ def topicmatch(ui, commands, kw):
|
||||
or (callable(doc) and lowercontains(doc(ui)))
|
||||
):
|
||||
results["topics"].append((names[0], header))
|
||||
for cmd, entry in commands.table.iteritems():
|
||||
for cmd, entry in pycompat.iteritems(commands.table):
|
||||
if len(entry) == 3:
|
||||
summary = entry[2]
|
||||
else:
|
||||
@ -170,7 +170,8 @@ def topicmatch(ui, commands, kw):
|
||||
continue
|
||||
results["commands"].append((cmdname, summary))
|
||||
for name, docs in itertools.chain(
|
||||
extensions.enabled(False).iteritems(), extensions.disabled().iteritems()
|
||||
pycompat.iteritems(extensions.enabled(False)),
|
||||
pycompat.iteritems(extensions.disabled()),
|
||||
):
|
||||
if not docs:
|
||||
continue
|
||||
@ -183,7 +184,7 @@ def topicmatch(ui, commands, kw):
|
||||
except ImportError:
|
||||
# debug message would be printed in extensions.load()
|
||||
continue
|
||||
for cmd, entry in getattr(mod, "cmdtable", {}).iteritems():
|
||||
for cmd, entry in pycompat.iteritems(getattr(mod, "cmdtable", {})):
|
||||
if kw in cmd or (len(entry) > 2 and lowercontains(entry[2])):
|
||||
cmdname = cmd.partition("|")[0].lstrip("^")
|
||||
cmddoc = pycompat.getdoc(entry[0])
|
||||
@ -403,7 +404,7 @@ class _helpdispatch(object):
|
||||
self.opts = opts
|
||||
|
||||
self.commandindex = {}
|
||||
for name, cmd in commands.table.iteritems():
|
||||
for name, cmd in pycompat.iteritems(commands.table):
|
||||
for n in name.lstrip("^").split("|"):
|
||||
self.commandindex[n] = cmd
|
||||
|
||||
@ -611,7 +612,7 @@ class _helpdispatch(object):
|
||||
def helplist(self, name, select=None, **opts):
|
||||
h = {}
|
||||
cmds = {}
|
||||
for c, e in self.commands.table.iteritems():
|
||||
for c, e in pycompat.iteritems(self.commands.table):
|
||||
if select and not select(c):
|
||||
continue
|
||||
f = c.lstrip("^").partition("|")[0]
|
||||
|
@ -146,7 +146,7 @@ class requestcontext(object):
|
||||
|
||||
def archivelist(self, nodeid):
|
||||
allowed = self.configlist("web", "allow_archive")
|
||||
for typ, spec in self.archivespecs.iteritems():
|
||||
for typ, spec in pycompat.iteritems(self.archivespecs):
|
||||
if typ in allowed or self.configbool("web", "allow%s" % typ):
|
||||
yield {"type": typ, "extension": spec[2], "node": nodeid}
|
||||
|
||||
@ -411,7 +411,7 @@ class hgweb(object):
|
||||
|
||||
if cmd == "archive":
|
||||
fn = req.form["node"][0]
|
||||
for type_, spec in rctx.archivespecs.iteritems():
|
||||
for type_, spec in pycompat.iteritems(rctx.archivespecs):
|
||||
ext = spec[2]
|
||||
if fn.endswith(ext):
|
||||
req.form["node"] = [fn[: -len(ext)]]
|
||||
|
@ -316,7 +316,7 @@ class hgwebdir(object):
|
||||
def archivelist(ui, nodeid, url):
|
||||
allowed = ui.configlist("web", "allow_archive", untrusted=True)
|
||||
archives = []
|
||||
for typ, spec in hgweb_mod.archivespecs.iteritems():
|
||||
for typ, spec in pycompat.iteritems(hgweb_mod.archivespecs):
|
||||
if typ in allowed or ui.configbool(
|
||||
"web", "allow" + typ, untrusted=True
|
||||
):
|
||||
|
@ -47,7 +47,7 @@ def normalize(form):
|
||||
form[name] = value
|
||||
del form[k]
|
||||
# And strip the values
|
||||
for k, v in form.iteritems():
|
||||
for k, v in pycompat.iteritems(form):
|
||||
form[k] = [i.strip() for i in v]
|
||||
return form
|
||||
|
||||
|
@ -543,7 +543,7 @@ def manifest(web, req, tmpl):
|
||||
l = len(path)
|
||||
abspath = "/" + path
|
||||
|
||||
for full, n in mf.iteritems():
|
||||
for full, n in pycompat.iteritems(mf):
|
||||
# the virtual path (working copy path) used for the full
|
||||
# (repository) path
|
||||
f = decodepath(full)
|
||||
@ -1416,7 +1416,7 @@ def help(web, req, tmpl):
|
||||
|
||||
early, other = [], []
|
||||
primary = lambda s: s.partition("|")[0]
|
||||
for c, e in commands.table.iteritems():
|
||||
for c, e in pycompat.iteritems(commands.table):
|
||||
doc = _getdoc(e)
|
||||
if "DEPRECATED" in doc or c.startswith("debug"):
|
||||
continue
|
||||
|
@ -653,7 +653,7 @@ class sessionvars(object):
|
||||
|
||||
def __iter__(self):
|
||||
separator = self.start
|
||||
for key, value in sorted(self.vars.iteritems()):
|
||||
for key, value in sorted(pycompat.iteritems(self.vars)):
|
||||
yield {
|
||||
"name": key,
|
||||
"value": pycompat.bytestr(value),
|
||||
|
@ -23,7 +23,7 @@ def launch(application):
|
||||
util.setbinary(util.stdin)
|
||||
util.setbinary(util.stdout)
|
||||
|
||||
environ = dict(encoding.environ.iteritems())
|
||||
environ = dict(encoding.environ.items())
|
||||
environ.setdefault(r"PATH_INFO", "")
|
||||
if environ.get(r"SERVER_SOFTWARE", r"").startswith(r"Microsoft-IIS"):
|
||||
# IIS includes script_name in PATH_INFO
|
||||
|
@ -9,7 +9,7 @@ from __future__ import absolute_import
|
||||
|
||||
import os
|
||||
|
||||
from . import rcutil, util
|
||||
from . import pycompat, rcutil, util
|
||||
from .i18n import _
|
||||
|
||||
|
||||
@ -38,7 +38,7 @@ triggered = set()
|
||||
|
||||
|
||||
def loadhint(ui, extname, registrarobj):
|
||||
for name, func in registrarobj._table.iteritems():
|
||||
for name, func in pycompat.iteritems(registrarobj._table):
|
||||
hinttable[name] = func
|
||||
|
||||
|
||||
|
@ -137,13 +137,17 @@ def _exthook(ui, repo, htype, name, cmd, args, throw):
|
||||
env["HG_HOOKTYPE"] = htype
|
||||
env["HG_HOOKNAME"] = name
|
||||
|
||||
for k, v in args.iteritems():
|
||||
for k, v in pycompat.iteritems(args):
|
||||
if callable(v):
|
||||
v = v()
|
||||
if isinstance(v, dict):
|
||||
# make the dictionary element order stable across Python
|
||||
# implementations
|
||||
v = "{" + ", ".join("%r: %r" % i for i in sorted(v.iteritems())) + "}"
|
||||
v = (
|
||||
"{"
|
||||
+ ", ".join("%r: %r" % i for i in sorted(pycompat.iteritems(v)))
|
||||
+ "}"
|
||||
)
|
||||
env["HG_" + k.upper()] = v
|
||||
|
||||
if repo:
|
||||
|
@ -19,7 +19,7 @@ import logging
|
||||
import os
|
||||
import socket
|
||||
|
||||
from . import httpclient, sslutil, urllibcompat, util
|
||||
from . import httpclient, pycompat, sslutil, urllibcompat, util
|
||||
from .i18n import _
|
||||
|
||||
|
||||
@ -82,7 +82,7 @@ def readauthforuri(ui, uri, user):
|
||||
gdict[setting] = val
|
||||
|
||||
filtered = {}
|
||||
for group, auth in sorted(groups.iteritems()):
|
||||
for group, auth in sorted(pycompat.iteritems(groups)):
|
||||
ok = True
|
||||
for key in ("cert", "key"):
|
||||
val = auth.get(key)
|
||||
@ -101,7 +101,7 @@ def readauthforuri(ui, uri, user):
|
||||
bestlen = 0
|
||||
bestpriority = 0
|
||||
bestauth = None
|
||||
for group, auth in filtered.iteritems():
|
||||
for group, auth in pycompat.iteritems(filtered):
|
||||
if user and user != auth.get("username", user):
|
||||
# If a username was set in the URI, the entry username
|
||||
# must either match it or be unset
|
||||
|
@ -46,7 +46,7 @@ def dumps(obj, paranoid=True):
|
||||
elif util.safehasattr(obj, "keys"):
|
||||
out = [
|
||||
'"%s": %s' % (encoding.jsonescape(k, paranoid=paranoid), dumps(v, paranoid))
|
||||
for k, v in sorted(obj.iteritems())
|
||||
for k, v in sorted(pycompat.iteritems(obj))
|
||||
]
|
||||
return "{" + ", ".join(out) + "}"
|
||||
elif util.safehasattr(obj, "__iter__"):
|
||||
|
@ -194,7 +194,7 @@ class KeepAliveHandler(object):
|
||||
|
||||
def close_all(self):
|
||||
"""close all open connections"""
|
||||
for host, conns in self._cm.get_all().iteritems():
|
||||
for host, conns in pycompat.iteritems(self._cm.get_all()):
|
||||
for h in conns:
|
||||
self._cm.remove(h)
|
||||
h.close()
|
||||
|
@ -1108,7 +1108,7 @@ class localrepository(object):
|
||||
def nodebookmarks(self, node):
|
||||
"""return the list of bookmarks pointing to the specified node"""
|
||||
marks = []
|
||||
for bookmark, n in self._bookmarks.iteritems():
|
||||
for bookmark, n in pycompat.iteritems(self._bookmarks):
|
||||
if n == node:
|
||||
marks.append(bookmark)
|
||||
return sorted(marks)
|
||||
@ -1226,7 +1226,7 @@ class localrepository(object):
|
||||
mf = matchmod.match(self.root, "", [pat])
|
||||
fn = None
|
||||
params = cmd
|
||||
for name, filterfn in self._datafilters.iteritems():
|
||||
for name, filterfn in pycompat.iteritems(self._datafilters):
|
||||
if cmd.startswith(name):
|
||||
fn = filterfn
|
||||
params = cmd[len(name) :].lstrip()
|
||||
|
@ -133,7 +133,7 @@ def label(code):
|
||||
try:
|
||||
mname = _fn2mod[code.co_filename]
|
||||
except KeyError:
|
||||
for k, v in list(sys.modules.iteritems()):
|
||||
for k, v in list(sys.modules.items()):
|
||||
if v is None:
|
||||
continue
|
||||
if not isinstance(getattr(v, "__file__", None), str):
|
||||
|
@ -17,7 +17,7 @@ import itertools
|
||||
import os
|
||||
import struct
|
||||
|
||||
from . import error, mdiff, policy, revlog, util
|
||||
from . import error, mdiff, policy, pycompat, revlog, util
|
||||
from .i18n import _
|
||||
from .node import bin, hex
|
||||
|
||||
@ -405,10 +405,10 @@ class manifestdict(object):
|
||||
return self._lm.__iter__()
|
||||
|
||||
def iterkeys(self):
|
||||
return self._lm.iterkeys()
|
||||
return pycompat.iterkeys(self._lm)
|
||||
|
||||
def keys(self):
|
||||
return list(self.iterkeys())
|
||||
return list(self._lm.keys())
|
||||
|
||||
def filesnotin(self, m2, matcher=None):
|
||||
"""Set of files in this manifest that are not in the other"""
|
||||
@ -419,7 +419,7 @@ class manifestdict(object):
|
||||
diff = self.diff(m2)
|
||||
files = set(
|
||||
filepath
|
||||
for filepath, hashflags in diff.iteritems()
|
||||
for filepath, hashflags in pycompat.iteritems(diff)
|
||||
if hashflags[1][0] is None
|
||||
)
|
||||
return files
|
||||
@ -762,7 +762,7 @@ class treemanifest(object):
|
||||
if p in self._files:
|
||||
yield self._subpath(p), n
|
||||
else:
|
||||
for f, sn in n.iteritems():
|
||||
for f, sn in pycompat.iteritems(n):
|
||||
yield f, sn
|
||||
|
||||
iteritems = items
|
||||
@ -773,14 +773,14 @@ class treemanifest(object):
|
||||
if p in self._files:
|
||||
yield self._subpath(p)
|
||||
else:
|
||||
for f in self._dirs[p].iterkeys():
|
||||
for f in pycompat.iterkeys(self._dirs[p]):
|
||||
yield f
|
||||
|
||||
def keys(self):
|
||||
return list(self.iterkeys())

def __iter__(self):
return self.iterkeys()
return pycompat.iterkeys(self)

def __contains__(self, f):
if f is None:
@ -913,14 +913,14 @@ class treemanifest(object):
return
t1._load()
t2._load()
for d, m1 in t1._dirs.iteritems():
for d, m1 in pycompat.iteritems(t1._dirs):
if d in t2._dirs:
m2 = t2._dirs[d]
_filesnotin(m1, m2)
else:
files.update(m1.iterkeys())
files.update(pycompat.iterkeys(m1))

for fn in t1._files.iterkeys():
for fn in pycompat.iterkeys(t1._files):
if fn not in t2._files:
files.add(t1._subpath(fn))

@ -1015,7 +1015,7 @@ class treemanifest(object):
if fn in self._flags:
ret._flags[fn] = self._flags[fn]

for dir, subm in self._dirs.iteritems():
for dir, subm in pycompat.iteritems(self._dirs):
m = subm._matches(match)
if not m._isempty():
ret._dirs[dir] = m
@ -1049,22 +1049,22 @@ class treemanifest(object):
return
t1._load()
t2._load()
for d, m1 in t1._dirs.iteritems():
for d, m1 in pycompat.iteritems(t1._dirs):
m2 = t2._dirs.get(d, emptytree)
_diff(m1, m2)

for d, m2 in t2._dirs.iteritems():
for d, m2 in pycompat.iteritems(t2._dirs):
if d not in t1._dirs:
_diff(emptytree, m2)

for fn, n1 in t1._files.iteritems():
for fn, n1 in pycompat.iteritems(t1._files):
fl1 = t1._flags.get(fn, "")
n2 = t2._files.get(fn, None)
fl2 = t2._flags.get(fn, "")
if n1 != n2 or fl1 != fl2:
result[t1._subpath(fn)] = ((n1, fl1), (n2, fl2))

for fn, n2 in t2._files.iteritems():
for fn, n2 in pycompat.iteritems(t2._files):
if fn not in t1._files:
fl2 = t2._flags.get(fn, "")
result[t2._subpath(fn)] = ((None, ""), (n2, fl2))
@ -1122,7 +1122,7 @@ class treemanifest(object):
m1._load()
m2._load()
emptytree = treemanifest()
for d, subm in self._dirs.iteritems():
for d, subm in pycompat.iteritems(self._dirs):
subp1 = m1._dirs.get(d, emptytree)._node
subp2 = m2._dirs.get(d, emptytree)._node
if subp1 == revlog.nullid:
@ -1141,7 +1141,7 @@ class treemanifest(object):
yield self

self._load()
for d, subm in self._dirs.iteritems():
for d, subm in pycompat.iteritems(self._dirs):
for subtree in subm.walksubtrees(matcher=matcher):
yield subtree

@ -1636,7 +1636,7 @@ class treemanifestctx(object):
m0 = self._manifestlog.get(self._dir, revlog.node(r0)).read()
m1 = self.read()
md = treemanifest(dir=self._dir)
for f, ((n0, fl0), (n1, fl1)) in m0.diff(m1).iteritems():
for f, ((n0, fl0), (n1, fl1)) in pycompat.iteritems(m0.diff(m1)):
if n1:
md[f] = n1
if fl1:
|
@ -19,7 +19,7 @@ import re
# pyre-fixme[21]: Could not find `bindings`.
from bindings import pathmatcher

from . import error, pathutil, util
from . import error, pathutil, pycompat, util
from .i18n import _


@ -1620,7 +1620,7 @@ def readpatternfile(filepath, warn, sourceinfo=False):
continue

linesyntax = syntax
for s, rels in syntaxes.iteritems():
for s, rels in pycompat.iteritems(syntaxes):
if line.startswith(rels):
linesyntax = rels
line = line[len(rels) :]
|
@ -395,7 +395,7 @@ class mergestate(object):
# the type of state that is stored, and capital-letter records are used
# to prevent older versions of Mercurial that do not support the feature
# from loading them.
for filename, v in self._state.iteritems():
for filename, v in pycompat.iteritems(self._state):
if v[0] == "d":
# Driver-resolved merge. These are stored in 'D' records.
records.append(("D", "\0".join([filename] + v)))
@ -412,8 +412,10 @@ class mergestate(object):
else:
# Normal files. These are stored in 'F' records.
records.append(("F", "\0".join([filename] + v)))
for filename, extras in sorted(self._stateextras.iteritems()):
rawextras = "\0".join("%s\0%s" % (k, v) for k, v in extras.iteritems())
for filename, extras in sorted(pycompat.iteritems(self._stateextras)):
rawextras = "\0".join(
"%s\0%s" % (k, v) for k, v in pycompat.iteritems(extras)
)
records.append(("f", "%s\0%s" % (filename, rawextras)))
if self._labels is not None:
labels = "\0".join(self._labels)
@ -510,7 +512,7 @@ class mergestate(object):
def unresolved(self):
"""Obtain the paths of unresolved files."""

for f, entry in self._state.iteritems():
for f, entry in pycompat.iteritems(self._state):
if entry[0] in ("u", "pu"):
yield f

@ -652,7 +654,7 @@ class mergestate(object):
"""return counts for updated, merged and removed files in this
session"""
updated, merged, removed = 0, 0, 0
for r, action in self._results.itervalues():
for r, action in pycompat.itervalues(self._results):
if r is None:
updated += 1
elif r == 0:
@ -669,7 +671,7 @@ class mergestate(object):
def actions(self):
"""return lists of actions to perform on the dirstate"""
actions = {"r": [], "f": [], "a": [], "am": [], "g": []}
for f, (r, action) in self._results.iteritems():
for f, (r, action) in pycompat.iteritems(self._results):
if action is not None:
actions[action].append((f, None, "merge result"))
return actions
@ -808,7 +810,7 @@ def _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce):

checkunknowndirs = _unknowndirschecker()
count = 0
for f, (m, args, msg) in actions.iteritems():
for f, (m, args, msg) in pycompat.iteritems(actions):
if m in ("c", "dc"):
count += 1
if _checkunknownfile(repo, wctx, mctx, f):
@ -828,7 +830,7 @@ def _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce):
collectconflicts(ignoredconflicts, ignoredconfig)
collectconflicts(unknownconflicts, unknownconfig)
else:
for f, (m, args, msg) in actions.iteritems():
for f, (m, args, msg) in pycompat.iteritems(actions):
if m == "cm":
fl2, anc = args
different = _checkunknownfile(repo, wctx, mctx, f)
@ -887,7 +889,7 @@ def _checkunknownfiles(repo, wctx, mctx, force, actions, mergeforce):
else:
repo.ui.warn(_("%s: replacing untracked files in directory\n") % f)

for f, (m, args, msg) in actions.iteritems():
for f, (m, args, msg) in pycompat.iteritems(actions):
if m == "c":
backup = (
f in fileconflicts
@ -1152,7 +1154,7 @@ def manifestmerge(
relevantfiles = set(ma.diff(m2).keys())

# For copied and moved files, we need to add the source file too.
for copykey, copyvalue in copy.iteritems():
for copykey, copyvalue in pycompat.iteritems(copy):
if copyvalue in relevantfiles:
relevantfiles.add(copykey)
for movedirkey in movewithdir:
@ -1176,7 +1178,7 @@ def manifestmerge(
elif sparsematch is not None and not forcefulldiff:
if branchmerge:
relevantfiles = set(ma.diff(m2).keys())
for copykey, copyvalue in copy.iteritems():
for copykey, copyvalue in pycompat.iteritems(copy):
if copyvalue in relevantfiles:
relevantfiles.add(copykey)
for movedirkey in movewithdir:
@ -1200,7 +1202,7 @@ def manifestmerge(
matcher = matchmod.always("", "")

actions = {}
for f, ((n1, fl1), (n2, fl2)) in diff.iteritems():
for f, ((n1, fl1), (n2, fl2)) in pycompat.iteritems(diff):
if n1 and n2: # file exists on both local and remote side
if f not in ma:
fa = copy.get(f, None)
@ -1426,7 +1428,7 @@ def calculateupdates(
if renamedelete is None or len(renamedelete) < len(renamedelete1):
renamedelete = renamedelete1

for f, a in sorted(actions.iteritems()):
for f, a in sorted(pycompat.iteritems(actions)):
m, args, msg = a
repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
if f in fbids:
@ -1589,7 +1591,7 @@ def applyupdates(repo, actions, wctx, mctx, overwrite, labels=None, ancestors=No
Return a tuple of counts (updated, merged, removed, unresolved) that
describes how many files were affected by the update.
"""
perftrace.tracevalue("Actions", sum(len(v) for k, v in actions.iteritems()))
perftrace.tracevalue("Actions", sum(len(v) for k, v in pycompat.iteritems(actions)))

updated, merged, removed = 0, 0, 0

@ -1881,7 +1883,7 @@ def applyupdates(repo, actions, wctx, mctx, overwrite, labels=None, ancestors=No
]

mfiles = set(a[0] for a in actions["m"])
for k, acts in extraactions.iteritems():
for k, acts in pycompat.iteritems(extraactions):
actions[k].extend(acts)
# Remove these files from actions['m'] as well. This is
# important because in recordupdates, files in actions['m'] are
@ -2276,7 +2278,7 @@ def update(
if updatecheck == "noconflict":
paths = []
cwd = repo.getcwd()
for f, (m, args, msg) in actionbyfile.iteritems():
for f, (m, args, msg) in pycompat.iteritems(actionbyfile):
if m not in ("g", "k", "e", "r", "pr"):
paths.append(repo.pathto(f, cwd))

@ -2293,7 +2295,7 @@ def update(

# Convert to dictionary-of-lists format
actions = dict((m, []) for m in "a am f g cd dc r dm dg m e k p pr".split())
for f, (m, args, msg) in actionbyfile.iteritems():
for f, (m, args, msg) in pycompat.iteritems(actionbyfile):
if m not in actions:
actions[m] = []
actions[m].append((f, args, msg))
@ -2310,7 +2312,7 @@ def update(
_checkcollision(repo, wc.manifest(), actions)

# divergent renames
for f, fl in sorted(diverge.iteritems()):
for f, fl in sorted(pycompat.iteritems(diverge)):
repo.ui.warn(
_("note: possible conflict - %s was renamed " "multiple times to:\n")
% f
@ -2319,7 +2321,7 @@ def update(
repo.ui.warn(" %s\n" % nf)

# rename and delete
for f, fl in sorted(renamedelete.iteritems()):
for f, fl in sorted(pycompat.iteritems(renamedelete)):
repo.ui.warn(
_("note: possible conflict - %s was deleted " "and renamed to:\n") % f
)
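For context on the pattern this commit applies: Python 3 removed dict.iteritems()/iterkeys()/itervalues(), so each call site switches from the Python 2-only dict methods to module-level helpers in edenscm.mercurial.pycompat that behave the same on both interpreters. The snippet below is a minimal, hypothetical sketch of such helpers; it is not the actual pycompat module (whose implementation is not shown in this diff), only an illustration of the shape of the shim the call sites rely on.

import sys

if sys.version_info[0] >= 3:
    # On Python 3 the dict.iter* methods are gone; keys()/values()/items()
    # already return lazy view objects, so wrapping them in iter() gives the
    # same "lazy iterator" behavior the Python 2 methods provided.
    def iterkeys(d):
        return iter(d.keys())

    def itervalues(d):
        return iter(d.values())

    def iteritems(d):
        return iter(d.items())
else:
    # On Python 2, delegate to the original lazy dict methods.
    def iterkeys(d):
        return d.iterkeys()

    def itervalues(d):
        return d.itervalues()

    def iteritems(d):
        return d.iteritems()

With helpers like these available as pycompat.iter*, the rewrite in this diff is mechanical: for k, v in d.iteritems(): becomes for k, v in pycompat.iteritems(d):, and the same code then runs under both Python 2 and Python 3.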