mirror of
https://github.com/facebook/sapling.git
synced 2024-10-12 17:58:27 +03:00
f1c575a099
Summary: This check is useful and detects real errors (ex. fbconduit). Unfortunately `arc lint` will run it with both py2 and py3 so a lot of py2 builtins will still be warned. I didn't find a clean way to disable py3 check. So this diff tries to fix them. For `xrange`, the change was done by a script: ``` import sys import redbaron headertypes = {'comment', 'endl', 'from_import', 'import', 'string', 'assignment', 'atomtrailers'} xrangefix = '''try: xrange(0) except NameError: xrange = range ''' def isxrange(x): try: return x[0].value == 'xrange' except Exception: return False def main(argv): for i, path in enumerate(argv): print('(%d/%d) scanning %s' % (i + 1, len(argv), path)) content = open(path).read() try: red = redbaron.RedBaron(content) except Exception: print(' warning: failed to parse') continue hasxrange = red.find('atomtrailersnode', value=isxrange) hasxrangefix = 'xrange = range' in content if hasxrangefix or not hasxrange: print(' no need to change') continue # find a place to insert the compatibility statement changed = False for node in red: if node.type in headertypes: continue # node.insert_before is an easier API, but it has bugs changing # other "finally" and "except" positions. So do the insert # manually. # # node.insert_before(xrangefix) line = node.absolute_bounding_box.top_left.line - 1 lines = content.splitlines(1) content = ''.join(lines[:line]) + xrangefix + ''.join(lines[line:]) changed = True break if changed: # "content" is faster than "red.dumps()" open(path, 'w').write(content) print(' updated') if __name__ == "__main__": sys.exit(main(sys.argv[1:])) ``` For other py2 builtins that do not have a py3 equivalent, some `# noqa` were added as a workaround for now. Reviewed By: DurhamG Differential Revision: D6934535 fbshipit-source-id: 546b62830af144bc8b46788d2e0fd00496838939
165 lines
5.3 KiB
Python
165 lines
5.3 KiB
Python
# discovery.py - protocol changeset discovery functions
#
# Copyright 2010 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
|
from __future__ import absolute_import
|
|
|
|
import collections
|
|
|
|
from .i18n import _
|
|
from .node import (
|
|
nullid,
|
|
short,
|
|
)
|
|
from . import (
|
|
error,
|
|
)
|
|
|
|
# Python 2/3 compatibility shim: Python 3 removed the ``xrange`` builtin,
# so bind the name to ``range`` (already lazy on py3) when it is missing.
try:
    xrange  # probe the name; raises NameError on Python 3
except NameError:
    xrange = range
|
|
|
|
def findcommonincoming(repo, remote, heads=None, force=False):
    """Return a tuple (common, fetch, heads) used to identify the common
    subset of nodes between repo and remote.

    "common" is a list of (at least) the heads of the common subset.
    "fetch" is a list of roots of the nodes that would be incoming, to be
    supplied to changegroupsubset.
    "heads" is either the supplied heads, or else the remote's heads.

    Parameters:
        repo: local repository; only ``repo.changelog`` and ``repo.ui``
            are used here.
        remote: peer answering ``heads()``, ``branches()`` and
            ``between()`` wire-protocol calls.
        heads: optional list of remote head nodes to limit discovery to;
            when empty/None the remote's full head list is fetched.
        force: when True, an apparently unrelated remote only triggers a
            warning instead of aborting.

    Raises:
        error.RepoError: if the sanity check finds a "new" node that is
            already known locally.
        error.Abort: if the repositories share no history and ``force``
            is not set.
    """

    # hasnode tests membership in the local changelog
    knownnode = repo.changelog.hasnode
    search = []        # (head, root) ranges to binary-search later
    fetch = set()      # roots of incoming changesets
    seen = set()       # branch heads already examined
    seenbranch = set() # full branch tuples already examined
    base = set()       # known-common nodes (the "common" result)

    if not heads:
        heads = remote.heads()

    # local repo is empty: everything remote is incoming
    if repo.changelog.tip() == nullid:
        base.add(nullid)
        if heads != [nullid]:
            return [nullid], [nullid], list(heads)
        return [nullid], [], heads

    # assume we're closer to the tip than the root
    # and start by examining the heads
    repo.ui.status(_("searching for changes\n"))

    # split the remote heads into locally-unknown vs already-known
    unknown = []
    for h in heads:
        if not knownnode(h):
            unknown.append(h)
        else:
            base.add(h)

    # every remote head is known locally: nothing is incoming
    if not unknown:
        return list(base), [], list(heads)

    req = set(unknown) # nodes already requested from the remote
    reqcnt = 0         # number of wire requests, reported at the end

    # search through remote branches
    # a 'branch' here is a linear segment of history, with four parts:
    # head, root, first parent, second parent
    # (a branch always has two parents (or none) by definition)
    unknown = collections.deque(remote.branches(unknown))
    while unknown:
        r = [] # parents to ask the remote about in the next batch
        while unknown:
            n = unknown.popleft()
            if n[0] in seen:
                continue

            repo.ui.debug("examining %s:%s\n"
                          % (short(n[0]), short(n[1])))
            if n[0] == nullid: # found the end of the branch
                pass
            elif n in seenbranch:
                repo.ui.debug("branch already found\n")
                continue
            elif n[1] and knownnode(n[1]): # do we know the base?
                repo.ui.debug("found incomplete branch %s:%s\n"
                              % (short(n[0]), short(n[1])))
                search.append(n[0:2]) # schedule branch range for scanning
                seenbranch.add(n)
            else:
                if n[1] not in seen and n[1] not in fetch:
                    # the branch root is incoming iff both of its parents
                    # are already known locally
                    if knownnode(n[2]) and knownnode(n[3]):
                        repo.ui.debug("found new changeset %s\n" %
                                      short(n[1]))
                        fetch.add(n[1]) # earliest unknown
                    for p in n[2:4]:
                        if knownnode(p):
                            base.add(p) # latest known

                # queue any still-unknown, not-yet-requested parents
                for p in n[2:4]:
                    if p not in req and not knownnode(p):
                        r.append(p)
                        req.add(p)
            seen.add(n[0])

        if r:
            reqcnt += 1
            repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
            repo.ui.debug("request %d: %s\n" %
                          (reqcnt, " ".join(map(short, r))))
            # batch the branches() calls, 10 nodes per request
            for p in xrange(0, len(r), 10):
                for b in remote.branches(r[p:p + 10]):
                    repo.ui.debug("received %s:%s\n" %
                                  (short(b[0]), short(b[1])))
                    unknown.append(b)

    # do binary search on the branches we found
    while search:
        newsearch = []
        reqcnt += 1
        repo.ui.progress(_('searching'), reqcnt, unit=_('queries'))
        # remote.between() returns, per (head, root) pair, sample nodes at
        # exponentially growing distances from the head
        for n, l in zip(search, remote.between(search)):
            l.append(n[1])
            p = n[0]
            f = 1 # distance from p to the next sample i doubles each step
            for i in l:
                repo.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i)))
                if knownnode(i):
                    if f <= 2:
                        # gap of <= 2 means p itself is the first unknown
                        repo.ui.debug("found new branch changeset %s\n" %
                                      short(p))
                        fetch.add(p)
                        base.add(i)
                    else:
                        # boundary lies between p and i: narrow further
                        repo.ui.debug("narrowed branch search to %s:%s\n"
                                      % (short(p), short(i)))
                        newsearch.append((p, i))
                    break
                p, f = i, f * 2
        search = newsearch

    # sanity check our fetch list
    for f in fetch:
        if knownnode(f):
            # NOTE(review): f[:4] truncates the 20-byte node before
            # short() hexlifies it -- matches historical upstream code,
            # but looks suspicious; verify if ever touched.
            raise error.RepoError(_("already have changeset ")
                                  + short(f[:4]))

    base = list(base)
    if base == [nullid]:
        # only the null revision is common: no shared history
        if force:
            repo.ui.warn(_("warning: repository is unrelated\n"))
        else:
            raise error.Abort(_("repository is unrelated"))

    repo.ui.debug("found new changesets starting at " +
                  " ".join([short(f) for f in fetch]) + "\n")

    # close the progress topic and report total round trips
    repo.ui.progress(_('searching'), None)
    repo.ui.debug("%d total queries\n" % reqcnt)

    return base, list(fetch), heads
|