cleanup: use set literals

We no longer support Python 2.6, so we can now use set literals.
Martin von Zweigbergk 2017-02-10 16:56:29 -08:00
parent 4e8e473e5e
commit c3406ac3db
47 changed files with 119 additions and 120 deletions
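
The rewrite is purely mechanical: every `set([...])` or `set((...))` call over inline elements becomes a set literal, and `set([expr for ...])` becomes a set comprehension. Both forms need Python 2.7 or later, which is why dropping 2.6 support unlocks them. A minimal sketch of the patterns this commit touches (the variable names here are illustrative, not taken from the patch):

    # Old spellings build a throwaway list/tuple and hand it to set().
    old_literal = set(['revlogv1', 'store', 'fncache'])
    old_singleton = set((None,))
    old_comprehension = set([n for n in range(5) if n % 2])

    # New spellings say the same thing directly, with no intermediate sequence.
    new_literal = {'revlogv1', 'store', 'fncache'}
    new_singleton = {None}
    new_comprehension = {n for n in range(5) if n % 2}

    assert old_literal == new_literal
    assert old_singleton == new_singleton
    assert old_comprehension == new_comprehension

    # The one spelling that stays: {} is an empty dict, so the empty set is
    # still written set(), as in the untouched "unsynced = set()" line later
    # in this diff.
    assert set() == set([]) and set() != {}

Calls over an existing iterable, such as `set(nodelist)` or `set(state.keys())` elsewhere in the patch, are left alone, since a literal only helps when the elements are spelled out inline.
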

View File

@ -213,7 +213,7 @@ def list_stdlib_modules():
yield m
for m in ['cffi']:
yield m
stdlib_prefixes = set([sys.prefix, sys.exec_prefix])
stdlib_prefixes = {sys.prefix, sys.exec_prefix}
# We need to supplement the list of prefixes for the search to work
# when run from within a virtualenv.
for mod in (BaseHTTPServer, zlib):

View File

@ -71,7 +71,7 @@ testedwith = 'ships-with-hg-core'
cmdtable = {}
command = cmdutil.command(cmdtable)
newfile = set(('new fi', 'rename', 'copy f', 'copy t'))
newfile = {'new fi', 'rename', 'copy f', 'copy t'}
def zerodict():
return collections.defaultdict(lambda: 0)
@ -336,7 +336,7 @@ def synthesize(ui, repo, descpath, **opts):
wlock = repo.wlock()
lock = repo.lock()
nevertouch = set(('.hgsub', '.hgignore', '.hgtags'))
nevertouch = {'.hgsub', '.hgignore', '.hgtags'}
progress = ui.progress
_synthesizing = _('synthesizing')

View File

@ -310,7 +310,7 @@ class filemap_source(common.converter_source):
# map to any revision in the restricted graph. Put SKIPREV
# in the set of wanted ancestors to simplify code elsewhere
self.parentmap[rev] = SKIPREV
self.wantedancestors[rev] = set((SKIPREV,))
self.wantedancestors[rev] = {SKIPREV}
return
# Reuse the data from our parent.

View File

@ -32,7 +32,7 @@ class submodule(object):
return "%s %s" % (self.node, self.path)
# Keys in extra fields that should not be copied if the user requests.
bannedextrakeys = set([
bannedextrakeys = {
# Git commit object built-ins.
'tree',
'parent',
@ -41,7 +41,7 @@ bannedextrakeys = set([
# Mercurial built-ins.
'branch',
'close',
])
}
class convert_git(common.converter_source, common.commandline):
# Windows does not support GIT_DIR= construct while other systems
@ -455,9 +455,9 @@ class convert_git(common.converter_source, common.commandline):
('refs/heads/', '')
]
exclude = set([
exclude = {
'refs/remotes/origin/HEAD',
])
}
try:
output, status = self.gitrunlines('show-ref')

View File

@ -1641,8 +1641,8 @@ def stripwrapper(orig, ui, repo, nodelist, *args, **kwargs):
if os.path.exists(os.path.join(repo.path, 'histedit-state')):
state = histeditstate(repo)
state.read()
histedit_nodes = set([action.node for action
in state.actions if action.node])
histedit_nodes = {action.node for action
in state.actions if action.node}
common_nodes = histedit_nodes & set(nodelist)
if common_nodes:
raise error.Abort(_("histedit in progress, can't strip %s")

View File

@ -158,7 +158,7 @@ def unsharejournal(orig, ui, repo, repopath):
util.safehasattr(repo, 'journal')):
sharedrepo = share._getsrcrepo(repo)
sharedfeatures = _readsharedfeatures(repo)
if sharedrepo and sharedfeatures > set(['journal']):
if sharedrepo and sharedfeatures > {'journal'}:
# there is a shared repository and there are shared journal entries
# to copy. move shared date over from source to destination but
# move the local file first
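
An aside on the hunk just above, offered as an illustrative sketch rather than anything from the patch: `sharedfeatures > {'journal'}` is a strict-superset test, and a set literal compares exactly like the `set([...])` call it replaces.

    assert {'journal', 'bookmarks'} > {'journal'}       # strict superset holds
    assert not ({'journal'} > {'journal'})              # equal sets fail a strict test
    assert ({'journal'} > set(['journal'])) == ({'journal'} > {'journal'})  # old and new spellings agree
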

View File

@ -129,7 +129,7 @@ reposetup = reposetup.reposetup
def featuresetup(ui, supported):
# don't die on seeing a repo with the largefiles requirement
supported |= set(['largefiles'])
supported |= {'largefiles'}
def uisetup(ui):
localrepo.localrepository.featuresetupfuncs.add(featuresetup)

View File

@ -242,7 +242,7 @@ class rebaseruntime(object):
skipped = set()
# recompute the set of skipped revs
if not collapse:
seen = set([dest])
seen = {dest}
for old, new in sorted(state.items()):
if new != revtodo and new in seen:
skipped.add(old)
@ -250,7 +250,7 @@ class rebaseruntime(object):
repo.ui.debug('computed skipped revs: %s\n' %
(' '.join(str(r) for r in sorted(skipped)) or None))
repo.ui.debug('rebase status resumed\n')
_setrebasesetvisibility(repo, set(state.keys()) | set([originalwd]))
_setrebasesetvisibility(repo, set(state.keys()) | {originalwd})
self.originalwd = originalwd
self.dest = dest
@ -1235,7 +1235,7 @@ def buildstate(repo, dest, rebaseset, collapse, obsoletenotrebased):
rebaseset: set of rev
'''
originalwd = repo['.'].rev()
_setrebasesetvisibility(repo, set(rebaseset) | set([originalwd]))
_setrebasesetvisibility(repo, set(rebaseset) | {originalwd})
# This check isn't strictly necessary, since mq detects commits over an
# applied patch. But it prevents messing up the working directory when

View File

@ -352,7 +352,7 @@ def mutableancestors(ctx):
"""return all mutable ancestors for ctx (included)
Much faster than the revset ancestors(ctx) & draft()"""
seen = set([nodemod.nullrev])
seen = {nodemod.nullrev}
visit = collections.deque()
visit.append(ctx)
while visit:
@ -1006,17 +1006,17 @@ def shelvecmd(ui, repo, *pats, **opts):
all shelved changes, use ``--cleanup``.
'''
allowables = [
('addremove', set(['create'])), # 'create' is pseudo action
('unknown', set(['create'])),
('cleanup', set(['cleanup'])),
# ('date', set(['create'])), # ignored for passing '--date "0 0"' in tests
('delete', set(['delete'])),
('edit', set(['create'])),
('list', set(['list'])),
('message', set(['create'])),
('name', set(['create'])),
('patch', set(['patch', 'list'])),
('stat', set(['stat', 'list'])),
('addremove', {'create'}), # 'create' is pseudo action
('unknown', {'create'}),
('cleanup', {'cleanup'}),
# ('date', {'create'}), # ignored for passing '--date "0 0"' in tests
('delete', {'delete'}),
('edit', {'create'}),
('list', {'list'}),
('message', {'create'}),
('name', {'create'}),
('patch', {'patch', 'list'}),
('stat', {'stat', 'list'}),
]
def checkopt(opt):
if opts.get(opt):

View File

@ -185,7 +185,7 @@ def underwayrevset(repo, subset, x):
# Add working directory parent.
wdirrev = repo['.'].rev()
if wdirrev != nullrev:
relevant += revset.baseset(set([wdirrev]))
relevant += revset.baseset({wdirrev})
return subset & relevant

View File

@ -22,14 +22,14 @@ modulepolicy = policy.policy
# Modules that have both Python and C implementations. See also the
# set of .py files under mercurial/pure/.
_dualmodules = set([
_dualmodules = {
'mercurial.base85',
'mercurial.bdiff',
'mercurial.diffhelpers',
'mercurial.mpatch',
'mercurial.osutil',
'mercurial.parsers',
])
}
class hgimporter(object):
"""Object that conforms to import hook interface defined in PEP-302."""

View File

@ -47,7 +47,7 @@ def commonancestorsheads(pfunc, *nodes):
sv |= poison
if v in nodes:
# history is linear
return set([v])
return {v}
if sv < poison:
for p in pfunc(v):
sp = seen[p]
@ -151,7 +151,7 @@ class incrementalmissingancestors(object):
def hasbases(self):
'''whether the common set has any non-trivial bases'''
return self.bases and self.bases != set([nullrev])
return self.bases and self.bases != {nullrev}
def addbases(self, newbases):
'''grow the ancestor set by adding new bases'''

View File

@ -1693,7 +1693,7 @@ def walkfilerevs(repo, match, follow, revs, fncache):
last = filelog.rev(node)
# keep track of all ancestors of the file
ancestors = set([filelog.linkrev(last)])
ancestors = {filelog.linkrev(last)}
# iterate from latest to oldest revision
for rev, flparentlinkrevs, copied in filerevgen(filelog, last):

View File

@ -442,10 +442,10 @@ if pycompat.osname == 'nt':
'inverse': _COMMON_LVB_REVERSE_VIDEO, # double-byte charsets only
}
passthrough = set([_FOREGROUND_INTENSITY,
_BACKGROUND_INTENSITY,
_COMMON_LVB_UNDERSCORE,
_COMMON_LVB_REVERSE_VIDEO])
passthrough = {_FOREGROUND_INTENSITY,
_BACKGROUND_INTENSITY,
_COMMON_LVB_UNDERSCORE,
_COMMON_LVB_REVERSE_VIDEO}
stdout = _kernel32.GetStdHandle(
_STD_OUTPUT_HANDLE) # don't close the handle returned

View File

@ -637,7 +637,7 @@ def _checkcopies(ctx, f, m1, m2, base, tca, remotebase, limit, data):
return
of = None
seen = set([f])
seen = {f}
for oc in getfctx(f, m1[f]).ancestors():
ocr = oc.linkrev()
of = oc.path()

View File

@ -254,7 +254,7 @@ def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
newheads = list(c.node() for c in r)
# set some unsynced head to issue the "unsynced changes" warning
if inc:
unsynced = set([None])
unsynced = {None}
else:
unsynced = set()
return {None: (oldheads, newheads, unsynced)}

View File

@ -749,7 +749,7 @@ def _dispatch(req):
# imported and commands.table is populated.
debugcommands.command
uis = set([ui, lui])
uis = {ui, lui}
if req.repo:
uis.add(req.repo.ui)

View File

@ -30,9 +30,9 @@ _ignore = [unichr(int(x, 16)).encode("utf-8") for x in
"206a 206b 206c 206d 206e 206f feff".split()]
# verify the next function will work
if pycompat.ispy3:
assert set(i[0] for i in _ignore) == set([ord(b'\xe2'), ord(b'\xef')])
assert set(i[0] for i in _ignore) == {ord(b'\xe2'), ord(b'\xef')}
else:
assert set(i[0] for i in _ignore) == set(["\xe2", "\xef"])
assert set(i[0] for i in _ignore) == {"\xe2", "\xef"}
def hfsignoreclean(s):
"""Remove codepoints ignored by HFS+ from s.

View File

@ -43,7 +43,7 @@ _bundlespeccgversions = {'v1': '01',
}
# Compression engines allowed in version 1. THIS SHOULD NEVER CHANGE.
_bundlespecv1compengines = set(['gzip', 'bzip2', 'none'])
_bundlespecv1compengines = {'gzip', 'bzip2', 'none'}
def parsebundlespec(repo, spec, strict=True, externalnames=False):
"""Parse a bundle string specification into parts.
@ -1522,7 +1522,7 @@ def _pullobsolete(pullop):
def caps20to10(repo):
"""return a set with appropriate options to use bundle20 during getbundle"""
caps = set(['HG20'])
caps = {'HG20'}
capsblob = bundle2.encodecaps(bundle2.getrepocaps(repo))
caps.add('bundle2=' + urlreq.quote(capsblob))
return caps

View File

@ -28,8 +28,8 @@ _extensions = {}
_disabledextensions = {}
_aftercallbacks = {}
_order = []
_builtin = set(['hbisect', 'bookmarks', 'parentrevspec', 'progress', 'interhg',
'inotify', 'hgcia'])
_builtin = {'hbisect', 'bookmarks', 'parentrevspec', 'progress', 'interhg',
'inotify', 'hgcia'}
def extensions(ui=None):
if ui:

View File

@ -14,7 +14,7 @@ from . import (
)
# Set of flags to not apply boolean negation logic on
nevernegate = set([
nevernegate = {
# avoid --no-noninteractive
'noninteractive',
# These two flags are special because they cause hg to do one
@ -22,7 +22,7 @@ nevernegate = set([
# like aliases anyway.
'help',
'version',
])
}
def gnugetopt(args, options, longoptions):
"""Parse options mostly like getopt.gnu_getopt.

View File

@ -37,7 +37,7 @@ elements = {
"end": (0, None, None, None, None),
}
keywords = set(['and', 'or', 'not'])
keywords = {'and', 'or', 'not'}
globchars = ".*{}[]?/\\_"

View File

@ -34,7 +34,7 @@ from .hgweb import (
webcommands,
)
_exclkeywords = set([
_exclkeywords = {
"(ADVANCED)",
"(DEPRECATED)",
"(EXPERIMENTAL)",
@ -44,7 +44,7 @@ _exclkeywords = set([
_("(DEPRECATED)"),
# i18n: "(EXPERIMENTAL)" is a keyword, must be translated consistently
_("(EXPERIMENTAL)"),
])
}
def listexts(header, exts, indent=1, showdeprecated=False):
'''return a text listing of the given extensions'''

View File

@ -113,9 +113,9 @@ def unfilteredmethod(orig):
return orig(repo.unfiltered(), *args, **kwargs)
return wrapper
moderncaps = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
'unbundle'))
legacycaps = moderncaps.union(set(['changegroupsubset']))
moderncaps = {'lookup', 'branchmap', 'pushkey', 'known', 'getbundle',
'unbundle'}
legacycaps = moderncaps.union({'changegroupsubset'})
class localpeer(peer.peerrepository):
'''peer for a local repo; reflects only the most recent API'''
@ -247,11 +247,11 @@ class locallegacypeer(localpeer):
class localrepository(object):
supportedformats = set(('revlogv1', 'generaldelta', 'treemanifest',
'manifestv2'))
_basesupported = supportedformats | set(('store', 'fncache', 'shared',
'relshared', 'dotencode'))
openerreqs = set(('revlogv1', 'generaldelta', 'treemanifest', 'manifestv2'))
supportedformats = {'revlogv1', 'generaldelta', 'treemanifest',
'manifestv2'}
_basesupported = supportedformats | {'store', 'fncache', 'shared',
'relshared', 'dotencode'}
openerreqs = {'revlogv1', 'generaldelta', 'treemanifest', 'manifestv2'}
filtername = None
# a list of (ui, featureset) functions.
@ -2020,7 +2020,7 @@ def newreporequirements(repo):
new repositories.
"""
ui = repo.ui
requirements = set(['revlogv1'])
requirements = {'revlogv1'}
if ui.configbool('format', 'usestore', True):
requirements.add('store')
if ui.configbool('format', 'usefncache', True):

View File

@ -236,7 +236,7 @@ class match(object):
@propertycache
def _dirs(self):
return set(util.dirs(self._fileroots)) | set(['.'])
return set(util.dirs(self._fileroots)) | {'.'}
def visitdir(self, dir):
'''Decides whether a directory should be visited based on whether it

View File

@ -46,7 +46,7 @@ class namespaces(object):
logfmt=_("tag: %s\n"),
listnames=tagnames,
namemap=tagnamemap, nodemap=tagnodemap,
deprecated=set(['tip']))
deprecated={'tip'})
self.addnamespace(n)
bnames = lambda repo: repo.branchmap().keys()

View File

@ -23,7 +23,7 @@ newnodeid = '!' * 20
addednodeid = ('0' * 15) + 'added'
modifiednodeid = ('0' * 12) + 'modified'
wdirnodes = set((newnodeid, addednodeid, modifiednodeid))
wdirnodes = {newnodeid, addednodeid, modifiednodeid}
# pseudo identifiers for working directory
# (they are experimental, so don't add too many dependencies on them)

View File

@ -474,7 +474,7 @@ def findexe(command):
def setsignalhandler():
pass
_wantedkinds = set([stat.S_IFREG, stat.S_IFLNK])
_wantedkinds = {stat.S_IFREG, stat.S_IFLNK}
def statfiles(files):
'''Stat each file in files. Yield each stat, or None if a file does not

View File

@ -913,8 +913,8 @@ class revlog(object):
stop = []
stoprevs = set([self.rev(n) for n in stop])
startrev = self.rev(start)
reachable = set((startrev,))
heads = set((startrev,))
reachable = {startrev}
heads = {startrev}
parentrevs = self.parentrevs
for r in self.revs(start=startrev + 1):
@ -2039,7 +2039,7 @@ class revlog(object):
DELTAREUSESAMEREVS = 'samerevs'
DELTAREUSENEVER = 'never'
DELTAREUSEALL = set(['always', 'samerevs', 'never'])
DELTAREUSEALL = {'always', 'samerevs', 'never'}
def clone(self, tr, destrevlog, addrevisioncb=None,
deltareuse=DELTAREUSESAMEREVS, aggressivemergedeltas=None):

View File

@ -451,9 +451,8 @@ def bookmark(repo, subset, x):
for bmrev in matchrevs:
bms.add(repo[bmrev].rev())
else:
bms = set([repo[r].rev()
for r in repo._bookmarks.values()])
bms -= set([node.nullrev])
bms = {repo[r].rev() for r in repo._bookmarks.values()}
bms -= {node.nullrev}
return subset & bms
@predicate('branch(string or set)', safe=True)
@ -1276,7 +1275,7 @@ def named(repo, subset, x):
if name not in ns.deprecated:
names.update(repo[n].rev() for n in ns.nodes(repo, name))
names -= set([node.nullrev])
names -= {node.nullrev}
return subset & names
@predicate('id(string)', safe=True)
@ -1363,8 +1362,8 @@ def origin(repo, subset, x):
return src
src = prev
o = set([_firstsrc(r) for r in dests])
o -= set([None])
o = {_firstsrc(r) for r in dests}
o -= {None}
# XXX we should turn this into a baseset instead of a set, smartset may do
# some optimizations from the fact this is a baseset.
return subset & o
@ -1393,7 +1392,7 @@ def outgoing(repo, subset, x):
outgoing = discovery.findcommonoutgoing(repo, other, onlyheads=revs)
repo.ui.popbuffer()
cl = repo.changelog
o = set([cl.rev(r) for r in outgoing.missing])
o = {cl.rev(r) for r in outgoing.missing}
return subset & o
@predicate('p1([set])', safe=True)
@ -1410,7 +1409,7 @@ def p1(repo, subset, x):
cl = repo.changelog
for r in getset(repo, fullreposet(repo), x):
ps.add(cl.parentrevs(r)[0])
ps -= set([node.nullrev])
ps -= {node.nullrev}
# XXX we should turn this into a baseset instead of a set, smartset may do
# some optimizations from the fact this is a baseset.
return subset & ps
@ -1433,7 +1432,7 @@ def p2(repo, subset, x):
cl = repo.changelog
for r in getset(repo, fullreposet(repo), x):
ps.add(cl.parentrevs(r)[1])
ps -= set([node.nullrev])
ps -= {node.nullrev}
# XXX we should turn this into a baseset instead of a set, smartset may do
# some optimizations from the fact this is a baseset.
return subset & ps
@ -1458,7 +1457,7 @@ def parents(repo, subset, x):
up(p.rev() for p in repo[r].parents())
else:
up(parentrevs(r))
ps -= set([node.nullrev])
ps -= {node.nullrev}
return subset & ps
def _phase(repo, subset, *targets):
@ -1965,7 +1964,7 @@ def _toposort(revs, parentsfunc, firstbranch=()):
else:
# This is a new head. We create a new subgroup for it.
targetidx = len(groups)
groups.append(([], set([rev])))
groups.append(([], {rev}))
gr = groups[targetidx]
@ -2098,11 +2097,11 @@ def tag(repo, subset, x):
if tn is None:
raise error.RepoLookupError(_("tag '%s' does not exist")
% pattern)
s = set([repo[tn].rev()])
s = {repo[tn].rev()}
else:
s = set([cl.rev(n) for t, n in repo.tagslist() if matcher(t)])
s = {cl.rev(n) for t, n in repo.tagslist() if matcher(t)}
else:
s = set([cl.rev(n) for t, n in repo.tagslist() if t != 'tip'])
s = {cl.rev(n) for t, n in repo.tagslist() if t != 'tip'}
return subset & s
@predicate('tagged', safe=True)

View File

@ -44,9 +44,9 @@ elements = {
"end": (0, None, None, None, None),
}
keywords = set(['and', 'or', 'not'])
keywords = {'and', 'or', 'not'}
_quoteletters = set(['"', "'"])
_quoteletters = {'"', "'"}
_simpleopletters = set(pycompat.iterbytestr("():=,-|&+!~^%"))
# default set of valid characters for the initial letter of symbols

View File

@ -123,7 +123,7 @@ def _createhgwebservice(ui, repo, opts):
if opts.get('port'):
opts['port'] = util.getport(opts.get('port'))
alluis = set([ui])
alluis = {ui}
if repo:
baseui = repo.baseui
alluis.update([repo.baseui, repo.ui])

View File

@ -243,7 +243,7 @@ def findcommonheads(ui, local, remote,
raise error.Abort(_("repository is unrelated"))
else:
ui.warn(_("warning: repository is unrelated\n"))
return (set([nullid]), True, srvheadhashes,)
return ({nullid}, True, srvheadhashes,)
anyincoming = (srvheadhashes != [nullid])
return dag.externalizeall(result), anyincoming, srvheadhashes

View File

@ -29,17 +29,17 @@ from . import (
# modern/secure or legacy/insecure. Many operations in this module have
# separate code paths depending on support in Python.
configprotocols = set([
configprotocols = {
'tls1.0',
'tls1.1',
'tls1.2',
])
}
hassni = getattr(ssl, 'HAS_SNI', False)
# TLS 1.1 and 1.2 may not be supported if the OpenSSL Python is compiled
# against doesn't support them.
supportedprotocols = set(['tls1.0'])
supportedprotocols = {'tls1.0'}
if util.safehasattr(ssl, 'PROTOCOL_TLSv1_1'):
supportedprotocols.add('tls1.1')
if util.safehasattr(ssl, 'PROTOCOL_TLSv1_2'):
@ -296,7 +296,7 @@ def protocolsettings(protocol):
# disable protocols via SSLContext.options and OP_NO_* constants.
# However, SSLContext.options doesn't work unless we have the
# full/real SSLContext available to us.
if supportedprotocols == set(['tls1.0']):
if supportedprotocols == {'tls1.0'}:
if protocol != 'tls1.0':
raise error.Abort(_('current Python does not support protocol '
'setting %s') % protocol,
@ -430,7 +430,7 @@ def wrapsocket(sock, keyfile, certfile, ui, serverhostname=None):
# is really old. (e.g. server doesn't support TLS 1.0+ or
# client doesn't support modern TLS versions introduced
# several years from when this comment was written).
if supportedprotocols != set(['tls1.0']):
if supportedprotocols != {'tls1.0'}:
ui.warn(_(
'(could not communicate with %s using security '
'protocols %s; if you are using a modern Mercurial '

View File

@ -126,14 +126,14 @@ contextmanager = contextlib.contextmanager
__all__ = ['start', 'stop', 'reset', 'display', 'profile']
skips = set(["util.py:check", "extensions.py:closure",
"color.py:colorcmd", "dispatch.py:checkargs",
"dispatch.py:<lambda>", "dispatch.py:_runcatch",
"dispatch.py:_dispatch", "dispatch.py:_runcommand",
"pager.py:pagecmd", "dispatch.py:run",
"dispatch.py:dispatch", "dispatch.py:runcommand",
"hg.py:<module>", "evolve.py:warnobserrors",
])
skips = {"util.py:check", "extensions.py:closure",
"color.py:colorcmd", "dispatch.py:checkargs",
"dispatch.py:<lambda>", "dispatch.py:_runcatch",
"dispatch.py:_dispatch", "dispatch.py:_runcommand",
"pager.py:pagecmd", "dispatch.py:run",
"dispatch.py:dispatch", "dispatch.py:runcommand",
"hg.py:<module>", "evolve.py:warnobserrors",
}
###########################################################################
## Utils

View File

@ -26,10 +26,10 @@ version = 2
# These are the file generators that should only be executed after the
# finalizers are done, since they rely on the output of the finalizers (like
# the changelog having been written).
postfinalizegenerators = set([
postfinalizegenerators = {
'bookmarks',
'dirstate'
])
}
gengroupall='all'
gengroupprefinalize='prefinalize'

View File

@ -28,12 +28,12 @@ def requiredsourcerequirements(repo):
An upgrade will not be allowed if the repository doesn't have the
requirements returned by this function.
"""
return set([
return {
# Introduced in Mercurial 0.9.2.
'revlogv1',
# Introduced in Mercurial 0.9.2.
'store',
])
}
def blocksourcerequirements(repo):
"""Obtain requirements that will prevent an upgrade from occurring.
@ -41,7 +41,7 @@ def blocksourcerequirements(repo):
An upgrade cannot be performed if the source repository contains a
requirements in the returned set.
"""
return set([
return {
# The upgrade code does not yet support these experimental features.
# This is an artificial limitation.
'manifestv2',
@ -51,7 +51,7 @@ def blocksourcerequirements(repo):
'parentdelta',
# Upgrade should operate on the actual store, not the shared link.
'shared',
])
}
def supportremovedrequirements(repo):
"""Obtain requirements that can be removed during an upgrade.
@ -70,13 +70,13 @@ def supporteddestrequirements(repo):
Extensions should monkeypatch this to add their custom requirements.
"""
return set([
return {
'dotencode',
'fncache',
'generaldelta',
'revlogv1',
'store',
])
}
def allowednewrequirements(repo):
"""Obtain requirements that can be added to a repository during upgrade.
@ -88,11 +88,11 @@ def allowednewrequirements(repo):
bad additions because the whitelist approach is safer and will prevent
future, unknown requirements from accidentally being added.
"""
return set([
return {
'dotencode',
'fncache',
'generaldelta',
])
}
deficiency = 'deficiency'
optimisation = 'optimization'
@ -628,7 +628,7 @@ def _upgraderepo(ui, srcrepo, dstrepo, requirements, actions):
ui.write(_('marking source repository as being upgraded; clients will be '
'unable to read from repository\n'))
scmutil.writerequires(srcrepo.vfs,
srcrepo.requirements | set(['upgradeinprogress']))
srcrepo.requirements | {'upgradeinprogress'})
ui.write(_('starting in-place swap of repository data\n'))
ui.write(_('replaced files will be backed up at %s\n') %

View File

@ -1097,7 +1097,7 @@ def checksignature(func):
return check
# a whilelist of known filesystems where hardlink works reliably
_hardlinkfswhitelist = set([
_hardlinkfswhitelist = {
'btrfs',
'ext2',
'ext3',
@ -1109,7 +1109,7 @@ _hardlinkfswhitelist = set([
'ufs',
'xfs',
'zfs',
])
}
def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
'''copy a file, preserving mode and optionally other stat info like

View File

@ -334,7 +334,7 @@ def findexe(command):
return executable
return findexisting(os.path.expanduser(os.path.expandvars(command)))
_wantedkinds = set([stat.S_IFREG, stat.S_IFLNK])
_wantedkinds = {stat.S_IFREG, stat.S_IFLNK}
def statfiles(files):
'''Stat each file in files. Yield each stat, or None if a file

View File

@ -759,7 +759,7 @@ def _capabilities(repo, proto):
caps.append('stream-preferred')
requiredformats = repo.requirements & repo.supportedformats
# if our local revlogs are just revlogv1, add 'stream' cap
if not requiredformats - set(('revlogv1',)):
if not requiredformats - {'revlogv1'}:
caps.append('stream')
# otherwise, add 'streamreqs' detailing our local revlog format
else:

View File

@ -26,7 +26,7 @@ def modulewhitelist(names):
replacement = [('.py', ''), ('.c', ''), # trim suffix
('mercurial%s' % (os.sep), ''), # trim "mercurial/" path
]
ignored = set(['__init__'])
ignored = {'__init__'}
modules = {}
# convert from file name to module name, and count # of appearances

View File

@ -145,7 +145,7 @@ def _parseasciigraph(text):
def parents(y, x):
"""(int, int) -> [str]. follow the ASCII edges at given position,
return a list of parents"""
visited = set([(y, x)])
visited = {(y, x)}
visit = []
result = []

View File

@ -47,7 +47,7 @@ def generatestates(maxchangesets, parentcontents):
content in parentcontents]) + "-" + tracked
yield (filename, parentcontents)
else:
for content in (set([None, 'content' + str(depth + 1)]) |
for content in ({None, 'content' + str(depth + 1)} |
set(parentcontents)):
for combination in generatestates(maxchangesets,
parentcontents + [content]):

View File

@ -49,7 +49,7 @@ def buildgraph(rng, nodes=100, rootprob=0.05, mergeprob=0.2, prevprob=0.7):
def buildancestorsets(graph):
ancs = [None] * len(graph)
for i in xrange(len(graph)):
ancs[i] = set([i])
ancs[i] = {i}
if graph[i] == [nullrev]:
continue
for p in graph[i]:

View File

@ -6,7 +6,7 @@ from mercurial import (
ui as uimod,
)
ignore = set(['highlight', 'win32text', 'factotum'])
ignore = {'highlight', 'win32text', 'factotum'}
if os.name != 'nt':
ignore.add('win32mbcs')

View File

@ -154,7 +154,7 @@ check saving last-message.txt
> from mercurial import util
> def abortfolding(ui, repo, hooktype, **kwargs):
> ctx = repo[kwargs.get('node')]
> if set(ctx.files()) == set(['c', 'd', 'f']):
> if set(ctx.files()) == {'c', 'd', 'f'}:
> return True # abort folding commit only
> ui.warn('allow non-folding commit\\n')
> EOF

View File

@ -37,7 +37,7 @@ another repository of push/pull/clone on localhost:
> for name, module in extensions.extensions(ui):
> if __name__ == module.__name__:
> # support specific feature locally
> supported |= set(['featuresetup-test'])
> supported |= {'featuresetup-test'}
> return
> def uisetup(ui):
> localrepo.localrepository.featuresetupfuncs.add(featuresetup)