formatting changes from black 22.3.0

Summary:
Applies the black-fbsource codemod with the new build of pyfmt.

paintitblack

Reviewed By: lisroach

Differential Revision: D36324783

fbshipit-source-id: 280c09e88257e5e569ab729691165d8dedd767bc
John Reese authored on 2022-05-11 19:55:56 -07:00; committed by Facebook GitHub Bot
parent 61c22a3512
commit 2b1bda643c
54 changed files with 264 additions and 286 deletions
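The hunks below repeat a small set of mechanical rewrites: spaces around the power operator are removed when both operands are simple, redundant u"" prefixes are dropped, br"" prefixes are respelled as rb"", long %-formatted strings are wrapped in parentheses, and several one-line deletions appear to drop a blank line directly after a block opener such as else: or except:. As a rough, hypothetical before/after sketch of these patterns (the names and values are invented for illustration; only the formatting is the point):

# As written before this commit:
SIZE = 1024 ** 2                  # spaces around ** between simple operands
LABEL = u"x=%s" % ("1",)          # redundant u"" prefix on a str literal
PATTERN = br"\d+"                 # br"" spelling of the bytes-regex prefix
MSG = "status %d, output %s, stderr %s -- a long formatted message" % (
    1,
    "out",
    "err",
)

# As rewritten by black 22.3.0:
SIZE = 1024**2                    # the power operator hugs simple operands
LABEL = "x=%s" % ("1",)           # u"" prefix dropped
PATTERN = rb"\d+"                 # prefix respelled as rb""
MSG = (
    "status %d, output %s, stderr %s -- a long formatted message"
    % (
        1,
        "out",
        "err",
    )
)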

View File

@ -146,7 +146,6 @@ if hasattr(select, "poll"):
data = data.decode("utf-8", errors="surrogateescape")
log_fn(data)
else:
def _pipe_output(p, log_fn):

View File

@ -33,7 +33,6 @@ if TYPE_CHECKING:
class Strs(Tuple[str, ...]):
pass
else:
class Strs(tuple):

View File

@ -64,7 +64,7 @@ from .subcmd import Subcmd
from .util import format_cmd, format_mount, split_inodes_by_operation_type, print_stderr
MB: int = 1024 ** 2
MB: int = 1024**2
debug_cmd = subcmd_mod.Decorator()
@ -850,7 +850,7 @@ class UnloadInodesCmd(Subcmd):
# set the age in nanoSeconds
age = TimeSpec()
age.seconds = int(args.age)
age.nanoSeconds = int((args.age - age.seconds) * 10 ** 9)
age.nanoSeconds = int((args.age - age.seconds) * 10**9)
count = client.unloadInodeForPath(
bytes(checkout.path), bytes(rel_path), age
)

View File

@ -132,7 +132,6 @@ try:
build_time=typing.cast(int, build_info_dict.get("time", 0)),
)
except ImportError:
def get_build_info_from_pid(

View File

@ -36,7 +36,6 @@ if sys.platform == "win32":
def raise_win_error() -> NoReturn:
raise ctypes.WinError()
else:
# This entire file is only ever imported in Windows. However on our continuous
# integration environments Pyre currently does all of its type checking assuming

View File

@ -335,7 +335,7 @@ def get_session_id() -> int:
global _session_id
sid = _session_id
if sid is None:
sid = random.randrange(2 ** 32)
sid = random.randrange(2**32)
_session_id = sid
return sid

View File

@ -55,9 +55,12 @@ class BuckCommandError(subprocess.CalledProcessError):
def __str__(self) -> str:
cmd_str = " ".join(shlex.quote(arg) for arg in self.cmd)
return "buck command returned non-zero exit status %d\n\nCommand:\n[%s]\n\nOuput:\n%s\n\nStderr:\n%s" % (
self.returncode,
cmd_str,
self.output,
self.stderr,
return (
"buck command returned non-zero exit status %d\n\nCommand:\n[%s]\n\nOuput:\n%s\n\nStderr:\n%s"
% (
self.returncode,
cmd_str,
self.output,
self.stderr,
)
)

View File

@ -622,10 +622,13 @@ class EdenCommandError(subprocess.CalledProcessError):
def __str__(self) -> str:
cmd_str = " ".join(shlex.quote(arg) for arg in self.cmd)
return "edenfsctl command returned non-zero exit status %d\n\nCommand:\n[%s]\n\nStderr:\n%s" % (
self.returncode,
cmd_str,
self.stderr,
return (
"edenfsctl command returned non-zero exit status %d\n\nCommand:\n[%s]\n\nStderr:\n%s"
% (
self.returncode,
cmd_str,
self.stderr,
)
)

View File

@ -155,7 +155,6 @@ elif safehasattr(cmdutil, "command"):
commands.norepo += " %s" % " ".join(parsealiases(name))
return _command(name, list(options), synopsis)
else:
# for "historical portability":
# define "@command" annotation locally, because cmdutil.command
@ -843,7 +842,7 @@ def perfchangeset(ui, repo, rev, **opts):
@command("perfindex", formatteropts)
def perfindex(ui, repo, **opts):
timer, fm = gettimer(ui, opts)
revlog._prereadsize = 2 ** 24 # disable lazy parser in old hg
revlog._prereadsize = 2**24 # disable lazy parser in old hg
n = repo["tip"].node()
svfs = getsvfs(repo)
@ -932,7 +931,7 @@ def perfrevrange(ui, repo, *specs, **opts):
@command("perfnodelookup", formatteropts)
def perfnodelookup(ui, repo, rev, **opts):
timer, fm = gettimer(ui, opts)
revlog._prereadsize = 2 ** 24 # disable lazy parser in old hg
revlog._prereadsize = 2**24 # disable lazy parser in old hg
n = repo[rev].node()
cl = revlog.revlog(getsvfs(repo), "00changelog.i")

View File

@ -1294,7 +1294,7 @@ class svn_sink(converter_sink, commandline):
self.childmap[parent] = child
def revid(self, rev):
return u"svn:%s@%s" % (self.uuid, rev)
return "svn:%s@%s" % (self.uuid, rev)
def putcommit(self, files, copies, parents, commit, source, revmap, full, cleanp2):
for parent in parents:

View File

@ -199,7 +199,6 @@ if _debugging:
% (time.strftime("%a, %d %b %Y %H:%M:%S", time.gmtime()), fmt % args[:])
)
else:
def log(fmt, *args):

View File

@ -26,7 +26,6 @@ if compat.PYTHON3:
# returns None.
return sys.getfilesystemencoding()
else:
# Python 2 doesn't support surrogateescape, so use 'strict' by
# default. Users can register a custom surrogateescape error handler and use

View File

@ -402,7 +402,6 @@ if pycompat.iswindows:
finally:
closehandler(h)
elif pycompat.isdarwin:
import ctypes.util
@ -431,7 +430,6 @@ elif pycompat.isdarwin:
finally:
os.close(fd)
else:
def getcanonicalpath(name):

View File

@ -26,7 +26,6 @@ if pycompat.iswindows:
# "C:\\repo1\\.hg\\scratchbranches\\index\\bookmarkmap\\infinitepush/backups/test/HOSTNAME/C:\\repo2/heads"
return path.replace(":", "")
else:
def _normalizepath(path):

View File

@ -137,7 +137,7 @@ cmdtable = {}
command = registrar.command(cmdtable)
# The default depth to fetch during tree fetches
TREE_DEPTH_MAX = 2 ** 16
TREE_DEPTH_MAX = 2**16
configtable = {}
configitem = registrar.configitem(configtable)
@ -1285,7 +1285,7 @@ def servergettreepack(repo, proto, args):
raise error.Abort(_("cannot fetch remote files over non-ssh protocol"))
rootdir = args["rootdir"]
depth = int(args.get("depth", str(2 ** 16)))
depth = int(args.get("depth", str(2**16)))
mfnodes = wireproto.decodelist(args["mfnodes"])
basemfnodes = wireproto.decodelist(args["basemfnodes"])

View File

@ -220,7 +220,7 @@ cmdtable = {}
command = registrar.command(cmdtable)
# The default depth to fetch during tree fetches
TREE_DEPTH_MAX = 2 ** 16
TREE_DEPTH_MAX = 2**16
configtable = {}
configitem = registrar.configitem(configtable)
@ -2365,7 +2365,7 @@ def servergettreepack(repo, proto, args):
raise error.Abort(_("cannot fetch remote files over non-ssh protocol"))
rootdir = args["rootdir"]
depth = int(args.get("depth", str(2 ** 16)))
depth = int(args.get("depth", str(2**16)))
mfnodes = wireproto.decodelist(args["mfnodes"])
basemfnodes = wireproto.decodelist(args["basemfnodes"])

View File

@ -430,7 +430,7 @@ class bundlerepository(localrepo.localrepository):
with util.fdopen(fdtemp, "wb") as fptemp:
fptemp.write(pycompat.encodeutf8(header))
while True:
chunk = readfn(2 ** 18)
chunk = readfn(2**18)
if not chunk:
break
fptemp.write(chunk)

View File

@ -278,7 +278,7 @@ class cg1unpacker(object):
yield chunkheader(len(chunk))
pos = 0
while pos < len(chunk):
next = pos + 2 ** 20
next = pos + 2**20
yield chunk[pos:next]
pos = next
yield closechunk()

View File

@ -816,18 +816,15 @@ def migratetolazy(repo):
if repo.changelog.algorithmbackend == "revlog":
migratetodoublewrite(repo)
if (
not any(
s in repo.storerequirements
for s in (
"lazytextchangelog",
"hybridchangelog",
"doublewritechangelog",
"lazytext",
)
if not any(
s in repo.storerequirements
for s in (
"lazytextchangelog",
"hybridchangelog",
"doublewritechangelog",
"lazytext",
)
and not _isempty(repo)
):
) and not _isempty(repo):
raise error.Abort(
_(
"lazy backend can only be migrated from hybrid or doublewrite, or lazytext"

View File

@ -41,7 +41,6 @@ if pycompat.iswindows:
fileno = open_osfhandle(handle, os_mode)
return util.fdopen(fileno, mode)
else:
def fdopen(handle: int, mode: str) -> IO[Any]:

View File

@ -28,7 +28,7 @@ stringio = util.stringio
# This is required for ncurses to display non-ASCII characters in default user
# locale encoding correctly. --immerrr
locale.setlocale(locale.LC_ALL, u"")
locale.setlocale(locale.LC_ALL, "")
# patch comments based on the git one
diffhelptext = _(
@ -998,7 +998,7 @@ class curseschunkselector(object):
# strip \n, and convert control characters to ^[char] representation
text = text.strip(b"\n")
text = re.sub(
br"[\x00-\x08\x0a-\x1f]",
rb"[\x00-\x08\x0a-\x1f]",
lambda m: b"^%s" % bytearray([ord(m.group()) + 64]),
text,
)

View File

@ -545,7 +545,7 @@ def dispatch(req):
return millis
for power in range(3, 19):
threshold = 10 ** power
threshold = 10**power
if millis < threshold:
factor = int(threshold / 1000)
return int(millis / factor) * factor

View File

@ -507,7 +507,6 @@ if sys.version_info[0] >= 3:
def upper(s):
return s.upper()
else:
colwidth = _colwidth
fromlocal = pycompat.identity
@ -528,6 +527,5 @@ if sys.version_info[0] < 3:
pass
return s
else:
localtooutput = pycompat.identity

View File

@ -402,7 +402,6 @@ try:
return "unknown"
return False
except AttributeError:
def _handlesarg(func, arg):

View File

@ -88,7 +88,7 @@ def loads(string):
# XXX: This should round-trip with "dumps". But it might be non-trivial to
# do so.
def encode(s):
if isinstance(s, type(u"")):
if isinstance(s, type("")):
return pycompat.decodeutf8(s.encode("utf-8"))
else:
return s

View File

@ -348,7 +348,7 @@ def headdecode(s):
"""Decodes RFC-2047 header"""
uparts = []
for part, charset in email.header.decode_header(s):
if isinstance(part, type(u"")):
if isinstance(part, type("")):
uparts.append(part)
continue
if charset is not None:
@ -363,4 +363,4 @@ def headdecode(s):
except UnicodeDecodeError:
pass
uparts.append(part.decode("ISO-8859-1"))
return encoding.unitolocal(u" ".join(uparts))
return encoding.unitolocal(" ".join(uparts))

View File

@ -1704,7 +1704,7 @@ def readpatternfile(filepath, warn, sourceinfo=False):
if "#" in line:
global _commentre
if not _commentre:
_commentre = util.re.compile(br"((?:^|[^\\])(?:\\\\)*)#.*")
_commentre = util.re.compile(rb"((?:^|[^\\])(?:\\\\)*)#.*")
# remove comments prefixed by an even number of escapes
m = _commentre.search(line)
if m:

View File

@ -37,7 +37,7 @@ patches = mpatch.patches
patchedsize = mpatch.patchedsize
textdiff = bdiff.bdiff
wordsplitter = re.compile(br"(\t+| +|[a-zA-Z0-9_\x80-\xff]+|[^ \ta-zA-Z0-9_\x80-\xff])")
wordsplitter = re.compile(rb"(\t+| +|[a-zA-Z0-9_\x80-\xff]+|[^ \ta-zA-Z0-9_\x80-\xff])")
# called by dispatch.py
def init(ui: "UI") -> None:

View File

@ -49,9 +49,9 @@ from .pycompat import decodeutf8, encodeutf8, range
stringio = util.stringio
gitre = re.compile(br"diff --git a/(.*) b/(.*)")
tabsplitter = re.compile(br"(\t+|[^\t]+)")
wordsplitter = re.compile(br"(\t+| +|[a-zA-Z0-9_\x80-\xff]+|[^ \ta-zA-Z0-9_\x80-\xff])")
gitre = re.compile(rb"diff --git a/(.*) b/(.*)")
tabsplitter = re.compile(rb"(\t+|[^\t]+)")
wordsplitter = re.compile(rb"(\t+| +|[a-zA-Z0-9_\x80-\xff]+|[^ \ta-zA-Z0-9_\x80-\xff])")
PatchError = error.PatchError
@ -212,10 +212,10 @@ def extract(ui, fileobj):
# attempt to detect the start of a patch
# (this heuristic is borrowed from quilt)
diffre = re.compile(
br"^(?:Index:[ \t]|diff[ \t]-|RCS file: |"
br"retrieving revision [0-9]+(\.[0-9]+)*$|"
br"---[ \t].*?^\+\+\+[ \t]|"
br"\*\*\*[ \t].*?^---[ \t])",
rb"^(?:Index:[ \t]|diff[ \t]-|RCS file: |"
rb"retrieving revision [0-9]+(\.[0-9]+)*$|"
rb"---[ \t].*?^\+\+\+[ \t]|"
rb"\*\*\*[ \t].*?^---[ \t])",
re.MULTILINE | re.DOTALL,
)
@ -606,7 +606,7 @@ class filestore(object):
self.created = 0
self.maxsize = maxsize
if self.maxsize is None:
self.maxsize = 4 * (2 ** 20)
self.maxsize = 4 * (2**20)
self.size = 0
self.data = {}
@ -2000,7 +2000,7 @@ def scanpatch(fp):
- ('hunk', [hunk_lines])
- ('range', (-start,len, +start,len, proc))
"""
lines_re = re.compile(br"@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)")
lines_re = re.compile(rb"@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)")
lr = linereader(fp)
def scanwhile(first, p):

View File

@ -732,7 +732,6 @@ elif pycompat.sysplatform == "cygwin":
def checklink(path: str) -> bool:
return False
else:
# os.path.normcase is a no-op, which doesn't help us on non-native
# filesystems

View File

@ -91,30 +91,30 @@ if not pycompat.iswindows:
_msg_iovlen_t = ctypes.c_int
class _iovec(ctypes.Structure):
_fields_ = [(u"iov_base", ctypes.c_void_p), (u"iov_len", ctypes.c_size_t)]
_fields_ = [("iov_base", ctypes.c_void_p), ("iov_len", ctypes.c_size_t)]
class _msghdr(ctypes.Structure):
_fields_ = [
(u"msg_name", ctypes.c_void_p),
(u"msg_namelen", _socklen_t),
(u"msg_iov", ctypes.POINTER(_iovec)),
(u"msg_iovlen", _msg_iovlen_t),
(u"msg_control", ctypes.c_void_p),
(u"msg_controllen", _msg_controllen_t),
(u"msg_flags", ctypes.c_int),
("msg_name", ctypes.c_void_p),
("msg_namelen", _socklen_t),
("msg_iov", ctypes.POINTER(_iovec)),
("msg_iovlen", _msg_iovlen_t),
("msg_control", ctypes.c_void_p),
("msg_controllen", _msg_controllen_t),
("msg_flags", ctypes.c_int),
]
class _cmsghdr(ctypes.Structure):
_fields_ = [
(u"cmsg_len", _cmsg_len_t),
(u"cmsg_level", ctypes.c_int),
(u"cmsg_type", ctypes.c_int),
("cmsg_len", _cmsg_len_t),
("cmsg_level", ctypes.c_int),
("cmsg_type", ctypes.c_int),
# pyre-fixme[58]: `*` is not supported for operand types
# `Type[ctypes.c_ubyte]` and `int`.
(u"cmsg_data", ctypes.c_ubyte * 0),
("cmsg_data", ctypes.c_ubyte * 0),
]
_libc = ctypes.CDLL(ctypes.util.find_library(u"c"), use_errno=True)
_libc = ctypes.CDLL(ctypes.util.find_library("c"), use_errno=True)
_recvmsg = getattr(_libc, "recvmsg", None)
if _recvmsg:
_recvmsg.restype = getattr(ctypes, "c_ssize_t", ctypes.c_long)
@ -164,7 +164,6 @@ if not pycompat.iswindows:
)
return [rfds[i] for i in range(rfdscount)]
else:
import msvcrt

View File

@ -58,7 +58,6 @@ if "TESTTMP" in os.environ or "testutil" in sys.modules:
def istest():
return True
else:
def istest():

View File

@ -194,7 +194,7 @@ state = ProfileState()
class CodeSite(object):
cache = {}
__slots__ = (u"path", u"lineno", u"function", u"source")
__slots__ = ("path", "lineno", "function", "source")
def __init__(self, path, lineno, function):
self.path = path
@ -249,7 +249,7 @@ class CodeSite(object):
class Sample(object):
__slots__ = (u"stack", u"time")
__slots__ = ("stack", "time")
def __init__(self, stack, time):
self.stack = stack

View File

@ -128,7 +128,6 @@ if sys.version_info[0] >= 3:
def hasdata(req):
return req.data is not None
else:
import BaseHTTPServer
import CGIHTTPServer

View File

@ -2066,10 +2066,10 @@ class chunkbuffer(object):
def splitbig(chunks):
for chunk in chunks:
assert isinstance(chunk, bytes)
if len(chunk) > 2 ** 20:
if len(chunk) > 2**20:
pos = 0
while pos < len(chunk):
end = pos + 2 ** 18
end = pos + 2**18
yield chunk[pos:end]
pos = end
else:
@ -2093,7 +2093,7 @@ class chunkbuffer(object):
while left > 0:
# refill the queue
if not queue:
target = 2 ** 18
target = 2**18
for chunk in self.iter:
assert isinstance(chunk, bytes)
queue.append(chunk)
@ -2754,7 +2754,6 @@ if pyplatform.python_implementation() == "CPython" and sys.version_info < (3, 0)
else:
return _safeiterfile(fp)
else:
# PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
def iterfile(fp):
@ -3200,7 +3199,7 @@ class url(object):
return True # remote URL
if hasdriveletter(self.path):
return True # absolute for our purposes - can't be joined()
if self.path.startswith(br"\\"):
if self.path.startswith(rb"\\"):
return True # Windows UNC path
if self.path.startswith("/"):
return True # POSIX-style
@ -3285,14 +3284,14 @@ _timenesting = [0]
_sizeunits = (
("b", 1),
("kb", 2 ** 10),
("mb", 2 ** 20),
("gb", 2 ** 30),
("tb", 2 ** 40),
("m", 2 ** 20),
("k", 2 ** 10),
("g", 2 ** 30),
("t", 2 ** 40),
("kb", 2**10),
("mb", 2**20),
("gb", 2**30),
("tb", 2**40),
("m", 2**20),
("k", 2**10),
("g", 2**30),
("t", 2**40),
)
tracewrap = bindings.tracing.wrapfunc
@ -3789,7 +3788,7 @@ class _zlibengine(compressionengine):
for chunk in filechunkiter(fh):
while chunk:
# Limit output size to limit memory.
yield d.decompress(chunk, 2 ** 18)
yield d.decompress(chunk, 2**18)
chunk = d.unconsumed_tail
return chunkbuffer(gen())
@ -3817,7 +3816,7 @@ class _zlibengine(compressionengine):
parts = []
pos = 0
while pos < insize:
pos2 = pos + 2 ** 20
pos2 = pos + 2**20
parts.append(z.compress(data[pos:pos2]))
pos = pos2
parts.append(z.flush())

View File

@ -252,7 +252,6 @@ if sys.version_info.major >= 3:
def _elementtointeger(b, i):
return b[i]
else:
def _elementtointeger(b, i):

View File

@ -391,7 +391,7 @@ def _raiseoserror(name):
# See https://bugs.python.org/issue28474
code = _kernel32.GetLastError()
if code > 0x7FFFFFFF:
code -= 2 ** 32
code -= 2**32
err = ctypes.WinError(code=code)
raise OSError(err.errno, "%s: %s" % (name, encoding.strtolocal(err.strerror)))

View File

@ -156,7 +156,7 @@ def matchoutput(cmd, regexp, ignorestatus=False):
@check("baz", "GNU Arch baz client", exe=True)
def has_baz():
return matchoutput("baz --version 2>&1", br"baz Bazaar version")
return matchoutput("baz --version 2>&1", rb"baz Bazaar version")
@check("bzr", "Canonical's Bazaar client", exe=True)
@ -223,31 +223,31 @@ def has_common_zlib():
@check("cvs", "cvs client/server", exe=True)
def has_cvs():
re = br"Concurrent Versions System.*?server"
re = rb"Concurrent Versions System.*?server"
return matchoutput("cvs --version 2>&1", re) and not has_msys()
@check("cvs112", "cvs client/server 1.12.* (not cvsnt)")
def has_cvs112():
re = br"Concurrent Versions System \(CVS\) 1.12.*?server"
re = rb"Concurrent Versions System \(CVS\) 1.12.*?server"
return matchoutput("cvs --version 2>&1", re) and not has_msys()
@check("cvsnt", "cvsnt client/server", exe=True)
def has_cvsnt():
re = br"Concurrent Versions System \(CVSNT\) (\d+).(\d+).*\(client/server\)"
re = rb"Concurrent Versions System \(CVSNT\) (\d+).(\d+).*\(client/server\)"
return matchoutput("cvsnt --version 2>&1", re)
@check("darcs", "darcs client", exe=True)
def has_darcs():
return matchoutput("darcs --version", br"\b2\.([2-9]|\d{2})", True)
return matchoutput("darcs --version", rb"\b2\.([2-9]|\d{2})", True)
@check("mtn", "monotone client (>= 1.0)", exe=True)
def has_mtn():
return matchoutput("mtn --version", br"monotone", True) and not matchoutput(
"mtn --version", br"monotone 0\.", True
return matchoutput("mtn --version", rb"monotone", True) and not matchoutput(
"mtn --version", rb"monotone 0\.", True
)
@ -360,7 +360,7 @@ def has_lz4():
def gethgversion():
m = matchoutput("hg --version --quiet 2>&1", br"(\d+)\.(\d+)")
m = matchoutput("hg --version --quiet 2>&1", rb"(\d+)\.(\d+)")
if not m:
return (0, 0)
return (int(m.group(1)), int(m.group(2)))
@ -395,16 +395,16 @@ def has_hg06():
@check("gettext", "GNU Gettext (msgfmt)")
def has_gettext():
return matchoutput("msgfmt --version", br"GNU gettext-tools")
return matchoutput("msgfmt --version", rb"GNU gettext-tools")
@check("git", "git command line client", exe=True)
def has_git():
return matchoutput("git --version 2>&1", br"^git version")
return matchoutput("git --version 2>&1", rb"^git version")
def getgitversion():
m = matchoutput("git --version 2>&1", br"git version (\d+)\.(\d+)")
m = matchoutput("git --version 2>&1", rb"git version (\d+)\.(\d+)")
if not m:
return (0, 0)
return (int(m.group(1)), int(m.group(2)))
@ -441,12 +441,12 @@ def has_docutils():
@check("p4", "Perforce server and client", exe=True)
def has_p4():
return matchoutput("p4 -V", br"Rev\. P4/") and matchoutput("p4d -V", br"Rev\. P4D/")
return matchoutput("p4 -V", rb"Rev\. P4/") and matchoutput("p4d -V", rb"Rev\. P4D/")
@check("jq", "json processing tool", exe=True)
def has_jq():
return matchoutput("jq --help", br"Usage:\W+jq .*")
return matchoutput("jq --help", rb"Usage:\W+jq .*")
@check("symlink", "symbolic links")
@ -502,22 +502,22 @@ def has_rmcwd():
@check("tla", "GNU Arch tla client", exe=True)
def has_tla():
return matchoutput("tla --version 2>&1", br"The GNU Arch Revision")
return matchoutput("tla --version 2>&1", rb"The GNU Arch Revision")
@check("gpg", "gpg client", exe=True)
def has_gpg():
return matchoutput("gpg --version 2>&1", br"GnuPG")
return matchoutput("gpg --version 2>&1", rb"GnuPG")
@check("gpg2", "gpg client v2")
def has_gpg2():
return matchoutput("gpg --version 2>&1", br"GnuPG[^0-9]+2\.")
return matchoutput("gpg --version 2>&1", rb"GnuPG[^0-9]+2\.")
@check("gpg21", "gpg client v2.1+")
def has_gpg21():
return matchoutput("gpg --version 2>&1", br"GnuPG[^0-9]+2\.(?!0)")
return matchoutput("gpg --version 2>&1", rb"GnuPG[^0-9]+2\.(?!0)")
@check("unix-permissions", "unix-style permissions")
@ -553,26 +553,26 @@ def has_pyflakes():
pyflakespath = os.environ.get("HGTEST_PYFLAKES_PATH", "pyflakes")
return matchoutput(
"sh -c \"echo 'import re' 2>&1 | %s\"" % pyflakespath,
br"<stdin>:1: 're' imported but unused",
rb"<stdin>:1: 're' imported but unused",
True,
)
@check("pylint", "Pylint python linter", exe=True)
def has_pylint():
return matchoutput("pylint --help", br"Usage: pylint", True)
return matchoutput("pylint --help", rb"Usage: pylint", True)
@check("clang-format", "clang-format C code formatter", exe=True)
def has_clang_format():
return matchoutput(
"clang-format --help", br"^OVERVIEW: A tool to format C/C\+\+[^ ]+ code."
"clang-format --help", rb"^OVERVIEW: A tool to format C/C\+\+[^ ]+ code."
)
@check("jshint", "JSHint static code analysis tool", exe=True)
def has_jshint():
return matchoutput("jshint --version 2>&1", br"jshint v")
return matchoutput("jshint --version 2>&1", rb"jshint v")
@check("pygments", "Pygments source highlighting library")
@ -589,7 +589,7 @@ def has_pygments():
@check("outer-repo", "outer repo")
def has_outer_repo():
# failing for other reasons than 'no repo' imply that there is a repo
return not matchoutput("hg root 2>&1", br"abort: no repository found", True)
return not matchoutput("hg root 2>&1", rb"abort: no repository found", True)
@check("ssl", "ssl module available")
@ -681,7 +681,7 @@ def has_tic():
import curses
curses.COLOR_BLUE
return matchoutput('test -x "`which tic`"', br"")
return matchoutput('test -x "`which tic`"', rb"")
except ImportError:
return False
@ -705,10 +705,10 @@ def has_osx():
def has_osxpackaging():
try:
return (
matchoutput("pkgbuild", br"Usage: pkgbuild ", ignorestatus=1)
and matchoutput("productbuild", br"Usage: productbuild ", ignorestatus=1)
and matchoutput("lsbom", br"Usage: lsbom", ignorestatus=1)
and matchoutput("xar --help", br"Usage: xar", ignorestatus=1)
matchoutput("pkgbuild", rb"Usage: pkgbuild ", ignorestatus=1)
and matchoutput("productbuild", rb"Usage: productbuild ", ignorestatus=1)
and matchoutput("lsbom", rb"Usage: lsbom", ignorestatus=1)
and matchoutput("xar --help", rb"Usage: xar", ignorestatus=1)
)
except ImportError:
return False
@ -722,7 +722,7 @@ def has_linuxormacos():
@check("docker", "docker support")
def has_docker():
pat = br"A self-sufficient runtime for"
pat = rb"A self-sufficient runtime for"
if matchoutput("docker --help", pat):
if "linux" not in sys.platform:
# TODO: in theory we should be able to test docker-based
@ -742,13 +742,13 @@ def has_docker():
def has_debhelper():
# Some versions of dpkg say `dpkg', some say 'dpkg' (` vs ' on the first
# quote), so just accept anything in that spot.
dpkg = matchoutput("dpkg --version", br"Debian .dpkg' package management program")
dh = matchoutput("dh --help", br"dh is a part of debhelper.", ignorestatus=True)
dh_py2 = matchoutput("dh_python2 --help", br"other supported Python versions")
dpkg = matchoutput("dpkg --version", rb"Debian .dpkg' package management program")
dh = matchoutput("dh --help", rb"dh is a part of debhelper.", ignorestatus=True)
dh_py2 = matchoutput("dh_python2 --help", rb"other supported Python versions")
# debuild comes from the 'devscripts' package, though you might want
# the 'build-debs' package instead, which has a dependency on devscripts.
debuild = matchoutput(
"debuild --help", br"to run debian/rules with given parameter"
"debuild --help", rb"to run debian/rules with given parameter"
)
return dpkg and dh and dh_py2 and debuild
@ -783,7 +783,7 @@ def has_hypothesis():
@check("unziplinks", "unzip(1) understands and extracts symlinks")
def unzip_understands_symlinks():
return matchoutput("unzip --help", br"Info-ZIP")
return matchoutput("unzip --help", rb"Info-ZIP")
@check("zstd", "zstd Python module available")
@ -835,7 +835,7 @@ def has_fuzzywuzzy():
def has_eden():
return matchoutput(
"hg --debug --config extensions.eden= --version 2>&1",
re.compile(br"^\s*eden\s+(in|ex)ternal\s*$", re.MULTILINE),
re.compile(rb"^\s*eden\s+(in|ex)ternal\s*$", re.MULTILINE),
)

View File

@ -24,8 +24,8 @@ source_suffix = [".rst", ".md"]
master_doc = "index"
# General information about the project.
project = u"FB Mercurial Documentation"
copyright = u"2018 Facebook, Inc"
project = "FB Mercurial Documentation"
copyright = "2018 Facebook, Inc"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@ -165,8 +165,8 @@ latex_documents = [
(
"index",
"FBMercurialDeveloperGuide.tex",
u"FB Mercurial Developer Guide Documentation",
u"Facebook Source Control Team",
"FB Mercurial Developer Guide Documentation",
"Facebook Source Control Team",
"manual",
)
]

View File

@ -6,74 +6,74 @@ import os
substitutions = [
# list of possible compressions
(br"(zstd,)?zlib,none,bzip2", br"$USUAL_COMPRESSIONS$"),
(rb"(zstd,)?zlib,none,bzip2", rb"$USUAL_COMPRESSIONS$"),
# capabilities sent through http
(
br"bundlecaps=HG20%2Cbundle2%3DHG20%250A"
br"bookmarks%250A"
br"changegroup%253D01%252C02%250A"
br"digests%253Dmd5%252Csha1%252Csha512%250A"
br"error%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250A"
br"listkeys%250A"
br"phases%253Dheads%250A"
br"pushkey%250A"
br"remote-changegroup%253Dhttp%252Chttps",
rb"bundlecaps=HG20%2Cbundle2%3DHG20%250A"
rb"bookmarks%250A"
rb"changegroup%253D01%252C02%250A"
rb"digests%253Dmd5%252Csha1%252Csha512%250A"
rb"error%253Dabort%252Cunsupportedcontent%252Cpushraced%252Cpushkey%250A"
rb"listkeys%250A"
rb"phases%253Dheads%250A"
rb"pushkey%250A"
rb"remote-changegroup%253Dhttp%252Chttps",
# (the replacement patterns)
br"$USUAL_BUNDLE_CAPS$",
rb"$USUAL_BUNDLE_CAPS$",
),
# bundle2 capabilities sent through ssh
(
br"bundle2=HG20%0A"
br"bookmarks%0A"
br"changegroup%3D01%2C02%0A"
br"digests%3Dmd5%2Csha1%2Csha512%0A"
br"error%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0A"
br"listkeys%0A"
br"phases%3Dheads%0A"
br"pushkey%0A"
br"remote-changegroup%3Dhttp%2Chttps",
rb"bundle2=HG20%0A"
rb"bookmarks%0A"
rb"changegroup%3D01%2C02%0A"
rb"digests%3Dmd5%2Csha1%2Csha512%0A"
rb"error%3Dabort%2Cunsupportedcontent%2Cpushraced%2Cpushkey%0A"
rb"listkeys%0A"
rb"phases%3Dheads%0A"
rb"pushkey%0A"
rb"remote-changegroup%3Dhttp%2Chttps",
# (replacement patterns)
br"$USUAL_BUNDLE2_CAPS$",
rb"$USUAL_BUNDLE2_CAPS$",
),
# HTTP log dates
(br' - - \[\d\d/.../2\d\d\d \d\d:\d\d:\d\d] "GET', br' - - [$LOGDATE$] "GET'),
(rb' - - \[\d\d/.../2\d\d\d \d\d:\d\d:\d\d] "GET', rb' - - [$LOGDATE$] "GET'),
# Windows has an extra '/' in the following lines that get globbed away:
# pushing to file:/*/$TESTTMP/r2 (glob)
# comparing with file:/*/$TESTTMP/r2 (glob)
# sub/maybelarge.dat: largefile 34..9c not available from
# file:/*/$TESTTMP/largefiles-repo (glob)
(
br"(.*file:/)/?(/\$TESTTMP.*)",
rb"(.*file:/)/?(/\$TESTTMP.*)",
lambda m: m.group(1) + b"*" + m.group(2) + b" (glob)",
),
]
# Various platform error strings, keyed on a common replacement string
_errors = {
br"$ENOENT$": (
rb"$ENOENT$": (
# strerror()
br"No such file or directory",
rb"No such file or directory",
# FormatMessage(ERROR_FILE_NOT_FOUND)
br"The system cannot find the file specified",
rb"The system cannot find the file specified",
),
br"$ENOTDIR$": (
rb"$ENOTDIR$": (
# strerror()
br"Not a directory",
rb"Not a directory",
# FormatMessage(ERROR_PATH_NOT_FOUND)
br"The system cannot find the path specified",
rb"The system cannot find the path specified",
),
br"$ECONNRESET$": (
rb"$ECONNRESET$": (
# strerror()
br"Connection reset by peer",
rb"Connection reset by peer",
# FormatMessage(WSAECONNRESET)
br"An existing connection was forcibly closed by the remote host",
rb"An existing connection was forcibly closed by the remote host",
),
br"$EADDRINUSE$": (
rb"$EADDRINUSE$": (
# strerror()
br"Address already in use",
rb"Address already in use",
# FormatMessage(WSAEADDRINUSE)
br"Only one usage of each socket address"
br" \(protocol/network address/port\) is normally permitted",
rb"Only one usage of each socket address"
rb" \(protocol/network address/port\) is normally permitted",
),
}
@ -85,29 +85,29 @@ for replace, msgs in _errors.items():
_winpathfixes = [
# cloning subrepo s\ss from $TESTTMP/t/s/ss
# cloning subrepo foo\bar from http://localhost:$HGPORT/foo/bar
br"(?m)^cloning subrepo \S+\\.*",
rb"(?m)^cloning subrepo \S+\\.*",
# pulling from $TESTTMP\issue1852a
br"(?m)^pulling from \$TESTTMP\\.*",
rb"(?m)^pulling from \$TESTTMP\\.*",
# pushing to $TESTTMP\a
br"(?m)^pushing to \$TESTTMP\\.*",
rb"(?m)^pushing to \$TESTTMP\\.*",
# pushing subrepo s\ss to $TESTTMP/t/s/ss
br"(?m)^pushing subrepo \S+\\\S+ to.*",
rb"(?m)^pushing subrepo \S+\\\S+ to.*",
# moving d1\d11\a1 to d3/d11/a1
br"(?m)^moving \S+\\.*",
rb"(?m)^moving \S+\\.*",
# d1\a: not recording move - dummy does not exist
br"\S+\\\S+: not recording move .+",
rb"\S+\\\S+: not recording move .+",
# reverting s\a
br"(?m)^reverting (?!subrepo ).*\\.*",
rb"(?m)^reverting (?!subrepo ).*\\.*",
# no changes made to subrepo s\ss since last push to ../tcc/s/ss
br"(?m)^no changes made to subrepo \S+\\\S+ since.*",
rb"(?m)^no changes made to subrepo \S+\\\S+ since.*",
# changeset 5:9cc5aa7204f0: stuff/maybelarge.dat references missing
# $TESTTMP\largefiles-repo-hg\.hg\largefiles\76..38
br"(?m)^changeset .* references (corrupted|missing) \$TESTTMP\\.*",
rb"(?m)^changeset .* references (corrupted|missing) \$TESTTMP\\.*",
# stuff/maybelarge.dat: largefile 76..38 not available from
# file:/*/$TESTTMP\largefiles-repo (glob)
br".*: largefile \S+ not available from file:/\*/.+",
rb".*: largefile \S+ not available from file:/\*/.+",
# hgrc parse error (double escaped)
br"(?m)^hg: parse error: \".*",
rb"(?m)^hg: parse error: \".*",
]
if os.name == "nt":

View File

@ -24,7 +24,6 @@ if os.environ.get("HGIPV6", "0") == "1":
class simplehttpserver(httpserver.httpserver):
address_family = socket.AF_INET6
else:
simplehttpserver = httpserver.httpserver

View File

@ -135,7 +135,7 @@ def matchoutput(cmd, regexp, ignorestatus=False):
@check("baz", "GNU Arch baz client")
def has_baz():
return matchoutput("baz --version 2>&1", br"baz Bazaar version")
return matchoutput("baz --version 2>&1", rb"baz Bazaar version")
@check("bzr", "Canonical's Bazaar client")
@ -193,31 +193,31 @@ def has_common_zlib():
@check("cvs", "cvs client/server")
def has_cvs():
re = br"Concurrent Versions System.*?server"
re = rb"Concurrent Versions System.*?server"
return matchoutput("cvs --version 2>&1", re) and not has_msys()
@check("cvs112", "cvs client/server 1.12.* (not cvsnt)")
def has_cvs112():
re = br"Concurrent Versions System \(CVS\) 1.12.*?server"
re = rb"Concurrent Versions System \(CVS\) 1.12.*?server"
return matchoutput("cvs --version 2>&1", re) and not has_msys()
@check("cvsnt", "cvsnt client/server")
def has_cvsnt():
re = br"Concurrent Versions System \(CVSNT\) (\d+).(\d+).*\(client/server\)"
re = rb"Concurrent Versions System \(CVSNT\) (\d+).(\d+).*\(client/server\)"
return matchoutput("cvsnt --version 2>&1", re)
@check("darcs", "darcs client")
def has_darcs():
return matchoutput("darcs --version", br"\b2\.([2-9]|\d{2})", True)
return matchoutput("darcs --version", rb"\b2\.([2-9]|\d{2})", True)
@check("mtn", "monotone client (>= 1.0)")
def has_mtn():
return matchoutput("mtn --version", br"monotone", True) and not matchoutput(
"mtn --version", br"monotone 0\.", True
return matchoutput("mtn --version", rb"monotone", True) and not matchoutput(
"mtn --version", rb"monotone 0\.", True
)
@ -329,7 +329,7 @@ def has_lz4():
def gethgversion():
m = matchoutput("hg --version --quiet 2>&1", br"(\d+)\.(\d+)")
m = matchoutput("hg --version --quiet 2>&1", rb"(\d+)\.(\d+)")
if not m:
return (0, 0)
return (int(m.group(1)), int(m.group(2)))
@ -364,16 +364,16 @@ def has_hg06():
@check("gettext", "GNU Gettext (msgfmt)")
def has_gettext():
return matchoutput("msgfmt --version", br"GNU gettext-tools")
return matchoutput("msgfmt --version", rb"GNU gettext-tools")
@check("git", "git command line client")
def has_git():
return matchoutput("git --version 2>&1", br"^git version")
return matchoutput("git --version 2>&1", rb"^git version")
def getgitversion():
m = matchoutput("git --version 2>&1", br"git version (\d+)\.(\d+)")
m = matchoutput("git --version 2>&1", rb"git version (\d+)\.(\d+)")
if not m:
return (0, 0)
return (int(m.group(1)), int(m.group(2)))
@ -410,12 +410,12 @@ def has_docutils():
@check("p4", "Perforce server and client")
def has_p4():
return matchoutput("p4 -V", br"Rev\. P4/") and matchoutput("p4d -V", br"Rev\. P4D/")
return matchoutput("p4 -V", rb"Rev\. P4/") and matchoutput("p4d -V", rb"Rev\. P4D/")
@check("jq", "json processing tool")
def has_jq():
return matchoutput("jq --help", br"Usage:\W+jq .*")
return matchoutput("jq --help", rb"Usage:\W+jq .*")
@check("symlink", "symbolic links")
@ -482,22 +482,22 @@ def has_rmcwd():
@check("tla", "GNU Arch tla client")
def has_tla():
return matchoutput("tla --version 2>&1", br"The GNU Arch Revision")
return matchoutput("tla --version 2>&1", rb"The GNU Arch Revision")
@check("gpg", "gpg client")
def has_gpg():
return matchoutput("gpg --version 2>&1", br"GnuPG")
return matchoutput("gpg --version 2>&1", rb"GnuPG")
@check("gpg2", "gpg client v2")
def has_gpg2():
return matchoutput("gpg --version 2>&1", br"GnuPG[^0-9]+2\.")
return matchoutput("gpg --version 2>&1", rb"GnuPG[^0-9]+2\.")
@check("gpg21", "gpg client v2.1+")
def has_gpg21():
return matchoutput("gpg --version 2>&1", br"GnuPG[^0-9]+2\.(?!0)")
return matchoutput("gpg --version 2>&1", rb"GnuPG[^0-9]+2\.(?!0)")
@check("unix-permissions", "unix-style permissions")
@ -533,26 +533,26 @@ def has_pyflakes():
pyflakespath = os.environ.get("HGTEST_PYFLAKES_PATH", "pyflakes")
return matchoutput(
"sh -c \"echo 'import re' 2>&1 | %s\"" % pyflakespath,
br"<stdin>:1: 're' imported but unused",
rb"<stdin>:1: 're' imported but unused",
True,
)
@check("pylint", "Pylint python linter")
def has_pylint():
return matchoutput("pylint --help", br"Usage: pylint", True)
return matchoutput("pylint --help", rb"Usage: pylint", True)
@check("clang-format", "clang-format C code formatter")
def has_clang_format():
return matchoutput(
"clang-format --help", br"^OVERVIEW: A tool to format C/C\+\+[^ ]+ code."
"clang-format --help", rb"^OVERVIEW: A tool to format C/C\+\+[^ ]+ code."
)
@check("jshint", "JSHint static code analysis tool")
def has_jshint():
return matchoutput("jshint --version 2>&1", br"jshint v")
return matchoutput("jshint --version 2>&1", rb"jshint v")
@check("pygments", "Pygments source highlighting library")
@ -569,7 +569,7 @@ def has_pygments():
@check("outer-repo", "outer repo")
def has_outer_repo():
# failing for other reasons than 'no repo' imply that there is a repo
return not matchoutput("hg root 2>&1", br"abort: no repository found", True)
return not matchoutput("hg root 2>&1", rb"abort: no repository found", True)
@check("ssl", "ssl module available")
@ -661,7 +661,7 @@ def has_tic():
import curses
curses.COLOR_BLUE
return matchoutput('test -x "`which tic`"', br"")
return matchoutput('test -x "`which tic`"', rb"")
except ImportError:
return False
@ -685,10 +685,10 @@ def has_osx():
def has_osxpackaging():
try:
return (
matchoutput("pkgbuild", br"Usage: pkgbuild ", ignorestatus=1)
and matchoutput("productbuild", br"Usage: productbuild ", ignorestatus=1)
and matchoutput("lsbom", br"Usage: lsbom", ignorestatus=1)
and matchoutput("xar --help", br"Usage: xar", ignorestatus=1)
matchoutput("pkgbuild", rb"Usage: pkgbuild ", ignorestatus=1)
and matchoutput("productbuild", rb"Usage: productbuild ", ignorestatus=1)
and matchoutput("lsbom", rb"Usage: lsbom", ignorestatus=1)
and matchoutput("xar --help", rb"Usage: xar", ignorestatus=1)
)
except ImportError:
return False
@ -702,7 +702,7 @@ def has_linuxormacos():
@check("docker", "docker support")
def has_docker():
pat = br"A self-sufficient runtime for"
pat = rb"A self-sufficient runtime for"
if matchoutput("docker --help", pat):
if "linux" not in sys.platform:
# TODO: in theory we should be able to test docker-based
@ -722,13 +722,13 @@ def has_docker():
def has_debhelper():
# Some versions of dpkg say `dpkg', some say 'dpkg' (` vs ' on the first
# quote), so just accept anything in that spot.
dpkg = matchoutput("dpkg --version", br"Debian .dpkg' package management program")
dh = matchoutput("dh --help", br"dh is a part of debhelper.", ignorestatus=True)
dh_py2 = matchoutput("dh_python2 --help", br"other supported Python versions")
dpkg = matchoutput("dpkg --version", rb"Debian .dpkg' package management program")
dh = matchoutput("dh --help", rb"dh is a part of debhelper.", ignorestatus=True)
dh_py2 = matchoutput("dh_python2 --help", rb"other supported Python versions")
# debuild comes from the 'devscripts' package, though you might want
# the 'build-debs' package instead, which has a dependency on devscripts.
debuild = matchoutput(
"debuild --help", br"to run debian/rules with given parameter"
"debuild --help", rb"to run debian/rules with given parameter"
)
return dpkg and dh and dh_py2 and debuild
@ -762,7 +762,7 @@ def has_hypothesis():
@check("unziplinks", "unzip(1) understands and extracts symlinks")
def unzip_understands_symlinks():
return matchoutput("unzip --help", br"Info-ZIP")
return matchoutput("unzip --help", rb"Info-ZIP")
@check("zstd", "zstd Python module available")
@ -812,7 +812,7 @@ def has_fuzzywuzzy():
def has_eden():
return matchoutput(
"hg --debug --config extensions.eden= --version 2>&1",
re.compile(br"^\s*eden\s+(in|ex)ternal\s*$", re.MULTILINE),
re.compile(rb"^\s*eden\s+(in|ex)ternal\s*$", re.MULTILINE),
)

View File

@ -85,7 +85,6 @@ if os.name == "nt":
raise
_check(ctypes.windll.kernel32.CloseHandle(handle))
else:
def kill(pid, logfn, tryhard=True):

View File

@ -26,9 +26,9 @@ _faketime = faketime()
time.time = _faketime.time
unicodeloopitems = [
pycompat.ensurestr(u"\u3042\u3044"), # 2 x 2 = 4 columns
pycompat.ensurestr(u"\u3042\u3044\u3046"), # 2 x 3 = 6 columns
pycompat.ensurestr(u"\u3042\u3044\u3046\u3048"), # 2 x 4 = 8 columns
pycompat.ensurestr("\u3042\u3044"), # 2 x 2 = 4 columns
pycompat.ensurestr("\u3042\u3044\u3046"), # 2 x 3 = 6 columns
pycompat.ensurestr("\u3042\u3044\u3046\u3048"), # 2 x 4 = 8 columns
]
@ -50,7 +50,7 @@ def progresstest(ui, loops, total, **opts):
nested = opts.get("nested", None)
useunicode = opts.get("unicode", False)
if useunicode:
topic = pycompat.ensurestr(u"\u3042\u3044\u3046\u3048")
topic = pycompat.ensurestr("\u3042\u3044\u3046\u3048")
else:
topic = "progress test"
with progress.bar(ui, topic, "cycles", total) as prog:

View File

@ -183,7 +183,6 @@ if sys.version_info > (3, 5, 0):
return p
return p.decode("utf-8")
elif sys.version_info >= (3, 0, 0):
print(
"%s is only supported on Python 3.5+ and 2.7, not %s"
@ -860,7 +859,7 @@ def vlog(*msg):
# Bytes that break XML even in a CDATA block: control characters 0-31
# sans \t, \n and \r
CDATA_EVIL = re.compile(br"[\000-\010\013\014\016-\037]")
CDATA_EVIL = re.compile(rb"[\000-\010\013\014\016-\037]")
# Match feature conditionalized output lines in the form, capturing the feature
# list in group 2, and the preceeding line output in group 1:
@ -967,7 +966,6 @@ if os.name == "nt":
_kernel32.CloseHandle(self._hjob)
self._hjob = 0
else:
class ProcessGroup(object):
@ -1421,7 +1419,7 @@ class Test(unittest.TestCase):
def _portmap(self, i):
offset = b"" if i == 0 else b"%d" % i
return (br":%d\b" % (self._startport + i), b":$HGPORT%s" % offset)
return (rb":%d\b" % (self._startport + i), b":$HGPORT%s" % offset)
def _getreplacements(self):
"""Obtain a mapping of text replacements to apply to test output.
@ -1438,25 +1436,25 @@ class Test(unittest.TestCase):
# This hack allows us to have same outputs for ipv4 and v6 urls:
# [ipv6]:port
(
br"([^0-9:])\[%s\]:[0-9]+" % re.escape(_bytespath(self._localip())),
br"\1$LOCALIP:$LOCAL_PORT",
rb"([^0-9:])\[%s\]:[0-9]+" % re.escape(_bytespath(self._localip())),
rb"\1$LOCALIP:$LOCAL_PORT",
),
# [ipv6]
(
br"([^0-9:])\[%s\]" % re.escape(_bytespath(self._localip())),
br"\1$LOCALIP",
rb"([^0-9:])\[%s\]" % re.escape(_bytespath(self._localip())),
rb"\1$LOCALIP",
),
# ipv4:port
(
br"([^0-9])%s:[0-9]+" % re.escape(_bytespath(self._localip())),
br"\1$LOCALIP:$LOCAL_PORT",
rb"([^0-9])%s:[0-9]+" % re.escape(_bytespath(self._localip())),
rb"\1$LOCALIP:$LOCAL_PORT",
),
# [ipv4]
(br"([^0-9])%s" % re.escape(_bytespath(self._localip())), br"\1$LOCALIP"),
(br"\bHG_TXNID=TXN:[a-f0-9]{40}\b", br"HG_TXNID=TXN:$ID$"),
(rb"([^0-9])%s" % re.escape(_bytespath(self._localip())), rb"\1$LOCALIP"),
(rb"\bHG_TXNID=TXN:[a-f0-9]{40}\b", rb"HG_TXNID=TXN:$ID$"),
]
r.append((_bytespath(self._escapepath(self._testtmp)), b"$TESTTMP"))
r.append((br"eager:///", br"eager://"))
r.append((rb"eager:///", rb"eager://"))
replacementfile = os.path.join(self._testdir, "common-pattern.py")
@ -1842,9 +1840,9 @@ class TTest(Test):
SKIPPED_PREFIX = b"skipped: "
FAILED_PREFIX = b"hghave check failed: "
ESCAPESUB = re.compile(br"[\x00-\x08\x0b-\x1f\\\x7f-\xff]").sub
ESCAPEMAP = dict((bchr(i), br"\x%02x" % i) for i in range(256))
ESCAPEMAP.update({b"\\": b"\\\\", b"\r": br"\r"})
ESCAPESUB = re.compile(rb"[\x00-\x08\x0b-\x1f\\\x7f-\xff]").sub
ESCAPEMAP = dict((bchr(i), rb"\x%02x" % i) for i in range(256))
ESCAPEMAP.update({b"\\": b"\\\\", b"\r": rb"\r"})
def __init__(self, path, *args, **kwds):
# accept an extra "case" parameter
@ -2142,7 +2140,7 @@ class TTest(Test):
if not lout.endswith(b"\n"):
if b"\x1b" in lout or b"\r" in lout:
lout = (
lout.replace(b"\x1b", br"\x1b").replace(b"\r", br"\r")
lout.replace(b"\x1b", rb"\x1b").replace(b"\r", rb"\r")
+ b" (no-eol) (esc)\n"
)
else:
@ -2222,8 +2220,8 @@ class TTest(Test):
el = b"(?:" + el + b")"
# use \Z to ensure that the regex matches to the end of the string
if os.name == "nt":
return re.match(el + br"\r?\n\Z", l)
return re.match(el + br"\n\Z", l)
return re.match(el + rb"\r?\n\Z", l)
return re.match(el + rb"\n\Z", l)
except re.error:
# el is an invalid regex
return False
@ -3109,9 +3107,9 @@ class TextTestRunner(unittest.TextTestRunner):
data = pread(bisectcmd + ["--command", rtc])
m = re.search(
(
br"\nThe first (?P<goodbad>bad|good) revision "
br"is:\ncommit: +(?P<node>[a-f0-9]+)\n.*\n"
br"summary: +(?P<summary>[^\n]+)\n"
rb"\nThe first (?P<goodbad>bad|good) revision "
rb"is:\ncommit: +(?P<node>[a-f0-9]+)\n.*\n"
rb"summary: +(?P<summary>[^\n]+)\n"
),
data,
(re.MULTILINE | re.DOTALL),

View File

@ -48,7 +48,7 @@ def run_stress_test(n, binary, kill_median, kill_half_width):
proc.kill()
if not os.path.exists(filename):
print(
u"ALARM! Iteration %i failed. File not found. Slept: %fs"
"ALARM! Iteration %i failed. File not found. Slept: %fs"
% (p, tosleep)
)
finally:

View File

@ -18,7 +18,7 @@ from testutil.dott import testtmp # noqa: F401
writefile(
"a.rc",
br"""[a]
rb"""[a]
x=1
y=2
%include b.rc
@ -27,7 +27,7 @@ y=2
writefile(
"b.rc",
br"""%include b.rc
rb"""%include b.rc
[b]
z = 3
[a]
@ -36,7 +36,7 @@ z = 3
""",
)
writefile("broken.rc", br"%not-implemented")
writefile("broken.rc", rb"%not-implemented")
def createConfig():

View File

@ -223,7 +223,7 @@ color =
>> "$HGRCPATH"
)
sh % "hg export --color always --nodates tip" == br"""
sh % "hg export --color always --nodates tip" == rb"""
# HG changeset patch
# User test
# Date 0 0

View File

@ -23,7 +23,7 @@ from edenscm.mercurial.node import nullid
from hghave import require
SMALLFANOUTCUTOFF = int(2 ** 16 / 8)
SMALLFANOUTCUTOFF = int(2**16 / 8)
LARGEFANOUTPREFIX = 2
try:

View File

@ -12,7 +12,6 @@ if sys.version_info[0] >= 3:
def escape(s):
return str(s.encode("utf-8"))[2:-1]
else:
def escape(s):

View File

@ -206,7 +206,7 @@ def genbits(n):
That is to say, given any x, y where both x, and y are in range(2 ** n),
there is an x followed immediately by y in the generated sequence.
"""
m = 2 ** n
m = 2**n
# Gray Code. See https://en.wikipedia.org/wiki/Gray_code
gray = lambda x: x ^ (x >> 1)

View File

@ -43,7 +43,7 @@ def lm(expected, output):
"""
assert expected.endswith(b"\n") and output.endswith(b"\n"), "missing newline"
assert not re.search(
br"[^ \w\\/\r\n()*?]", expected + output
rb"[^ \w\\/\r\n()*?]", expected + output
), b"single backslash or unknown char"
test = run_tests.TTest("test-run-test.t", ".", ".")
match = test.linematch(expected, output)

View File

@ -121,7 +121,7 @@ check(_verifycert(cert("a.*.com"), "a..com"), "certificate is for a.*.com")
check(_verifycert(cert("a.*.com"), "a.com"), "certificate is for a.*.com")
# wildcard doesn't match IDNA prefix 'xn--'
idna = u"püthon.python.org".encode("idna").decode("ascii")
idna = "püthon.python.org".encode("idna").decode("ascii")
check(_verifycert(cert(idna), idna), None)
check(_verifycert(cert("x*.python.org"), idna), "certificate is for x*.python.org")
check(
@ -130,21 +130,21 @@ check(
# wildcard in first fragment and IDNA A-labels in sequent fragments
# are supported.
idna = u"www*.pythön.org".encode("idna").decode("ascii")
check(_verifycert(cert(idna), u"www.pythön.org".encode("idna").decode("ascii")), None)
check(_verifycert(cert(idna), u"www1.pythön.org".encode("idna").decode("ascii")), None)
idna = "www*.pythön.org".encode("idna").decode("ascii")
check(_verifycert(cert(idna), "www.pythön.org".encode("idna").decode("ascii")), None)
check(_verifycert(cert(idna), "www1.pythön.org".encode("idna").decode("ascii")), None)
check(
_verifycert(cert(idna), u"ftp.pythön.org".encode("idna").decode("ascii")),
_verifycert(cert(idna), "ftp.pythön.org".encode("idna").decode("ascii")),
"certificate is for www*.xn--pythn-mua.org",
)
check(
_verifycert(cert(idna), u"pythön.org".encode("idna").decode("ascii")),
_verifycert(cert(idna), "pythön.org".encode("idna").decode("ascii")),
"certificate is for www*.xn--pythn-mua.org",
)
c = {
"notAfter": "Jun 26 21:41:46 2011 GMT",
"subject": (((u"commonName", u"linuxfrz.org"),),),
"subject": ((("commonName", "linuxfrz.org"),),),
"subjectAltName": (
("DNS", "linuxfr.org"),
("DNS", "linuxfr.com"),
@ -162,11 +162,11 @@ check(_verifycert(c, "linuxfrz.org"), "certificate is for linuxfr.org, linuxfr.c
c = {
"notAfter": "Dec 18 23:59:59 2011 GMT",
"subject": (
((u"countryName", u"US"),),
((u"stateOrProvinceName", u"California"),),
((u"localityName", u"Mountain View"),),
((u"organizationName", u"Google Inc"),),
((u"commonName", u"mail.google.com"),),
(("countryName", "US"),),
(("stateOrProvinceName", "California"),),
(("localityName", "Mountain View"),),
(("organizationName", "Google Inc"),),
(("commonName", "mail.google.com"),),
),
}
check(_verifycert(c, "mail.google.com"), None)
@ -179,10 +179,10 @@ check(_verifycert(c, "California"), "certificate is for mail.google.com")
c = {
"notAfter": "Dec 18 23:59:59 2011 GMT",
"subject": (
((u"countryName", u"US"),),
((u"stateOrProvinceName", u"California"),),
((u"localityName", u"Mountain View"),),
((u"organizationName", u"Google Inc"),),
(("countryName", "US"),),
(("stateOrProvinceName", "California"),),
(("localityName", "Mountain View"),),
(("organizationName", "Google Inc"),),
),
}
check(
@ -194,10 +194,10 @@ check(
c = {
"notAfter": "Dec 18 23:59:59 2099 GMT",
"subject": (
((u"countryName", u"US"),),
((u"stateOrProvinceName", u"California"),),
((u"localityName", u"Mountain View"),),
((u"commonName", u"mail.google.com"),),
(("countryName", "US"),),
(("stateOrProvinceName", "California"),),
(("localityName", "Mountain View"),),
(("commonName", "mail.google.com"),),
),
"subjectAltName": (("othername", "blabla"),),
}
@ -207,10 +207,10 @@ check(_verifycert(c, "mail.google.com"), None)
c = {
"notAfter": "Dec 18 23:59:59 2099 GMT",
"subject": (
((u"countryName", u"US"),),
((u"stateOrProvinceName", u"California"),),
((u"localityName", u"Mountain View"),),
((u"organizationName", u"Google Inc"),),
(("countryName", "US"),),
(("stateOrProvinceName", "California"),),
(("localityName", "Mountain View"),),
(("organizationName", "Google Inc"),),
),
"subjectAltName": (("othername", "blabla"),),
}
@ -224,13 +224,13 @@ check(_verifycert({}, "example.com"), "no certificate received")
# avoid denials of service by refusing more than one
# wildcard per fragment.
check(_verifycert({"subject": (((u"commonName", u"a*b.com"),),)}, "axxb.com"), None)
check(_verifycert({"subject": ((("commonName", "a*b.com"),),)}, "axxb.com"), None)
check(
_verifycert({"subject": (((u"commonName", u"a*b.co*"),),)}, "axxb.com"),
_verifycert({"subject": ((("commonName", "a*b.co*"),),)}, "axxb.com"),
"certificate is for a*b.co*",
)
check(
_verifycert({"subject": (((u"commonName", u"a*b*.com"),),)}, "axxbxxc.com"),
_verifycert({"subject": ((("commonName", "a*b*.com"),),)}, "axxbxxc.com"),
"too many wildcards in certificate DNS name: a*b*.com",
)

View File

@ -131,19 +131,19 @@ def _fix():
# This is not super efficient. But it's easy to write.
for i in range(startline, endline + 1):
line = lines[i - 1]
newline = u""
newline = ""
if i == startline:
if isinstance(code, bytes):
code = code.decode("utf-8")
newline += u"%s%s" % (line[:startcol], code)
newline += "%s%s" % (line[:startcol], code)
if i == endline:
newline += line[endcol:]
lines[i - 1] = newline
lines = u"".join(lines).splitlines(True)
lines = "".join(lines).splitlines(True)
with open(path, "wb") as f:
f.write(u"".join(lines).encode("utf-8"))
f.write("".join(lines).encode("utf-8"))
def _removeindent(text):