mirror of
https://github.com/facebook/sapling.git
synced 2024-10-11 17:27:53 +03:00
ac7e07dbdf
Summary: At a recent team meeting we've decided to remove the command prefix matching behavior, as it can be really annoying for the Rust parser (since it needs to know all the names, but it wants to avoid spinning up Python). It's even more annoying for subcommand support. FWIW git does not have prefix matching. This diff adds various aliases to "roughly" keep the command prefix matching behavior. The list of aliases are obtained by this script in `hg dbsh`: def unique(prefix, names): m = __import__('edenscm.mercurial').mercurial try: return m.cmdutil.findcmd(prefix, m.commands.table, False)[0][0] in names except: return False nameslist=sorted([i.replace('^','') for i in m.commands.table]) aliases = {} for names in nameslist: names = names.split('|') for name in names: if name.startswith('debug'): continue for prefix in [name[:i] for i in xrange(1, len(name))]: if unique(prefix, names): aliases.setdefault(name, []).append(prefix) Debug commands, and commands that are rarely used are not changed, including: 'backfillmanifestrevlog': ['backfillm', 'backfillma', 'backfillman', 'backfillmani', 'backfillmanif', 'backfillmanife', 'backfillmanifes', 'backfillmanifest', 'backfillmanifestr', 'backfillmanifestre', 'backfillmanifestrev', 'backfillmanifestrevl', 'backfillmanifestrevlo'], 'backfilltree': ['backfillt', 'backfilltr', 'backfilltre']} 'blackbox': ['blac', 'black', 'blackb', 'blackbo'], 'cachemanifest': ['cac', 'cach', 'cache', 'cachem', 'cachema', 'cacheman', 'cachemani', 'cachemanif', 'cachemanife', 'cachemanifes'], 'chistedit': ['chi', 'chis', 'chist', 'chiste', 'chisted', 'chistedi'], 'clone': ['clon'], 'cloud': ['clou'], 'convert': ['conv', 'conve', 'conver'], 'copy': ['cop'], 'fastannotate': ['fa', 'fas', 'fast', 'fasta', 'fastan', 'fastann', 'fastanno', 'fastannot', 'fastannota', 'fastannotat'], 'fold': ['fol'], 'githelp': ['gi', 'git', 'gith', 'githe', 'githel'], 'histgrep': ['histg', 'histgr', 'histgre'], 'incoming': ['in', 'inc', 'inco', 'incom', 
'incomi', 'incomin'], 'isbackedup': ['is', 'isb', 'isba', 'isbac', 'isback', 'isbacke', 'isbacked', 'isbackedu'], 'manifest': ['ma', 'man', 'mani', 'manif', 'manife', 'manifes'], 'outgoing': ['o', 'ou', 'out', 'outg', 'outgo', 'outgoi', 'outgoin'], 'prefetch': ['pref', 'prefe', 'prefet', 'prefetc'], 'prune': ['pru', 'prun'], 'pushbackup': ['pushb', 'pushba', 'pushbac', 'pushback', 'pushbacku'], 'rage': ['ra', 'rag'], 'record': ['recor'], 'recover': ['recov', 'recove'], 'redo': ['red'], 'repack': ['rep', 'repa', 'repac'], 'reset': ['rese'], 'rollback': ['rol', 'roll', 'rollb', 'rollba', 'rollbac'], 'root': ['roo'], 'serve': ['se', 'ser', 'serv'], 'share': ['sha', 'shar'], 'sparse': ['spa', 'spar', 'spars'], 'svn': ['sv'], 'undo': ['und'], 'unshare': ['unsha', 'unshar'], 'verifyremotefilelog': ['verifyr', 'verifyre', 'verifyrem', 'verifyremo', 'verifyremot', 'verifyremote', 'verifyremotef', 'verifyremotefi', 'verifyremotefil', 'verifyremotefile', 'verifyremotefilel', 'verifyremotefilelo'], Reviewed By: sfilipco Differential Revision: D17644676 fbshipit-source-id: f60f5e6810279b52f9a4a1e048eeb529a96bd735
200 lines
6.2 KiB
Python
200 lines
6.2 KiB
Python
# sendunbundlereplay.py - send unbundlereplay wireproto command
|
|
#
|
|
# Copyright 2019-present Facebook, Inc.
|
|
#
|
|
# This software may be used and distributed according to the terms of the
|
|
# GNU General Public License version 2 or any later version.
|
|
from __future__ import absolute_import
|
|
|
|
import contextlib
|
|
import datetime
|
|
import os
|
|
import sys
|
|
|
|
from edenscm.mercurial import error, hg, replay, util
|
|
from edenscm.mercurial.commands import command
|
|
from edenscm.mercurial.i18n import _
|
|
|
|
|
|
def getcommitdates(ui, fname=None):
    """Read the commit-date mapping, one ``<commithash>=<date>`` per line.

    Reads from *fname* when given, otherwise from ``ui.fin`` (stdin).
    Returns a dict mapping commit hash to its hg-parseable date string.
    """
    if fname:
        with open(fname, "r") as tf:
            timestamps = tf.readlines()
    else:
        timestamps = ui.fin
    # Split on the first "=" only so a date containing "=" cannot produce a
    # 3-element list (which would make dict() raise), and strip the trailing
    # newline that readlines()/stdin iteration leaves on each value.
    return dict(line.rstrip("\n").split("=", 1) for line in timestamps)
|
|
|
|
|
|
def getstream(fname):
    """Return the entire contents of *fname* wrapped in a util.chunkbuffer."""
    with open(fname, "rb") as bundlefile:
        contents = bundlefile.read()
    return util.chunkbuffer([contents])
|
|
|
|
|
|
@util.timed(annotation="creating a peer took")
def getremote(ui, path):
    """Create and return an hg peer for *path*; peer creation time is logged."""
    emptyopts = {}
    return hg.peer(ui, emptyopts, path)
|
|
|
|
|
|
@util.timed(annotation="single wireproto command took")
def runreplay(ui, remote, stream, commitdates, rebasedhead, ontobook):
    """Send a single ``unbundlereplay`` wireproto command to *remote*.

    *stream* is the bundle contents, *commitdates* a hash->date dict,
    *rebasedhead* the expected rebased head (or None when the bookmark was
    deleted) and *ontobook* the bookmark to pushrebase onto.

    Returns 0 on success, 1 when the server reply contains an error part,
    and 255 when sending the command itself raised.
    """
    try:
        reply = remote.unbundlereplay(
            stream,
            ["force"],
            remote.url(),
            replay.ReplayData(commitdates, rebasedhead, ontobook),
            ui.configbool("sendunbundlereplay", "respondlightly", True),
        )
    except Exception as e:
        # Previously the exception was swallowed silently (and the function
        # returned from inside `finally`); surface the failure so it is
        # diagnosable while keeping the 255 exit-code contract.
        ui.warn(_("unbundlereplay command failed: %s\n") % e)
        return 255

    returncode = 0
    for part in reply.iterparts():
        # Each part must be fully consumed even though only its type and
        # params are inspected.
        part.read()
        if part.type.startswith("error:"):
            returncode = 1
            ui.warn(_("replay failed: %s\n") % part.type)
            if "message" in part.params:
                ui.warn(_("part message: %s\n") % (part.params["message"]))
    return returncode
|
|
|
|
|
|
def writereport(reportsfile, msg):
    """Write *msg* to the progress-reports file and force it out to disk.

    The write is flushed and fsync'ed so an external observer sees the
    report immediately, even if this process dies right afterwards.
    """
    fd = reportsfile.fileno()
    reportsfile.write(msg)
    reportsfile.flush()
    os.fsync(fd)
|
|
|
|
|
|
@contextlib.contextmanager
def capturelogs(ui, remote, logfile):
    """Capture the output of *remote.ui* and *ui* for the managed block.

    When *logfile* is None this is a no-op. Otherwise both uis are buffered
    for the duration of the block; on exit the combined output is echoed to
    stderr via ``ui.write_err`` and also written to *logfile*.
    """
    if logfile is None:
        yield
        return

    # Buffer order matters: remote output comes first in the combined log.
    buffered = [remote.ui, ui]
    for bufui in buffered:
        bufui.pushbuffer(error=True, subproc=True)

    try:
        yield
    finally:
        combined = "".join(bufui.popbuffer() for bufui in buffered)
        ui.write_err(combined)
        with open(logfile, "w") as logf:
            logf.write(combined)
|
|
|
|
|
|
@command(
    "sendunbundlereplaybatch",
    [
        ("", "path", "", _("hg server remotepath (ssh)"), ""),
        ("", "reports", "", _("a file for unbundereplay progress reports"), ""),
    ],
    _("[OPTION]..."),
    norepo=True,
)
def sendunbundlereplaybatch(ui, **opts):
    """Send a batch of unbundlereplay wireproto commands to a given server

    This exists to amortize the costs of `hg.peer` creation over multiple
    `unbundlereplay` calls.

    Reads `(bundlefile, timestampsfile, ontobook, rebasedhead)` from
    stdin. See docs of `sendunbundlereplay` for more details.

    Takes the `reports` argument on the command line. After each unbundlereplay
    command is successfully executed, will write and flush a single line
    into this file, thus reporting progress. File is truncated at the beginning
    of this function.

    ``sendunbundlereplay.respondlightly`` config option instructs the server
    to avoid sending large bundle2 parts back.
    """
    if not opts.get("reports"):
        raise error.Abort("--reports argument is required")
    path = opts["path"]
    returncode = 0
    # One peer is created up front and reused for every item in the batch.
    remote = getremote(ui, path)
    ui.debug("using %s as a reports file\n" % opts["reports"])
    # Buffering is disabled (third open() arg is 0) so each report line hits
    # the OS immediately; writereport additionally flushes and fsyncs.
    with open(opts["reports"], "wb", 0) as reportsfile:
        counter = 0
        while True:
            # One whitespace-separated request per stdin line; an empty read
            # means EOF and ends the batch.
            line = sys.stdin.readline()
            if line == "":
                break

            # The newest sync job sends 5 parameters, but older versions send 4.
            # We default the last parameter to None for compatibility.
            parts = line.split()
            if len(parts) == 4:
                parts.append(None)
            (bfname, tsfname, ontobook, rebasedhead, logfile) = parts

            # "DELETED" is the sentinel for a deleted bookmark (no head hash).
            rebasedhead = None if rebasedhead == "DELETED" else rebasedhead
            commitdates = getcommitdates(ui, tsfname)
            stream = getstream(bfname)

            # Optionally tee this item's ui output into its own log file.
            with capturelogs(ui, remote, logfile):
                returncode = runreplay(
                    ui, remote, stream, commitdates, rebasedhead, ontobook
                )

            if returncode != 0:
                # the word "failed" is an identifier of failure, do not change
                failure = "unbundle replay batch item #%i failed\n" % counter
                ui.warn(failure)
                writereport(reportsfile, failure)
                break
            success = "unbundle replay batch item #%i successfully sent\n" % counter
            ui.warn(success)
            writereport(reportsfile, success)
            counter += 1

    return returncode
|
|
|
|
|
|
@command(
    "sendunbundlereplay",
    [
        ("", "file", "", _("file to read bundle from"), ""),
        ("", "path", "", _("hg server remotepath (ssh)"), ""),
        ("r", "rebasedhead", "", _("expected rebased head hash"), ""),
        (
            "",
            "deleted",
            False,
            _("bookmark was deleted, can't be used with `--rebasedhead`"),
        ),
        ("b", "ontobook", "", _("expected onto bookmark for pushrebase"), ""),
    ],
    _("[OPTION]..."),
    norepo=True,
)
def sendunbundlereplay(ui, **opts):
    """Send a single unbundlereplay wireproto command to a given server

    Takes the `rebasedhead` and `ontobook` arguments on the command line,
    and reads commit dates from stdin in the format:
    <commithash>=<hg-parseable-date>

    Exactly one of `--rebasedhead` and `--deleted` must be given.

    ``sendunbundlereplay.respondlightly`` config option instructs the server
    to avoid sending large bundle2 parts back.
    """
    rebasedhead = opts["rebasedhead"]
    deleted = opts["deleted"]
    # --rebasedhead and --deleted are mutually exclusive, and one is required.
    if rebasedhead and deleted:
        raise error.Abort("can't use `--rebasedhead` and `--deleted`")

    if not (rebasedhead or deleted):
        raise error.Abort("either `--rebasedhead` or `--deleted` should be used")

    commitdates = getcommitdates(ui)
    stream = getstream(opts["file"])
    remote = getremote(ui, opts["path"])
    return runreplay(ui, remote, stream, commitdates, rebasedhead, opts["ontobook"])
|