mirror of
https://github.com/facebook/sapling.git
synced 2024-10-06 14:58:03 +03:00
commitcloud: use dag operations to simplify 'hide' logic
Summary: Now we have a virtual DAG that can be queried. Use that to figure out what heads to hide and add, instead of manual traversal through the graph. Also did some tweaks to fix Python 3 compatibility and make the parent graph style a bit more correct. Reviewed By: markbt Differential Revision: D21554672 fbshipit-source-id: 749d7938a8612e21c5975d9b80a275a059de022d
This commit is contained in:
parent
4ff20f93e0
commit
b2c1d90f22
@ -12,7 +12,7 @@ import collections
|
||||
import bindings
|
||||
from edenscm.mercurial import dagop, json, node as nodemod, pycompat
|
||||
from edenscm.mercurial.graphmod import CHANGESET, GRANDPARENT, MISSINGPARENT, PARENT
|
||||
from edenscm.mercurial.pycompat import ensurestr
|
||||
from edenscm.mercurial.pycompat import decodeutf8, encodeutf8, ensurestr
|
||||
|
||||
|
||||
def _joinremotename(remote, name):
|
||||
@ -296,24 +296,26 @@ class BaseService(pycompat.ABC):
|
||||
"""
|
||||
|
||||
public = smartloginfo.public
|
||||
publicset = set(public)
|
||||
dag = smartloginfo.dag.beautify(public)
|
||||
|
||||
def createctx(repo, node):
|
||||
return FakeCtx(repo, smartloginfo.nodeinfos[node], node)
|
||||
|
||||
def parentwithstyle(node, p):
|
||||
if node not in publicset:
|
||||
return (PARENT, p)
|
||||
if p in smartloginfo.nodeinfos[node].parents:
|
||||
return (PARENT, p)
|
||||
return (GRANDPARENT, p)
|
||||
|
||||
def dagwalker():
|
||||
for node in dag.all():
|
||||
ctx = createctx(repo, node)
|
||||
# XXX: This does not actually take the real parent information
|
||||
# into consideration.
|
||||
if node in public:
|
||||
parentstyle = GRANDPARENT
|
||||
else:
|
||||
parentstyle = PARENT
|
||||
parents = [(parentstyle, p) for p in dag.parentnames(node)]
|
||||
parents = [parentwithstyle(node, p) for p in dag.parentnames(node)]
|
||||
yield (node, CHANGESET, ctx, parents)
|
||||
|
||||
firstbranch = filter(None, [dag.sort(public).first()])
|
||||
firstbranch = public[0:1]
|
||||
return firstbranch, dagwalker()
|
||||
|
||||
|
||||
@ -321,13 +323,15 @@ def _makenodes(data):
|
||||
nodes = {}
|
||||
for nodeinfo in data["nodes"]:
|
||||
node = ensurestr(nodeinfo["node"])
|
||||
parents = [ensurestr(p) for p in nodeinfo["parents"]]
|
||||
parents = [encodeutf8(ensurestr(p)) for p in nodeinfo["parents"]]
|
||||
bookmarks = [ensurestr(b) for b in nodeinfo["bookmarks"]]
|
||||
author = ensurestr(nodeinfo["author"])
|
||||
date = int(nodeinfo["date"])
|
||||
message = ensurestr(nodeinfo["message"])
|
||||
phase = ensurestr(nodeinfo["phase"])
|
||||
nodes[node] = NodeInfo(node, bookmarks, parents, author, date, message, phase)
|
||||
nodes[encodeutf8(node)] = NodeInfo(
|
||||
node, bookmarks, parents, author, date, message, phase
|
||||
)
|
||||
return nodes
|
||||
|
||||
|
||||
|
@ -343,19 +343,14 @@ def cloudhide(ui, repo, *revs, **opts):
|
||||
firstpublic, revdag = serv.makedagwalker(slinfo, repo)
|
||||
cloudrefs = serv.getreferences(reponame, workspacename, 0)
|
||||
|
||||
ctxs = {}
|
||||
childmap = {}
|
||||
drafts = set()
|
||||
for (_r, _t, ctx, _p) in revdag:
|
||||
ctxs[ctx.node()] = ctx
|
||||
for p in ctx.parents():
|
||||
childmap.setdefault(p, []).append(ctx.node())
|
||||
if ctx.phasestr() == "draft":
|
||||
drafts.add(ctx.node())
|
||||
nodeinfos = slinfo.nodeinfos
|
||||
dag = slinfo.dag
|
||||
drafts = set(slinfo.draft)
|
||||
|
||||
removenodes = set()
|
||||
|
||||
for rev in list(revs) + opts.get("rev", []):
|
||||
rev = pycompat.encodeutf8(rev)
|
||||
if rev in drafts:
|
||||
removenodes.add(rev)
|
||||
else:
|
||||
@ -396,63 +391,44 @@ def cloudhide(ui, repo, *revs, **opts):
|
||||
if matcher(remote):
|
||||
removeremotes.add(remote)
|
||||
|
||||
# Find the heads we need to remove
|
||||
candidates = removenodes.copy()
|
||||
removeheads = set()
|
||||
seen = set()
|
||||
while candidates:
|
||||
candidate = candidates.pop()
|
||||
seen.add(candidate)
|
||||
removebookmarks.update(ctxs[candidate].bookmarks())
|
||||
if candidate in cloudrefs.heads:
|
||||
removeheads.add(candidate)
|
||||
for child in childmap.get(candidate, []):
|
||||
if child not in seen:
|
||||
candidates.add(child)
|
||||
removenodes.discard(child)
|
||||
# Find the heads we need to add to keep other commits visible
|
||||
addheads = set()
|
||||
for hexnode in removenodes:
|
||||
for parent in ctxs[hexnode].parents():
|
||||
if parent in drafts:
|
||||
descendants = set(child for child in childmap.get(parent, []))
|
||||
descendants.discard(hexnode)
|
||||
# This parent is a new head, unless one of its other descendants is
|
||||
# a head that is not being removed.
|
||||
newhead = True
|
||||
while descendants:
|
||||
descendant = descendants.pop()
|
||||
if descendant in cloudrefs.heads and descendant not in removeheads:
|
||||
newhead = False
|
||||
break
|
||||
descendants.update(childmap.get(descendant, []))
|
||||
if newhead:
|
||||
addheads.add(parent)
|
||||
# Find the heads and bookmarks we need to remove
|
||||
allremovenodes = dag.descendants(removenodes)
|
||||
removeheads = set(allremovenodes & map(pycompat.encodeutf8, cloudrefs.heads))
|
||||
for node in allremovenodes:
|
||||
removebookmarks.update(nodeinfos[node].bookmarks)
|
||||
|
||||
# Find the heads we need to remove because we are removing the last bookmark
|
||||
# to it.
|
||||
remainingheads = set(cloudrefs.heads) - removeheads
|
||||
remainingheads = set(map(pycompat.encodeutf8, cloudrefs.heads)) - removeheads
|
||||
for bookmark in removebookmarks:
|
||||
ctx = ctxs.get(cloudrefs.bookmarks[bookmark])
|
||||
if ctx is not None and ctx.node() in remainingheads:
|
||||
allbms = set(ctxs[cloudrefs.bookmarks[bookmark]].bookmarks())
|
||||
if removebookmarks.issuperset(allbms):
|
||||
removeheads.add(ctx.node())
|
||||
remainingheads.discard(ctx.node())
|
||||
nodeutf8 = cloudrefs.bookmarks[bookmark]
|
||||
node = pycompat.encodeutf8(nodeutf8)
|
||||
info = nodeinfos.get(node)
|
||||
if node in remainingheads and info:
|
||||
if removebookmarks.issuperset(set(info.bookmarks)):
|
||||
remainingheads.discard(node)
|
||||
removeheads.add(node)
|
||||
|
||||
# Find the heads we need to add to keep other commits visible
|
||||
addheads = (
|
||||
dag.parents(removenodes) - allremovenodes - dag.ancestors(remainingheads)
|
||||
) & drafts
|
||||
|
||||
if removeheads:
|
||||
ui.status(_("removing heads:\n"))
|
||||
for head in sorted(removeheads):
|
||||
headutf8 = pycompat.decodeutf8(head)
|
||||
ui.status(
|
||||
" %s %s\n"
|
||||
% (head[:12], templatefilters.firstline(ctxs[head].description()))
|
||||
% (headutf8[:12], templatefilters.firstline(nodeinfos[head].message))
|
||||
)
|
||||
if addheads:
|
||||
ui.status(_("adding heads:\n"))
|
||||
for head in sorted(addheads):
|
||||
headutf8 = pycompat.decodeutf8(head)
|
||||
ui.status(
|
||||
" %s %s\n"
|
||||
% (head[:12], templatefilters.firstline(ctxs[head].description()))
|
||||
% (headutf8[:12], templatefilters.firstline(nodeinfos[head].message))
|
||||
)
|
||||
if removebookmarks:
|
||||
ui.status(_("removing bookmarks:\n"))
|
||||
@ -463,6 +439,10 @@ def cloudhide(ui, repo, *revs, **opts):
|
||||
for remote in sorted(removeremotes):
|
||||
ui.status(" %s: %s\n" % (remote, cloudrefs.remotebookmarks[remote][:12]))
|
||||
|
||||
# Normalize back to strings. (The DAG wants bytes, the cloudrefs wants str)
|
||||
removeheads = list(map(pycompat.decodeutf8, removeheads))
|
||||
addheads = list(map(pycompat.decodeutf8, addheads))
|
||||
|
||||
if removeheads or addheads or removebookmarks or removeremotes:
|
||||
if opts.get("dry_run"):
|
||||
ui.status(_("not updating cloud workspace: --dry-run specified\n"))
|
||||
|
@ -735,10 +735,10 @@ Tests for hg cloud sl
|
||||
./ some commit
|
||||
|
|
||||
o 390b78 (public) 2018-06-26 16:53 +0000
|
||||
. some commit
|
||||
.
|
||||
. o b71712 Test User 2018-06-27 22:20 +0000
|
||||
./ some commit
|
||||
| some commit
|
||||
|
|
||||
| o b71712 Test User 2018-06-27 22:20 +0000
|
||||
|/ some commit
|
||||
|
|
||||
o 0c157b (public) 2018-06-26 16:53 +0000
|
||||
. some commit
|
||||
|
Loading…
Reference in New Issue
Block a user