2017-10-14 00:51:27 +03:00
|
|
|
# fbsparse.py - allow sparse checkouts of the working directory
|
|
|
|
#
|
|
|
|
# Copyright 2014 Facebook, Inc.
|
|
|
|
#
|
|
|
|
# This software may be used and distributed according to the terms of the
|
|
|
|
# GNU General Public License version 2 or any later version.
|
|
|
|
|
|
|
|
"""allow sparse checkouts of the working directory
|
|
|
|
"""
|
|
|
|
|
2018-03-29 12:34:01 +03:00
|
|
|
from __future__ import division
|
|
|
|
|
2018-03-15 14:34:09 +03:00
|
|
|
from mercurial import (
|
|
|
|
util,
|
|
|
|
cmdutil,
|
|
|
|
extensions,
|
|
|
|
context,
|
|
|
|
dirstate,
|
|
|
|
commands,
|
2018-04-10 13:37:16 +03:00
|
|
|
fancyopts,
|
2018-03-15 14:34:09 +03:00
|
|
|
progress,
|
|
|
|
localrepo,
|
|
|
|
error,
|
2018-03-26 18:44:42 +03:00
|
|
|
help,
|
2018-03-15 14:34:09 +03:00
|
|
|
hg,
|
2018-03-26 18:44:42 +03:00
|
|
|
minirst,
|
2018-03-15 14:34:09 +03:00
|
|
|
pathutil,
|
|
|
|
registrar,
|
|
|
|
patch,
|
|
|
|
pycompat,
|
2018-04-10 13:37:19 +03:00
|
|
|
scmutil,
|
2018-03-15 14:34:09 +03:00
|
|
|
)
|
2017-10-14 00:51:27 +03:00
|
|
|
from mercurial import match as matchmod
|
|
|
|
from mercurial import merge as mergemod
|
|
|
|
from mercurial.node import nullid
|
|
|
|
from mercurial.i18n import _
|
2018-03-13 15:02:32 +03:00
|
|
|
from mercurial.thirdparty import attr
|
2018-03-28 17:15:11 +03:00
|
|
|
import os, collections, functools, hashlib
|
2018-03-14 16:49:45 +03:00
|
|
|
import re
|
2017-10-14 00:51:27 +03:00
|
|
|
|
|
|
|
# Command registration table for this extension, populated via @command.
cmdtable = {}

command = registrar.command(cmdtable)

# Standard marker: this extension ships with (and is tested against) the
# Facebook hg extensions distribution.
testedwith = 'ships-with-fb-hgext'

# Pattern kinds that are interpreted relative to the current working
# directory rather than the repository root.
# NOTE(review): the name contains typos ("realtive", "pat"); kept as-is
# because code outside this view may reference it.
cwdrealtivepatkinds = ('glob', 'relpath')
|
|
|
|
|
|
|
|
def uisetup(ui):
    """Extension hook run at ui setup: install update and commit wrappers."""
    _setupupdates(ui)
    _setupcommit(ui)
|
|
|
|
def extsetup(ui):
    """Extension hook run after all extensions are loaded.

    Wraps the clone/log/add/diff commands, the dirstate, and the sparse
    subcommand machinery, then teaches fsmonitor/hgwatchman (when enabled)
    to hash the sparse-aware ignore matcher.
    """
    _setupclone(ui)
    _setuplog(ui)
    _setupadd(ui)
    _setupdirstate(ui)
    _setupdiff(ui)
    _setupsubcommands(ui)
    # if fsmonitor is enabled, tell it to use our hash function
    try:
        fsmonitor = extensions.find('fsmonitor')
        def _hashignore(orig, ignore):
            return _hashmatcher(ignore)
        extensions.wrapfunction(fsmonitor, '_hashignore', _hashignore)
    except KeyError:
        # fsmonitor not enabled; nothing to wrap.
        pass
    # do the same for hgwatchman, old name
    try:
        hgwatchman = extensions.find('hgwatchman')
        def _hashignore(orig, ignore):
            return _hashmatcher(ignore)
        extensions.wrapfunction(hgwatchman, '_hashignore', _hashignore)
    except KeyError:
        # hgwatchman not enabled either.
        pass
|
|
|
|
|
|
|
|
def reposetup(ui, repo):
    """Extension hook run per repository: add sparse support to local repos.

    Repos without a dirstate (e.g. peer/remote repos) are skipped.
    """
    if not util.safehasattr(repo, 'dirstate'):
        return

    _wraprepo(ui, repo)
|
|
|
|
|
|
|
|
def replacefilecache(cls, propname, replacement):
    """Swap a filecache property for a wrapped version.

    Walks *cls* and its primary (first) base classes looking for *propname*;
    the first class that defines it gets the attribute replaced with
    ``replacement(original)``.  This allows changing the cache invalidation
    condition of a filecache property.

    Raises AttributeError when no class in the chain defines the property.
    """
    assert callable(replacement)
    searched = cls
    while searched is not object:
        if propname in searched.__dict__:
            original = searched.__dict__[propname]
            setattr(searched, propname, replacement(original))
            return
        # Only the first base is followed, mirroring the lookup used by
        # Mercurial's filecache machinery.
        searched = searched.__bases__[0]
    raise AttributeError(_("type '%s' has no property '%s'") % (cls,
                         propname))
|
|
|
|
|
|
|
|
def _setupupdates(ui):
    """Wrap merge/update entry points so they respect the sparse config."""

    def _calculateupdates(orig, repo, wctx, mctx, ancestors, branchmerge, *arg,
                          **kwargs):
        """Filter updates to only lay out files that match the sparse rules.
        """
        actions, diverge, renamedelete = orig(repo, wctx, mctx, ancestors,
                                              branchmerge, *arg, **kwargs)

        # If the working context is in memory (virtual), there's no need to
        # apply the user's sparse rules at all (and in fact doing so would
        # cause unexpected behavior in the real working copy).
        if not util.safehasattr(repo, 'sparsematch') or wctx.isinmemory():
            return actions, diverge, renamedelete

        files = set()
        prunedactions = {}
        oldrevs = [pctx.rev() for pctx in wctx.parents()]
        oldsparsematch = repo.sparsematch(*oldrevs)

        if branchmerge:
            # If we're merging, use the wctx filter, since we're merging into
            # the wctx.
            sparsematch = repo.sparsematch(wctx.parents()[0].rev())
        else:
            # If we're updating, use the target context's filter, since we're
            # moving to the target context.
            sparsematch = repo.sparsematch(mctx.rev())

        temporaryfiles = []
        for file, action in actions.iteritems():
            type, args, msg = action
            files.add(file)
            if sparsematch(file):
                # Inside the sparse checkout: keep the action unchanged.
                prunedactions[file] = action
            elif type == 'm':
                # Merge actions must happen even outside the checkout;
                # include the file temporarily.
                temporaryfiles.append(file)
                prunedactions[file] = action
            elif branchmerge:
                # During a branch merge, keep everything except 'keep'
                # actions, again via temporary inclusion.
                if type != 'k':
                    temporaryfiles.append(file)
                    prunedactions[file] = action
            elif type == 'f':
                # Forget actions are safe to apply outside the checkout.
                prunedactions[file] = action
            elif file in wctx:
                # File exists on disk but is now outside the sparse config:
                # schedule its removal.
                prunedactions[file] = ('r', args, msg)

        if len(temporaryfiles) > 0:
            ui.status(_("temporarily included %d file(s) in the sparse checkout"
                " for merging\n") % len(temporaryfiles))
            repo.addtemporaryincludes(temporaryfiles)

            # Add the new files to the working copy so they can be merged, etc
            actions = []
            message = 'temporarily adding to sparse checkout'
            wctxmanifest = repo[None].manifest()
            for file in temporaryfiles:
                if file in wctxmanifest:
                    fctx = repo[None][file]
                    actions.append((file, (fctx.flags(), False), message))

            typeactions = collections.defaultdict(list)
            typeactions['g'] = actions
            mergemod.applyupdates(repo, typeactions, repo[None], repo['.'],
                                  False)

            # Mark the temporarily-included files clean in the dirstate.
            dirstate = repo.dirstate
            for file, flags, msg in actions:
                dirstate.normal(file)

        profiles = repo.getactiveprofiles()
        changedprofiles = profiles & files
        # If an active profile changed during the update, refresh the checkout.
        # Don't do this during a branch merge, since all incoming changes should
        # have been handled by the temporary includes above.
        if changedprofiles and not branchmerge:
            mf = mctx.manifest()
            for file in mf:
                old = oldsparsematch(file)
                new = sparsematch(file)
                if not old and new:
                    # File entered the sparse checkout: fetch it.
                    flags = mf.flags(file)
                    prunedactions[file] = ('g', (flags, False), '')
                elif old and not new:
                    # File left the sparse checkout: remove it.
                    prunedactions[file] = ('r', [], '')

        return prunedactions, diverge, renamedelete

    extensions.wrapfunction(mergemod, 'calculateupdates', _calculateupdates)

    def _update(orig, repo, node, branchmerge, *args, **kwargs):
        results = orig(repo, node, branchmerge, *args, **kwargs)

        # If we're updating to a location, clean up any stale temporary includes
        # (ex: this happens during hg rebase --abort).
        if not branchmerge and util.safehasattr(repo, 'sparsematch'):
            repo.prunetemporaryincludes()
        return results

    extensions.wrapfunction(mergemod, 'update', _update)

    def _checkcollision(orig, repo, wmf, actions):
        if util.safehasattr(repo, 'sparsematch'):
            # Only check for collisions on files and directories in the
            # sparse profile
            wmf = wmf.matches(repo.sparsematch())
        return orig(repo, wmf, actions)

    extensions.wrapfunction(mergemod, '_checkcollision', _checkcollision)
|
|
|
|
|
2017-10-14 00:51:27 +03:00
|
|
|
def _setupcommit(ui):
    """Wrap commit finalization so profile changes refresh the checkout."""

    def _refreshoncommit(orig, self, node):
        """Refresh the checkout when commits touch .hgsparse
        """
        orig(self, node)

        # Use unfiltered to avoid computing hidden commits
        repo = self._repo.unfiltered()

        if util.safehasattr(repo, 'getsparsepatterns'):
            ctx = repo[node]
            profiles = repo.getsparsepatterns(ctx.rev()).profiles
            # Refresh only when the commit touched an active profile file.
            if set(profiles) & set(ctx.files()):
                origstatus = repo.status()
                origsparsematch = repo.sparsematch()
                _refresh(repo.ui, repo, origstatus, origsparsematch, True)

            # Temporary includes added for merging are no longer needed
            # once the commit lands.
            repo.prunetemporaryincludes()

    extensions.wrapfunction(context.committablectx, 'markcommitted',
                            _refreshoncommit)
|
|
|
|
|
|
|
|
def _setuplog(ui):
    """Add a --sparse flag to `hg log` limiting output to sparse files."""
    logentry = commands.table['^log|history']
    logentry[1].append(('', 'sparse', None,
                        "limit to changesets affecting the sparse checkout"))

    def _logrevs(orig, repo, opts):
        revs = orig(repo, opts)
        if not opts.get('sparse'):
            return revs
        # Keep only revisions touching at least one file inside the
        # current sparse checkout.
        sparsematch = repo.sparsematch()
        def ctxmatch(rev):
            return any(f for f in repo[rev].files() if sparsematch(f))
        return revs.filter(ctxmatch)

    extensions.wrapfunction(cmdutil, '_logrevs', _logrevs)
|
|
|
|
|
|
|
|
def _clonesparsecmd(orig, ui, repo, *args, **opts):
    """Wrapper for `hg clone` handling --include/--exclude/--enable-profile.

    At most one of the three sparse options may be used; when one is given,
    hg.updaterepo is wrapped so the sparse config is written before the
    initial checkout is laid out.
    """
    include_pat = opts.get('include')
    exclude_pat = opts.get('exclude')
    enableprofile_pat = opts.get('enable_profile')
    include = exclude = enableprofile = False
    # `pat` is only bound when one of the flags is set; the closure below is
    # installed only in that case, so the name is always defined when read.
    if include_pat:
        pat = include_pat
        include = True
    if exclude_pat:
        pat = exclude_pat
        exclude = True
    if enableprofile_pat:
        pat = enableprofile_pat
        enableprofile = True
    if sum([include, exclude, enableprofile]) > 1:
        raise error.Abort(_("too many flags specified."))
    if include or exclude or enableprofile:
        def clone_sparse(orig, self, node, overwrite, *args, **kwargs):
            # sparse clone is a special snowflake as in that case always
            # are outside of the repo's dir hierachy, yet we always want
            # to name our includes/excludes/enables using repo-root
            # relative paths
            overrides = {
                ('sparse', 'includereporootpaths'): True,
                ('sparse', 'enablereporootpaths'): True,
            }
            with self.ui.configoverride(overrides, 'sparse'):
                _config(self.ui, self.unfiltered(), pat, {}, include=include,
                        exclude=exclude, enableprofile=enableprofile)
            return orig(self, node, overwrite, *args, **kwargs)
        extensions.wrapfunction(hg, 'updaterepo', clone_sparse)
    return orig(ui, repo, *args, **opts)
|
|
|
|
|
|
|
|
def _setupclone(ui):
    """Register the sparse options on `hg clone` and wrap the command."""
    cloneentry = commands.table['^clone']
    for flagname, description in (
            ('enable-profile', 'enable a sparse profile'),
            ('include', 'include sparse pattern'),
            ('exclude', 'exclude sparse pattern')):
        cloneentry[1].append(('', flagname, [], description))
    extensions.wrapcommand(commands.table, 'clone', _clonesparsecmd)
|
|
|
|
|
|
|
|
def _setupadd(ui):
    """Teach `hg add` an -s/--sparse flag including added files' directories."""
    addentry = commands.table['^add']
    addentry[1].append(('s', 'sparse', None,
        'also include directories of added files in sparse config'))

    def _add(orig, ui, repo, *pats, **opts):
        if opts.get('sparse'):
            # Include the parent directory of every added pattern in the
            # sparse config before performing the add itself.
            parentdirs = {util.split(pat)[0] for pat in pats}
            _config(ui, repo, list(parentdirs), opts, include=True)
        return orig(ui, repo, *pats, **opts)

    extensions.wrapcommand(commands.table, 'add', _add)
|
|
|
|
|
|
|
|
def _setupdirstate(ui):
    """Modify the dirstate to prevent stat'ing excluded files,
    and to prevent modifications to files outside the checkout.
    """

    def _dirstate(orig, repo):
        # Give the dirstate a backreference to the repo so the wrappers
        # below can reach the sparse matcher.
        dirstate = orig(repo)
        dirstate.repo = repo
        return dirstate
    extensions.wrapfunction(
        localrepo.localrepository.dirstate, 'func', _dirstate)

    # The atrocity below is needed to wrap dirstate._ignore. It is a cached
    # property, which means normal function wrapping doesn't work.
    class ignorewrapper(object):
        def __init__(self, orig):
            self.orig = orig
            self.origignore = None
            self.func = None
            self.sparsematch = None

        def __get__(self, obj, type=None):
            repo = obj.repo
            origignore = self.orig.__get__(obj)
            if not util.safehasattr(repo, 'sparsematch'):
                # Not a sparse-enabled repo: behave like the original.
                return origignore

            # Files outside the sparse checkout are treated as ignored;
            # the union matcher is rebuilt only when either component
            # changed since the last access.
            sparsematch = repo.sparsematch()
            if self.sparsematch != sparsematch or self.origignore != origignore:
                self.func = unionmatcher([origignore,
                                          negatematcher(sparsematch)])
                self.sparsematch = sparsematch
                self.origignore = origignore
            return self.func

        def __set__(self, obj, value):
            return self.orig.__set__(obj, value)

        def __delete__(self, obj):
            return self.orig.__delete__(obj)

    replacefilecache(dirstate.dirstate, '_ignore', ignorewrapper)

    # dirstate.rebuild should not add non-matching files
    def _rebuild(orig, self, parent, allfiles, changedfiles=None):
        if util.safehasattr(self.repo, 'sparsematch'):
            matcher = self.repo.sparsematch()
            allfiles = allfiles.matches(matcher)
            if changedfiles:
                changedfiles = [f for f in changedfiles if matcher(f)]

            if changedfiles is not None:
                # In _rebuild, these files will be deleted from the dirstate
                # when they are not found to be in allfiles
                dirstatefilestoremove = set(f for f in self if not matcher(f))
                changedfiles = dirstatefilestoremove.union(changedfiles)

        return orig(self, parent, allfiles, changedfiles)
    extensions.wrapfunction(dirstate.dirstate, 'rebuild', _rebuild)

    # Prevent adding files that are outside the sparse checkout
    editfuncs = ['normal', 'add', 'normallookup', 'copy', 'remove', 'merge']
    hint = _('include file with `hg sparse --include <pattern>` or use ' +
             '`hg add -s <file>` to include file directory while adding')
    for func in editfuncs:
        # The closure does not reference the loop variable `func`, so
        # Python's late binding is not an issue here.
        def _wrapper(orig, self, *args):
            repo = self.repo
            if util.safehasattr(repo, 'sparsematch'):
                dirstate = repo.dirstate
                sparsematch = repo.sparsematch()
                for f in args:
                    if (f is not None and not sparsematch(f) and
                            f not in dirstate):
                        raise error.Abort(_("cannot add '%s' - it is outside "
                                            "the sparse checkout") % f,
                                          hint=hint)
            return orig(self, *args)
        extensions.wrapfunction(dirstate.dirstate, func, _wrapper)
|
|
|
|
|
|
|
|
def _setupdiff(ui):
    """Add a -s/--sparse flag to `hg diff` limiting output to sparse files."""
    entry = commands.table['^diff']
    entry[1].append(('s', 'sparse', None,
                     'only show changes in files in the sparse config'))

    def workingfilectxdata(orig, self):
        try:
            # Try lookup working copy first.
            return orig(self)
        except IOError:
            # Then try working copy parent if the file is outside sparse.
            if util.safehasattr(self._repo, 'sparsematch'):
                sparsematch = self._repo.sparsematch()
                if not sparsematch(self._path):
                    basectx = self._changectx._parents[0]
                    return basectx[self._path].data()
            raise
    extensions.wrapfunction(context.workingfilectx, 'data', workingfilectxdata)

    # wrap trydiff to filter diffs if '--sparse' is set
    def trydiff(orig, repo, revs, ctx1, ctx2, modified, added, removed,
                copy, getfilectx, opts, losedatafn, prefix, relroot):
        sparsematch = repo.sparsematch()
        modified = filter(sparsematch, modified)
        added = filter(sparsematch, added)
        removed = filter(sparsematch, removed)
        copy = dict((d, s) for d, s in copy.items() if sparsematch(s))
        return orig(repo, revs, ctx1, ctx2, modified, added, removed,
                    copy, getfilectx, opts, losedatafn, prefix, relroot)

    def diff(orig, ui, repo, *pats, **opts):
        issparse = bool(opts.get('sparse'))
        # The trydiff wrapper is installed only for the duration of this
        # command invocation; the finally block guarantees it is removed.
        if issparse:
            extensions.wrapfunction(patch, 'trydiff', trydiff)
        try:
            orig(ui, repo, *pats, **opts)
        finally:
            if issparse:
                extensions.unwrapfunction(patch, 'trydiff', trydiff)
    extensions.wrapcommand(commands.table, 'diff', diff)
|
|
|
|
|
2018-03-26 18:44:42 +03:00
|
|
|
def _setupsubcommands(ui):
    """Wire `hg sparse <subcommand>` support into help, lookup, and parsing.

    Relies on the module-level `subcmd` and `subcmdtable` objects defined
    elsewhere in this file.
    """
    # hg help sparse <subcommand> needs to be acceptable
    def helpacceptmultiplenames(orig, ui, *names, **opts):
        name = ' '.join(names) if names else None
        return orig(ui, name, **opts)

    # hg help should include subcommands
    def helpsubcommands(orig, self, name, subtopic=None):
        rst = orig(self, name, subtopic)

        cmd, hassub, sub = name.partition(' ')
        if cmd == 'sparse':
            if hassub and rst[0] == 'hg %s\n' % sub:
                # subcommand help, patch first line
                rst[0] = 'hg %s\n' % name

            if not self.ui.quiet:
                subcmdsrst = subcmd.subcmdsrst(self.ui.verbose)
                # in verbose mode there is an extra line we want to keep at the
                # end.
                pos = len(rst) if self.ui.verbose else -1
                rst[pos:pos] = [subcmdsrst]

        return rst

    # when looking for subcommands, have cmdutil.findpossible find them
    def findpossible(orig, cmd, table, strict=False):
        # NOTE: the local `subcmd` string shadows the module-level `subcmd`
        # object inside this function.
        maincmd, hassub, subcmd = cmd.partition(' ')
        if hassub and maincmd == 'sparse':
            res = orig(subcmd, subcmdtable, strict)
            if subcmd in res[0]:
                # reslot as the full command, including the first alias
                res[0][cmd] = res[0].pop(subcmd)
                res[0][cmd][0][0] = cmd
            return res
        return orig(cmd, table, strict)

    # when parsing shelve command options, add the switches from subcommands
    def subcommandopts(orig, args, options, *posargs, **kwargs):
        sparseopts = cmdtable['^sparse'][1]
        if options[:len(sparseopts)] == sparseopts: # parsing sparse options
            return subcmd.parseargs(orig, args, options, *posargs, **kwargs)
        return orig(args, options, *posargs, **kwargs)

    extensions.wrapcommand(commands.table, 'help', helpacceptmultiplenames)
    extensions.wrapfunction(help._helpdispatch, 'helpcmd', helpsubcommands)
    extensions.wrapfunction(cmdutil, 'findpossible', findpossible)
    extensions.wrapfunction(fancyopts, 'fancyopts', subcommandopts)
|
2018-03-26 18:44:42 +03:00
|
|
|
|
2018-03-29 12:34:01 +03:00
|
|
|
@attr.s(frozen=True, slots=True, cmp=False)
class SparseConfig(object):
    """Immutable value object describing a parsed sparse configuration."""
    # Path of the file this config was read from (None when there is no
    # active config file).
    path = attr.ib()
    # Include/exclude patterns, normalized to frozensets.
    includes = attr.ib(convert=frozenset)
    excludes = attr.ib(convert=frozenset)
    # Names of %include'd profiles, normalized to a tuple.
    profiles = attr.ib(convert=tuple)
    # Free-form key/value data from the [metadata] section.
    metadata = attr.ib(default=attr.Factory(dict))

    def __iter__(self):
        # The metadata field is deliberately not included
        for field in (self.includes, self.excludes, self.profiles):
            yield field
|
|
|
|
|
2017-10-14 00:51:27 +03:00
|
|
|
def _wraprepo(ui, repo):
|
2018-03-14 16:49:45 +03:00
|
|
|
# metadata parsing expression
|
|
|
|
metadata_key_value = re.compile(r'(?P<key>.*)\s*[:=]\s*(?P<value>.*)')
|
|
|
|
|
2017-10-14 00:51:27 +03:00
|
|
|
class SparseRepo(repo.__class__):
|
2018-03-14 16:49:43 +03:00
|
|
|
        def readsparseconfig(self, raw, filename=None):
            """Takes a string sparse config and returns a SparseConfig

            This object contains the includes, excludes, and profiles from the
            raw profile.

            The filename is used to report errors and warnings.
            """
            filename = filename or '<sparse profile>'
            metadata = {}
            # Tracks the key of the most recent [metadata] entry, so indented
            # continuation lines can be appended to it.
            last_key = None
            includes = set()
            excludes = set()

            # Section headers map directly to the container they fill.
            sections = {
                '[include]': includes,
                '[exclude]': excludes,
                '[metadata]': metadata,
            }
            current = includes  # no sections == includes

            profiles = []

            for i, line in enumerate(raw.splitlines(), start=1):
                stripped = line.strip()
                if not stripped or stripped.startswith(('#', ';')):
                    # empty or comment line, skip
                    continue

                if stripped.startswith('%include '):
                    # include another profile
                    stripped = stripped[9:].strip()
                    if stripped:
                        profiles.append(stripped)
                    continue

                if stripped in sections:
                    if sections[stripped] is includes and current is excludes:
                        raise error.Abort(_(
                            'A sparse file cannot have includes after excludes '
                            'in %s:%i') % (filename, i))
                    current = sections[stripped]
                    continue

                if current is metadata:
                    # Metadata parsing, INI-style format
                    if line.startswith((' ', '\t')): # continuation
                        if last_key is None:
                            self.ui.warn(_(
                                'warning: sparse profile [metadata] section '
                                'indented lines that do not belong to a '
                                'multi-line entry, ignoring, in %s:%i\n') % (
                                filename, i))
                            continue
                        key, value = last_key, stripped
                    else:
                        match = metadata_key_value.match(stripped)
                        if match is None:
                            self.ui.warn(_(
                                'warning: sparse profile [metadata] section '
                                'does not appear to have a valid option '
                                'definition, ignoring, in %s:%i\n') % (
                                filename, i))
                            last_key = None
                            continue
                        key, value = (
                            s.strip() for s in match.group('key', 'value'))
                        # A repeated key restarts its value list.
                        metadata[key] = []

                    metadata[key].append(value)
                    last_key = key
                    continue

                # inclusion or exclusion line
                if stripped.startswith('/'):
                    self.ui.warn(_(
                        'warning: sparse profile cannot use paths starting '
                        'with /, ignoring %s, in %s:%i\n') % (
                        line, filename, i))
                    continue
                # Note: the original (unstripped) line is stored as the
                # pattern.
                current.add(line)

            # Collapse multi-line metadata values into single strings.
            metadata = {key: '\n'.join(value).strip()
                        for key, value in metadata.items()}
            return SparseConfig(
                filename, includes, excludes, profiles, metadata)
|
2017-10-14 00:51:27 +03:00
|
|
|
|
2018-03-29 12:34:01 +03:00
|
|
|
def getsparsepatterns(self, rev, config=None):
|
2018-03-13 15:02:35 +03:00
|
|
|
"""Produce the full sparse config for a revision as a SparseConfig
|
|
|
|
|
|
|
|
This includes all patterns from included profiles, transitively.
|
|
|
|
|
2018-03-29 12:34:01 +03:00
|
|
|
if config is None, use the active profile, in .hg/sparse
|
|
|
|
|
2017-10-14 00:51:27 +03:00
|
|
|
"""
|
2017-11-20 17:40:41 +03:00
|
|
|
# Use unfiltered to avoid computing hidden commits
|
2017-10-14 00:51:27 +03:00
|
|
|
if rev is None:
|
2018-03-29 12:34:01 +03:00
|
|
|
raise error.Abort(
|
|
|
|
_("cannot parse sparse patterns from working copy"))
|
2017-10-14 00:51:27 +03:00
|
|
|
|
2018-03-29 12:34:01 +03:00
|
|
|
repo = self.unfiltered()
|
|
|
|
if config is None:
|
|
|
|
if not self.vfs.exists('sparse'):
|
|
|
|
return SparseConfig(None, set(), set(), [])
|
|
|
|
|
|
|
|
raw = self.vfs.read('sparse')
|
|
|
|
config = self.readsparseconfig(
|
|
|
|
raw, filename=self.vfs.join('sparse'))
|
|
|
|
|
2018-04-10 13:37:14 +03:00
|
|
|
# create copies, as these datastructures are updated further on
|
|
|
|
includes, excludes, profiles = (
|
|
|
|
set(config.includes), set(config.excludes),
|
|
|
|
list(config.profiles)
|
|
|
|
)
|
2017-10-14 00:51:27 +03:00
|
|
|
|
2017-11-20 17:40:41 +03:00
|
|
|
ctx = repo[rev]
|
2017-10-14 00:51:27 +03:00
|
|
|
if profiles:
|
|
|
|
visited = set()
|
|
|
|
while profiles:
|
|
|
|
profile = profiles.pop()
|
|
|
|
if profile in visited:
|
|
|
|
continue
|
|
|
|
visited.add(profile)
|
|
|
|
|
|
|
|
try:
|
2018-04-10 13:37:19 +03:00
|
|
|
raw = self.getrawprofile(profile, ctx.hex())
|
2017-10-14 00:51:27 +03:00
|
|
|
except error.ManifestLookupError:
|
|
|
|
msg = (
|
|
|
|
"warning: sparse profile '%s' not found "
|
|
|
|
"in rev %s - ignoring it\n" % (profile, ctx))
|
|
|
|
if self.ui.configbool('sparse', 'missingwarning'):
|
|
|
|
self.ui.warn(msg)
|
|
|
|
else:
|
|
|
|
self.ui.debug(msg)
|
|
|
|
continue
|
2018-03-13 15:02:35 +03:00
|
|
|
pincludes, pexcludes, subprofs = (
|
2018-03-14 16:49:43 +03:00
|
|
|
self.readsparseconfig(raw, filename=profile))
|
2017-10-14 00:51:27 +03:00
|
|
|
includes.update(pincludes)
|
|
|
|
excludes.update(pexcludes)
|
|
|
|
for subprofile in subprofs:
|
|
|
|
profiles.append(subprofile)
|
|
|
|
|
|
|
|
profiles = visited
|
|
|
|
|
|
|
|
if includes:
|
|
|
|
includes.add('.hg*')
|
2018-03-29 12:34:01 +03:00
|
|
|
return SparseConfig(
|
|
|
|
'<aggregated from %s>'.format(config.path),
|
|
|
|
includes, excludes, profiles)
|
2017-10-14 00:51:27 +03:00
|
|
|
|
|
|
|
        def getrawprofile(self, profile, changeid):
            """Return the raw contents of *profile* at *changeid*.

            When the simplecache extension is enabled, results are memoized
            under a key derived from the profile path and the changeset hash;
            otherwise the filelog is read directly.
            """
            repo = self.unfiltered()
            try:
                simplecache = extensions.find('simplecache')

                # Use unfiltered to avoid computing hidden commits
                node = repo[changeid].hex()
                def func():
                    return repo.filectx(profile, changeid=changeid).data()
                key = 'sparseprofile:%s:%s' % (profile.replace('/', '__'), node)
                return simplecache.memoize(func, key,
                                           simplecache.stringserializer, self.ui)
            except KeyError:
                # simplecache extension is not enabled; read directly.
                return repo.filectx(profile, changeid=changeid).data()
|
2017-10-14 00:51:27 +03:00
|
|
|
|
2018-04-10 13:37:13 +03:00
|
|
|
        def _sparsesignature(self, includetemp=True, config=None, revs=()):
            """Returns the signature string representing the contents of the
            current project sparse configuration. This can be used to cache the
            sparse matcher for a given set of revs."""
            signaturecache = self.signaturecache
            # Cache under the explicit config's path when one is given,
            # otherwise under the on-disk active config.
            sigkey = config.path if config else '.hg/sparse'
            signature = signaturecache.get(sigkey)
            if includetemp:
                tempsignature = signaturecache.get('tempsignature')
            else:
                tempsignature = 0

            if signature is None or (includetemp and tempsignature is None):
                signature = 0
                if config is None:
                    try:
                        sparsedata = self.vfs.read('sparse')
                        signature = hashlib.sha1(sparsedata).hexdigest()
                    except (OSError, IOError):
                        # No active sparse file; signature stays 0.
                        pass
                else:
                    # Hash the profile contents across the requested revs.
                    sha1 = hashlib.sha1()
                    for r in revs:
                        try:
                            sha1.update(self.getrawprofile(config.path, r))
                            signature = sha1.hexdigest()
                        except KeyError:
                            # Profile missing in this rev; skip it.
                            pass
                signaturecache[sigkey] = signature

                tempsignature = 0
                if includetemp:
                    try:
                        tempsparsepath = self.vfs.read('tempsparse')
                        tempsignature = hashlib.sha1(tempsparsepath).hexdigest()
                    except (OSError, IOError):
                        # No temporary includes file present.
                        pass
                    signaturecache['tempsignature'] = tempsignature
            return '%s:%s' % (signature, tempsignature)
|
2017-10-14 00:51:27 +03:00
|
|
|
|
|
|
|
        def invalidatecaches(self):
            # The signature cache depends on on-disk state; clear it along
            # with the standard repository caches.
            self.invalidatesignaturecache()
            return super(SparseRepo, self).invalidatecaches()
|
|
|
|
|
|
|
|
        def invalidatesignaturecache(self):
            # Drop all memoized sparse-config signatures.
            self.signaturecache.clear()
|
|
|
|
|
|
|
|
        def sparsematch(self, *revs, **kwargs):
            """Returns the sparse match function for the given revs

            If multiple revs are specified, the match function is the union
            of all the revs.

            `includetemp` is used to indicate if the temporarily included file
            should be part of the matcher.

            `config` can be used to specify a different sparse profile
            from the default .hg/sparse active profile

            """
            # Delegate to the cached implementation; drop the cache key.
            return self._sparsematch_and_key(*revs, **kwargs)[0]
|
|
|
|
|
|
|
|
        def _sparsematch_and_key(self, *revs, **kwargs):
            """Implementation of sparsematch() with the cache key included.

            This lets us reuse the key elsewhere without having to hit each
            profile file twice.
            """
            if not revs or revs == (None,):
                # Default to the dirstate parents, ignoring a null parent.
                revs = [self.changelog.rev(node) for node in
                        self.dirstate.parents() if node != nullid]

            includetemp = kwargs.get('includetemp', True)
            config = kwargs.get('config')
            signature = self._sparsesignature(
                includetemp=includetemp, config=config, revs=revs)

            # Cache key combines the config signature with the rev set.
            key = '%s:%s' % (signature, ':'.join([str(r) for r in revs]))

            result = self.sparsecache.get(key, None)
            if result:
                return result, key

            matchers = []
            for rev in revs:
                try:
                    includes, excludes, profiles = self.getsparsepatterns(
                        rev, config)

                    if includes or excludes:
                        # Explicitly include subdirectories of includes so
                        # status will walk them down to the actual include.
                        subdirs = set()
                        for include in includes:
                            dirname = os.path.dirname(include)
                            # basename is used to avoid issues with absolute
                            # paths (which on Windows can include the drive).
                            while os.path.basename(dirname):
                                subdirs.add(dirname)
                                dirname = os.path.dirname(dirname)

                        matcher = matchmod.match(self.root, '', [],
                                                 include=includes,
                                                 exclude=excludes,
                                                 default='relpath')
                        if subdirs:
                            matcher = forceincludematcher(matcher, subdirs)
                        matchers.append(matcher)
                except IOError:
                    # Unreadable sparse config for this rev: contribute no
                    # matcher.
                    pass

            result = None
            if not matchers:
                # No sparse config anywhere: everything matches.
                result = matchmod.always(self.root, '')
            elif len(matchers) == 1:
                result = matchers[0]
            else:
                result = unionmatcher(matchers)

            if kwargs.get('includetemp', True):
                # Layer the temporarily included files on top.
                tempincludes = self.gettemporaryincludes()
                result = forceincludematcher(result, tempincludes)

            self.sparsecache[key] = result

            return result, key
|
2017-10-14 00:51:27 +03:00
|
|
|
|
|
|
|
def getactiveprofiles(self):
    """Return the set of sparse profiles active in the dirstate parents."""
    # Work on the unfiltered repo so we never trigger hidden-commit
    # computation just to resolve the working-copy parents.
    unfi = self.unfiltered()
    parentrevs = (unfi.changelog.rev(n)
                  for n in unfi.dirstate.parents() if n != nullid)

    active = set()
    for parentrev in parentrevs:
        active.update(self.getsparsepatterns(parentrev).profiles)
    return active
|
|
|
|
|
|
|
|
def writesparseconfig(self, include, exclude, profiles):
    """Persist the local sparse configuration to .hg/sparse.

    Profiles are written as %include directives, followed by the
    [include] and [exclude] pattern sections; the signature cache is
    invalidated afterwards so the new rules take effect.
    """
    parts = []
    for profile in sorted(profiles):
        parts.append('%%include %s\n' % profile)
    parts.append('[include]\n')
    parts.append('\n'.join(sorted(include)))
    parts.append('\n[exclude]\n')
    parts.append('\n'.join(sorted(exclude)))
    parts.append('\n')
    self.vfs.write("sparse", ''.join(parts))
    self.invalidatesignaturecache()
|
|
|
|
|
|
|
|
def addtemporaryincludes(self, files):
    """Add *files* to the set of temporarily-included sparse paths."""
    updated = self.gettemporaryincludes()
    updated.update(files)
    self._writetemporaryincludes(updated)
|
|
|
|
|
|
|
|
def gettemporaryincludes(self):
    """Return the set of temporarily-included paths (empty if none).

    Reads .hg/tempsparse, one path per line.
    """
    if not self.vfs.exists('tempsparse'):
        return set()
    return set(self.vfs.read('tempsparse').split('\n'))
|
|
|
|
|
|
|
|
def _writetemporaryincludes(self, includes):
|
|
|
|
raw = '\n'.join(sorted(includes))
|
|
|
|
self.vfs.write('tempsparse', raw)
|
|
|
|
self.invalidatesignaturecache()
|
|
|
|
|
|
|
|
def prunetemporaryincludes(self):
    """Drop temporarily included files that fall outside the sparse rules.

    Temporary includes are added during merge/rebase so those operations
    can see files outside the sparse checkout; once the working copy is
    clean again they are removed from disk and from the dirstate.  Does
    nothing while there are pending changes, to avoid losing work.

    BUG FIX: the original body referenced the undefined names ``repo``
    and ``ui``; as a method of the repo class these must be ``self`` and
    ``self.ui``.
    """
    if self.vfs.exists('tempsparse'):
        origstatus = self.status()
        modified, added, removed, deleted, a, b, c = origstatus
        if modified or added or removed or deleted:
            # Still have pending changes. Don't bother trying to prune.
            return

        # Match against the permanent rules only (includetemp=False) so we
        # can spot files kept solely by a temporary include.
        sparsematch = self.sparsematch(includetemp=False)
        dirstate = self.dirstate
        actions = []
        dropped = []
        tempincludes = self.gettemporaryincludes()
        for file in tempincludes:
            if file in dirstate and not sparsematch(file):
                message = 'dropping temporarily included sparse files'
                actions.append((file, None, message))
                dropped.append(file)

        # 'r' (remove) actions delete the files from the working copy.
        typeactions = collections.defaultdict(list)
        typeactions['r'] = actions
        mergemod.applyupdates(self, typeactions, self[None], self['.'],
                              False)

        # Fix dirstate
        for file in dropped:
            dirstate.drop(file)

        self.vfs.unlink('tempsparse')
        self.invalidatesignaturecache()
        msg = _("cleaned up %d temporarily added file(s) from the "
                "sparse checkout\n")
        self.ui.status(msg % len(tempincludes))
|
|
|
|
|
|
|
|
if 'dirstate' in repo._filecache:
|
|
|
|
repo.dirstate.repo = repo
|
|
|
|
repo.sparsecache = {}
|
|
|
|
repo.signaturecache = {}
|
|
|
|
repo.__class__ = SparseRepo
|
|
|
|
|
2017-11-10 17:39:11 +03:00
|
|
|
# A profile is either active, inactive or included; the latter is a profile
# included (transitively) by an active profile.
# NOTE: ProfileInfo.checkactive validates values against _profile_flags with
# identity (``is``) checks, so these must stay the objects from this range().
PROFILE_INACTIVE, PROFILE_ACTIVE, PROFILE_INCLUDED = _profile_flags = range(3)
|
|
|
|
|
|
|
|
@attr.s(slots=True, frozen=True)
class ProfileInfo(collections.Mapping):
    """Immutable description of one sparse profile.

    Behaves as a read-only mapping over the profile's [metadata] section.
    """
    # Path of the profile, relative to the repository root.
    path = attr.ib()
    # One of the PROFILE_* flags defined above.
    active = attr.ib()
    # Key/value pairs from the profile's [metadata] section.
    _metadata = attr.ib(default=attr.Factory(dict))

    @active.validator
    def checkactive(self, attribute, value):
        # Identity comparison: only the exact PROFILE_* constants are valid.
        if not any(value is flag for flag in _profile_flags):
            raise ValueError('Invalid active flag value')

    # Mapping methods for metadata access
    def __getitem__(self, key):
        return self._metadata[key]

    def __iter__(self):
        return iter(self._metadata)

    def __len__(self):
        return len(self._metadata)
|
|
|
|
|
2018-03-29 12:34:06 +03:00
|
|
|
def _discover(ui, repo, include_hidden=False):
    """Generate a list of available profiles with metadata

    Returns a generator yielding ProfileInfo objects, paths are relative to the
    repository root, the sequence is sorted by path.

    If no sparse.profile_directory path is configured, will only
    yield active and included profiles.

    README(.*) files are filtered out.

    If `include_hidden` is False, we filter out any profile with a 'hidden'
    entry in the profile metadata (unless it is currently active).
    """
    # Profiles pulled in (transitively) by the working copy parents.
    included = repo.getactiveprofiles()
    # Profiles explicitly enabled in the local .hg/sparse config.
    sparse = repo.vfs.read('sparse')
    active = repo.readsparseconfig(sparse).profiles
    active = set(active)

    profile_directory = ui.config('sparse', 'profile_directory')
    available = set()
    if profile_directory is not None:
        # The directory must stay inside the repository; reject absolute
        # paths and paths escaping the root.
        if (os.path.isabs(profile_directory) or
                profile_directory.startswith('../')):
            raise error.Abort(
                _('sparse.profile_directory must be relative to the '
                  'repository root'))
        if not profile_directory.endswith('/'):
            profile_directory += '/'

        ctx = repo['.']
        mf = ctx.manifest()

        # Any committed file under the profile directory counts, except
        # README files.
        matcher = matchmod.match(
            repo.root, repo.getcwd(),
            patterns=['path:' + profile_directory],
            exclude=['relglob:README.*', 'relglob:README'])
        available.update(mf.matches(matcher))

    # sort profiles and read profile metadata as we iterate
    for p in sorted(available | included):
        raw = repo.getrawprofile(p, '.')
        md = repo.readsparseconfig(raw, filename=p).metadata
        if 'hidden' not in md or include_hidden or p in active or p in included:
            yield ProfileInfo(
                p, (PROFILE_ACTIVE if p in active else
                    PROFILE_INCLUDED if p in included else
                    PROFILE_INACTIVE),
                md)
|
2017-11-10 17:39:11 +03:00
|
|
|
|
2018-03-29 12:34:01 +03:00
|
|
|
def _profilesizeinfo(ui, repo, *config, **kwargs):
    """Get size stats for a given set of profiles

    Returns a dictionary of config -> (count, bytes) tuples. The
    special key `None` represents the total manifest count and
    bytecount. bytes is the total size of the files.

    Note: for performance reasons we don't calculate the total repository size
    and the value for the `None` key is always set to (count, None) to reflect
    this.
    """
    # Use the simplecache extension when available to memoise results
    # across runs; otherwise fall back to no-op cache functions.
    try:
        cache = extensions.find('simplecache')
        cacheget = functools.partial(
            cache.cacheget, serializer=cache.jsonserializer, ui=ui)
        cacheset = functools.partial(
            cache.cacheset, serializer=cache.jsonserializer, ui=ui)
    except KeyError:
        cacheget = cacheset = lambda *args: None

    collectsize = kwargs.get('collectsize', False)

    results = {}
    # config -> matcher, only for configs with no cached result
    matchers = {}
    # config -> cache key under which a freshly computed result is stored
    to_store = {}

    rev = kwargs.get('rev', '.')
    ctx = scmutil.revsingle(repo, rev)

    templ = 'sparseprofilestats:%s:{}' % util.split(repo.root)[-1]
    def _genkey(path, *parts):
        # paths need to be ascii-safe with
        path = path.replace('/', '__')
        return templ.format(':'.join((path,) + parts))

    key = _genkey('unfiltered', ctx.hex())
    cached = cacheget(key)
    results[None] = cached if cached else [0, None]
    if cached is None:
        # gather complete working copy data
        matchers[None] = matchmod.always(repo.root, repo.root)
        to_store[None] = key

    for c in config:
        matcher, key = repo._sparsematch_and_key(
            ctx.hex(), includetemp=False, config=c)
        key = _genkey(c.path, key, str(collectsize))
        cached = cacheget(key)
        if not cached and not collectsize:
            # if not collecting the full size, but we have a cached copy
            # for a full run, use the file count from that
            cached = cacheget(_genkey(c.path, key, 'True'))
            cached = cached and [cached[0], 0]
        results[c] = cached or [0, 0]
        if cached is None:
            matchers[c] = matcher
            to_store[c] = key

    if matchers:
        mf = ctx.manifest()
        if results[None][0]:
            # use cached working copy size
            totalfiles = results[None][0]
        else:
            with progress.spinner(ui, 'calculating total manifest size'):
                try:
                    totalfiles = len(mf)
                except TypeError:
                    # treemanifest does not implement __len__ :-(
                    totalfiles = sum(1 for __ in mf)

        if collectsize and len(matchers) - (None in matchers):
            # we may need to prefetch file data, to calculate the size of each
            # profile
            try:
                remotefilelog = extensions.find('remotefilelog')
            except KeyError:
                pass
            else:
                if remotefilelog.shallowrepo.requirement in repo.requirements:
                    profilematchers = unionmatcher(
                        [matchers[k] for k in matchers if k])
                    # BUG FIX: changectx has no hash() method; use the hex
                    # node id (as elsewhere in this function) to build the
                    # revset for prefetching.
                    repo.prefetch(
                        repo.revs(ctx.hex()), matcher=profilematchers)

        with progress.bar(ui, _('calculating'), total=totalfiles) as prog:
            # only matchers for which there was no cache are processed
            for file in ctx.walk(unionmatcher(matchers.values())):
                prog.value += 1
                for c, matcher in matchers.items():
                    if matcher(file):
                        results[c][0] += 1
                        if collectsize and c is not None:
                            results[c][1] += ctx.filectx(file).size()

    results = {k: tuple(v) for k, v in results.items()}
    for c, key in to_store.items():
        cacheset(key, results[c])

    return results
|
|
|
|
|
2017-10-14 00:51:27 +03:00
|
|
|
@command('^sparse', [
    ('f', 'force', False, _('allow changing rules even with pending changes')),
    ('I', 'include', False, _('include files in the sparse checkout '
                              '(DEPRECATED)')),
    ('X', 'exclude', False, _('exclude files in the sparse checkout '
                              '(DEPRECATED)')),
    ('d', 'delete', False, _('delete an include/exclude rule '
                             '(DEPRECATED)')),
    ('', 'enable-profile', False, _('enables the specified profile '
                                    '(DEPRECATED)')),
    ('', 'disable-profile', False, _('disables the specified profile '
                                     '(DEPRECATED)')),
    ('', 'import-rules', False, _('imports rules from a file (DEPRECATED)')),
    ('', 'clear-rules', False, _('clears local include/exclude rules '
                                 '(DEPRECATED)')),
    ('', 'refresh', False, _('updates the working after sparseness changes '
                             '(DEPRECATED)')),
    ('', 'reset', False, _('makes the repo full again (DEPRECATED)')),
    ('', 'cwd-list', False, _('list the full contents of the current '
                              'directory (DEPRECATED)')),
    ] + commands.templateopts,
    _('[--OPTION] SUBCOMMAND ...'))
def sparse(ui, repo, *pats, **opts):
    """make the current checkout sparse, or edit the existing checkout

    The sparse command is used to make the current checkout sparse.
    This means files that don't meet the sparse condition will not be
    written to disk, or show up in any working copy operations. It does
    not affect files in history in any way.

    All the work is done in subcommands such as `hg sparse enableprofile`;
    passing no subcommand prints the currently applied sparse rules.

    The `include` and `exclude` subcommands are used to add and remove files
    from the sparse checkout, while delete removes an existing include/exclude
    rule.

    Sparse profiles can also be shared with other users of the repository by
    committing a file with include and exclude rules in a separate file. Use the
    `enableprofile` and `disableprofile` subcommands to enable or disable
    such profiles. Changes to shared profiles are not applied until they have
    been committed.

    See :hg:`help sparse <subcommand>` to get additional information.

    .. container:: verbose

        Sparse file format
        ------------------

        Structure
        .........

        Shared sparse profile files comprise 4 sections: `%include` directives
        that pull in another sparse profile, and `[metadata]`, `[include]` and
        `[exclude]` sections.

        Any line starting with a `;` or `#` character is a comment and is ignored.

        Extending existing profiles
        ...........................

        `%include <absolute path>` directives (one per line) let you extend
        an existing profile file, adding more include and exclude rules. Although
        this directive can appear anywhere in the file, it is recommended you
        keep these at the top of the file.

        Metadata
        ........

        The `[metadata]` section lets you specify key-value pairs for the profile.
        Anything before the first `:` or `=` is the key, everything after is the
        value. Values can be extended over multiple lines by indenting additional
        lines.

        Only the `title`, `description` and `hidden` keys carry meaning for
        `hg sparse`, these are used in the `hg sparse list` and
        `hg sparse explain` commands. Profiles with the `hidden` key (regardless
        of its value) are excluded from the `hg sparse list` listing unless
        the `-v` / `--verbose` switch is given.

        Include and exclude rules
        .........................

        Each line in the `[include]` and `[exclude]` sections is treated as a
        standard pattern, see :hg:`help patterns`. Exclude rules override include
        rules.

        Example
        .......

        ::

            # this profile extends another profile, incorporating all its rules
            %include some/base/profile

            [metadata]
            title: This is an example sparse profile
            description: You can include as much metadata as makes sense for your
              setup, and values can extend over multiple lines.
            lorem ipsum = Keys and values are separated by a : or =
            ; hidden: the hidden key lets you mark profiles that should not
            ; generally be discoverable. The value doesn't matter, use it to motivate
            ; why it is hidden.

            [include]
            foo/bar/baz
            bar/python_project/**/*.py

            [exclude]
            ; exclude rules override include rules, so all files with the extension
            ; .ignore are excluded from this sparse profile.
            foo/bar/baz/*.ignore

        Configuration options
        ---------------------

        The following config option defines whether sparse treats supplied
        paths as relative to repo root or to the current working dir for
        include and exclude options:

            [sparse]
            includereporootpaths = off

        The following config option defines whether sparse treats supplied
        paths as relative to repo root or to the current working dir for
        enableprofile and disableprofile options:

            [sparse]
            enablereporootpaths = on

        You can configure a path to find sparse profiles in; this path is
        used to discover available sparse profiles. Nested directories are
        reflected in the UI.

            [sparse]
            profile_directory = tools/scm/sparse

        It is not set by default.
    """
    # Subcommand form ("hg sparse <subcmd> ..."): dispatch and return.
    cmd = subcmd.parse(pats, opts)
    if cmd is not None:
        return cmd(ui, repo)

    # Legacy flag form: exactly one of the deprecated flags may be given.
    include = opts.get('include')
    exclude = opts.get('exclude')
    force = opts.get('force')
    enableprofile = opts.get('enable_profile')
    disableprofile = opts.get('disable_profile')
    importrules = opts.get('import_rules')
    clearrules = opts.get('clear_rules')
    delete = opts.get('delete')
    refresh = opts.get('refresh')
    reset = opts.get('reset')
    cwdlist = opts.get('cwd_list')
    count = sum([include, exclude, enableprofile, disableprofile, delete,
                 importrules, refresh, clearrules, reset, cwdlist])
    if count > 1:
        raise error.Abort(_("too many flags specified"))

    # No flags at all: print the current sparse configuration.
    if count == 0:
        if repo.vfs.exists('sparse'):
            ui.status(repo.vfs.read("sparse") + "\n")
            temporaryincludes = repo.gettemporaryincludes()
            if temporaryincludes:
                ui.status(_("Temporarily Included Files (for merge/rebase):\n"))
                ui.status(("\n".join(temporaryincludes) + "\n"))
        else:
            ui.status(_('repo is not sparse\n'))
        return

    if include or exclude or delete or reset or enableprofile or disableprofile:
        _config(ui, repo, pats, opts, include=include, exclude=exclude,
                reset=reset, delete=delete, enableprofile=enableprofile,
                disableprofile=disableprofile, force=force)

    if importrules:
        _import(ui, repo, pats, opts, force=force)

    if clearrules:
        _clear(ui, repo, pats, force=force)

    if refresh:
        with repo.wlock():
            c = _refresh(ui, repo, repo.status(), repo.sparsematch(), force)
            fcounts = map(len, c)
            _verbose_output(ui, opts, 0, 0, 0, *fcounts)

    if cwdlist:
        _cwdlist(repo)
|
|
|
|
|
2018-03-23 00:34:29 +03:00
|
|
|
# subcommands for the hg sparse command line
class subcmdfunc(registrar._funcregistrarbase):
    """Register a function to be invoked for "hg sparse <thing>" subcommands

    Help info is taken from the function docstring, or can be set explicitly
    with the help='...' keyword argument.

    Per-subcommand options are specified with the options keyword, which
    takes the same format as the options table for commands.
    """
    def __init__(self, table=None):
        if table is None:
            # List commands in registration order
            table = collections.OrderedDict()
        super(subcmdfunc, self).__init__(table)

    def _doregister(self, func, name, options=(), synopsis=None, help=None):
        # Each subcommand name may only be registered once.
        if name in self._table:
            msg = 'duplicate registration for name: "%s"' % name
            raise error.ProgrammingError(msg)

        # Bind the subcommand name as the first positional argument so a
        # single implementation can serve several registrations.
        @functools.wraps(func)
        def dispatch(*args, **kwargs):
            return func(name, *args, **kwargs)

        if help is not None:
            dispatch.__doc__ = help

        # Table entry: (dispatch, options[, synopsis])
        registration = dispatch, tuple(options)
        if synopsis:
            registration += (synopsis,)

        self._table[name] = registration

        return func

    def subcmdsrst(self, verbose=False):
        """Produce a table of subcommands"""
        def cmdhelp():
            for name, entry in self._table.items():
                doc = pycompat.getdoc(entry[0])
                doc, __, rest = doc.strip().partition('\n')
                if verbose and rest.strip():
                    doc = '{} - {}'.format(doc, rest.strip())
                yield (name, doc)
        rst = ['\n%s:\n\n' % _('subcommands')]
        rst += minirst.maketable(list(cmdhelp()), 1)
        return ''.join(rst)

    def parseargs(self, parser, args, options, *posargs, **kwargs):
        # Splice the per-subcommand options into the global options table
        # before delegating to the standard options parser.
        subcmd = args[0] if args else None
        if subcmd in self._table:
            options = options + list(self._table[subcmd][1])
        try:
            return parser(args, options, *posargs, **kwargs)
        except pycompat.getopt.GetoptError as ex:
            # Attribute the error to the subcommand, not to "hg sparse".
            if subcmd in self._table:
                raise error.CommandError('sparse {}'.format(subcmd), ex)
            raise

    def parse(self, args, opts):
        # Return a callable wrapping the registered subcommand, or None if
        # the first positional argument is not a known subcommand.
        if not args or args[0] not in self._table:
            return

        name, args = args[0], args[1:]
        def callsubcmd(ui, repo, *moreargs, **kw):
            opts.update(kw)
            return self._table[name][0](ui, repo, *(moreargs + args), **opts)
        return callsubcmd
|
|
|
|
|
2018-03-28 17:15:11 +03:00
|
|
|
# Table mapping subcommand name -> (dispatch, options[, synopsis]), kept in
# registration order; populated through the @subcmd decorator below.
subcmdtable = collections.OrderedDict()
subcmd = subcmdfunc(subcmdtable)
|
2018-03-23 00:34:29 +03:00
|
|
|
|
|
|
|
@subcmd('list')
def _listprofiles(cmd, ui, repo, *pats, **opts):
    """List available sparse profiles

    Show all available sparse profiles, with the active profiles marked.
    However, if a profile has a key named `hidden` in its metadata, the profile
    is excluded from this list unless explicitly active or included in an active
    profile, or when the `--verbose` switch is used.

    """
    chars = {PROFILE_INACTIVE: '', PROFILE_INCLUDED: '~', PROFILE_ACTIVE: '*'}
    labels = {
        PROFILE_INACTIVE: 'inactive',
        PROFILE_INCLUDED: 'included',
        PROFILE_ACTIVE: 'active',
    }
    ui.pager('sparse list')
    with ui.formatter('sparse', opts) as fm:
        if fm.isplain():
            ui.write_err(
                _('symbols: * = active profile, ~ = transitively '
                  'included\n'),
                label='sparse.profile.legend')

        profiles = list(_discover(ui, repo, include_hidden=ui.verbose))
        # BUG FIX: max() raises ValueError on an empty sequence; bail out
        # when no profiles are available at all.
        if not profiles:
            return
        max_width = max(len(p.path) for p in profiles)

        for info in profiles:
            fm.startitem()
            label = 'sparse.profile.' + labels[info.active]
            fm.plain('%-1s ' % chars[info.active], label=label)
            fm.data(active=labels[info.active], metadata=dict(info))
            fm.write(b'path', '%-{}s'.format(max_width), info.path, label=label)
            if 'title' in info:
                fm.plain(' - %s' % info.get('title', b''), label=label)
            fm.plain('\n')
|
2017-11-10 17:39:11 +03:00
|
|
|
|
2018-04-10 13:37:19 +03:00
|
|
|
@subcmd('explain', [
    ('r', 'rev', '', _('explain the profile(s) against the specified revision'),
     _('REV')),
    ] + commands.templateopts,
    '[OPTION]... [PROFILE]...')
def _explainprofile(cmd, ui, repo, *profiles, **opts):
    """Show information on individual profiles

    If --verbose is given, calculates the file size impact of a profile (slow).
    """
    if ui.plain() and not opts.get('template'):
        hint = _('invoke with -T/--template to control output format')
        raise error.Abort(_('must specify a template in plain mode'), hint=hint)

    if not profiles:
        raise error.Abort(_('no profiles specified'))

    rev = scmutil.revrange(repo, [opts.get('rev') or '.']).last()
    if rev is None:
        raise error.Abort(_('empty revision set'))

    # BUG FIX: initialise the exit code *before* the loop below; it used to
    # be reset to 0 after the loop, silently discarding the 255 recorded
    # for profiles that could not be found.
    exitcode = 0

    configs = []
    for i, p in enumerate(profiles):
        try:
            raw = repo.getrawprofile(p, rev)
        except KeyError:
            ui.warn(_('The profile %s was not found\n') % p)
            exitcode = 255
            continue
        profile = repo.readsparseconfig(raw, p)
        configs.append(profile)

    stats = _profilesizeinfo(
        ui, repo, *configs, rev=rev, collectsize=ui.verbose)
    filecount, totalsize = stats[None]

    def sortedsets(d):
        # Template/JSON output needs a stable ordering; sets become sorted
        # lists, anything else passes through unchanged.
        return {
            k: sorted(v) if isinstance(v, collections.Set) else v
            for k, v in d.items()}

    ui.pager('sparse explain')
    with ui.formatter('sparse', opts) as fm:
        for i, profile in enumerate(configs):
            if i:
                fm.plain('\n')
            fm.startitem()

            fm.write('path', '%s\n\n', profile.path)

            pfilecount, ptotalsize = stats.get(profile, (-1, -1))
            pfileperc = 0.0
            if pfilecount > -1 and filecount > 0:
                pfileperc = (pfilecount / filecount) * 100
            profilestats = {
                'filecount': pfilecount, 'filecountpercentage': pfileperc
            }
            if ptotalsize:
                profilestats['totalsize'] = ptotalsize
            fm.data(
                stats=profilestats,
                **sortedsets(attr.asdict(profile, retain_collection_types=True))
            )

            if fm.isplain():
                md = profile.metadata
                title = md.get('title', _('(untitled)'))
                lines = [
                    minirst.section(title)
                ]
                description = md.get('description')
                if description:
                    lines.append('%s\n\n' % description)

                if pfileperc or ptotalsize:
                    lines.append(minirst.subsection(
                        _('Size impact compared to a full checkout')))

                    if pfileperc:
                        lines.append(':file count: {:d} ({:.2f}%)\n'.format(
                            pfilecount, pfileperc))
                    if ptotalsize:
                        lines.append(':total size: {:s}\n'.format(
                            util.bytecount(ptotalsize)))
                    lines.append('\n')

                other = md.viewkeys() - {'title', 'description'}
                if other:
                    lines += (
                        minirst.subsection(_('Additional metadata')),
                        ''.join(
                            [':%s: %s\n' % (
                                key, '\n '.join(md[key].splitlines()))
                             for key in sorted(other)]),
                        '\n')

                sections = (
                    ('profiles', _('Profiles included')),
                    ('includes', _('Inclusion rules')),
                    ('excludes', _('Exclusion rules')),
                )

                for attrib, label in sections:
                    section = getattr(profile, attrib)
                    if not section:
                        continue
                    lines += (minirst.subsection(label), '::\n\n')
                    lines += (' %s\n' % entry for entry in sorted(section))
                    lines += ('\n',)

                textwidth = ui.configint('ui', 'textwidth')
                termwidth = ui.termwidth() - 2
                if not (0 < textwidth <= termwidth):
                    textwidth = termwidth
                fm.plain(minirst.format(''.join(lines), textwidth))

    return exitcode
|
|
|
|
|
2018-03-29 12:34:02 +03:00
|
|
|
@subcmd('files')
def _listfilessubcmd(cmd, ui, repo, *profiles, **opts):
    """List all files included in a profile

    If files are given to match, this command only prints the names of the
    files in a profile that match those patterns.

    """
    if not profiles:
        raise error.Abort(_('no profiles specified'))

    # First positional argument is the profile; the rest are match patterns.
    profile, files = profiles[0], profiles[1:]
    try:
        raw = repo.getrawprofile(profile, '.')
    except KeyError:
        raise error.Abort(_('The profile %s was not found\n') % profile)

    config = repo.readsparseconfig(raw, profile)
    ctx = repo['.']
    # Restrict the profile matcher to the user-supplied patterns (if any).
    matcher = matchmod.intersectmatchers(
        matchmod.match(repo.root, repo.getcwd(), files),
        repo.sparsematch(ctx.hex(), includetemp=False, config=config))

    # Exit code 1 when nothing matched, 0 as soon as one file is printed.
    exitcode = 1
    ui.pager('sparse listfiles')
    with ui.formatter('files', opts) as fm:
        for f in ctx.matches(matcher):
            fm.startitem()
            fm.data(abspath=f)
            fm.write('path', '%s\n', matcher.rel(f))
            exitcode = 0
    return exitcode
|
|
|
|
|
2018-03-29 12:34:04 +03:00
|
|
|
_details = '''\n
|
|
|
|
The effects of adding or deleting an include or exclude rule are applied
|
|
|
|
immediately. If applying the new rule would cause a file with pending
|
|
|
|
changes to be added or removed, the command will fail. Pass --force to
|
|
|
|
force a rule change even with pending changes (the changes on disk will
|
|
|
|
be preserved).
|
|
|
|
'''
|
|
|
|
|
2018-03-26 18:44:44 +03:00
|
|
|
@subcmd('reset', help=_('makes the repo full again'))
@subcmd('disableprofile', help=_('disables the specified profile'))
@subcmd('enableprofile', help=_('enables the specified profile'))
@subcmd('delete', help=_('delete an include/exclude rule' + _details))
@subcmd('exclude', help=_('exclude files in the sparse checkout' + _details))
@subcmd('include', help=_('include files in the sparse checkout' + _details))
def _configsubcmd(cmd, ui, repo, *pats, **opts):
    # Shared implementation for all config-editing subcommands: the
    # registered subcommand name arrives as ``cmd`` and is forwarded to
    # _config() as the matching keyword flag (e.g. reset=True).
    _config(ui, repo, pats, opts, force=opts.get('force'), **{cmd: True})
|
|
|
|
|
|
|
|
@subcmd('importrules')
def _importsubcmd(cmd, ui, repo, *pats, **opts):
    """Directly import sparse profile rules

    Accepts a path to a file containing rules in the .hgsparse format.

    This allows you to add *include*, *exclude* and *enable* rules
    in bulk. Like the include, exclude and enable subcommands, the
    changes are applied immediately.

    """
    force = opts.get('force')
    _import(ui, repo, pats, opts, force=force)
|
|
|
|
|
|
|
|
@subcmd('clear')
def _clearsubcmd(cmd, ui, repo, *pats, **opts):
    """Clear local sparse rules

    Removes all local include and exclude rules, while leaving
    any enabled profiles in place.
    """
    # `pats` is forwarded but _clear never reads its `files` argument;
    # only --force is honoured.
    _clear(ui, repo, pats, force=opts.get('force'))
|
|
|
|
|
|
|
|
@subcmd('refresh')
def _refreshsubcmd(cmd, ui, repo, *pats, **opts):
    """Refreshes the files on disk based on the sparse rules

    This is only necessary if .hg/sparse was changed by hand.
    """
    force = opts.get('force')
    with repo.wlock():
        # _refresh returns (added, dropped, lookup) file lists; report
        # their sizes.  The first three counters (profile/include/exclude
        # rule deltas) are zero because no rules were changed here.
        c = _refresh(ui, repo, repo.status(), repo.sparsematch(), force)
        fcounts = map(len, c)
        _verbose_output(ui, opts, 0, 0, 0, *fcounts)
|
|
|
|
|
|
|
|
@subcmd('cwd')
def _cwdsubcmd(cmd, ui, repo, *pats, **opts):
    """List all names in this directory

    The list includes any names that are excluded by the current sparse
    checkout; these are annotated with a hyphen ('-') before the name.
    """
    # Read-only listing; `pats` and `opts` are unused.
    _cwdlist(repo)
|
|
|
|
|
2017-10-14 00:51:27 +03:00
|
|
|
def _config(ui, repo, pats, opts, include=False, exclude=False, reset=False,
            delete=False, enableprofile=False, disableprofile=False,
            force=False):
    """
    Perform a sparse config update. Only one of the kwargs may be specified.

    The selected boolean keyword decides how the patterns in ``pats`` are
    applied to the local sparse config.  The new config is written out and
    the working directory refreshed; if the refresh fails (e.g. pending
    changes and not ``force``), the previous config is restored before the
    exception is re-raised.
    """
    # Use the context-manager form of wlock for consistency with _import
    # and _clear (the old wlock()/try/finally form behaved identically).
    with repo.wlock():
        oldsparsematch = repo.sparsematch()

        # Load the current on-disk sparse config (empty sets if absent).
        if repo.vfs.exists('sparse'):
            raw = repo.vfs.read('sparse')
            oldinclude, oldexclude, oldprofiles = map(
                set, repo.readsparseconfig(raw))
        else:
            oldinclude = set()
            oldexclude = set()
            oldprofiles = set()

        try:
            if reset:
                newinclude = set()
                newexclude = set()
                newprofiles = set()
            else:
                newinclude = set(oldinclude)
                newexclude = set(oldexclude)
                newprofiles = set(oldprofiles)

            if any(os.path.isabs(pat) for pat in pats):
                err = _('paths cannot be absolute')
                raise error.Abort(err)

            # Decide whether user-supplied patterns must be rebased from the
            # cwd onto the repo root (controlled by two config knobs).
            adjustpats = ((include or exclude or delete) and
                not ui.configbool('sparse', 'includereporootpaths', False))
            adjustpats |= ((enableprofile or disableprofile) and
                not ui.configbool('sparse', 'enablereporootpaths', True))
            if adjustpats:
                # supplied file patterns should be treated as relative
                # to current working dir, so we need to convert them first
                root, cwd = repo.root, repo.getcwd()
                abspats = []
                for kindpat in pats:
                    kind, pat = matchmod._patsplit(kindpat, None)
                    if kind in cwdrealtivepatkinds or kind is None:
                        kindpat = ((kind + ':' if kind else '') +
                                   pathutil.canonpath(root, cwd, pat))
                    abspats.append(kindpat)
                pats = abspats

            oldstatus = repo.status()
            if include:
                newinclude.update(pats)
            elif exclude:
                newexclude.update(pats)
            elif enableprofile:
                newprofiles.update(pats)
            elif disableprofile:
                newprofiles.difference_update(pats)
            elif delete:
                newinclude.difference_update(pats)
                newexclude.difference_update(pats)

            repo.writesparseconfig(newinclude, newexclude, newprofiles)
            fcounts = map(
                len, _refresh(ui, repo, oldstatus, oldsparsematch, force))

            # Net rule-count deltas (additions minus removals) for
            # --verbose / templated output.
            profilecount = (len(newprofiles - oldprofiles) -
                            len(oldprofiles - newprofiles))
            includecount = (len(newinclude - oldinclude) -
                            len(oldinclude - newinclude))
            excludecount = (len(newexclude - oldexclude) -
                            len(oldexclude - newexclude))
            _verbose_output(
                ui, opts, profilecount, includecount, excludecount, *fcounts)
        except Exception:
            # Roll back to the previous sparse config on any failure so the
            # on-disk config matches the (unchanged) working copy.
            repo.writesparseconfig(oldinclude, oldexclude, oldprofiles)
            raise
|
|
|
|
|
|
|
|
def _import(ui, repo, files, opts, force=False):
    """Import sparse rules from the given rule files into the local config.

    Only rules that are not already active via the working copy parents'
    sparse patterns are added.  On refresh failure the previous config is
    restored before re-raising.
    """
    with repo.wlock():
        # load union of current active profile
        revs = [repo.changelog.rev(node) for node in
                repo.dirstate.parents() if node != nullid]

        # read current configuration
        raw = ''
        if repo.vfs.exists('sparse'):
            raw = repo.vfs.read('sparse')
        oincludes, oexcludes, oprofiles = repo.readsparseconfig(raw)
        includes, excludes, profiles = map(
            set, (oincludes, oexcludes, oprofiles))

        # all active rules
        aincludes, aexcludes, aprofiles = set(), set(), set()
        for rev in revs:
            rincludes, rexcludes, rprofiles = repo.getsparsepatterns(rev)
            aincludes.update(rincludes)
            aexcludes.update(rexcludes)
            aprofiles.update(rprofiles)

        # import rules on top; only take in rules that are not yet
        # part of the active rules.
        changed = False
        for file in files:
            with util.posixfile(util.expandpath(file)) as importfile:
                iincludes, iexcludes, iprofiles = repo.readsparseconfig(
                    importfile.read(), filename=file)
                # Detect whether this file contributed anything new by
                # comparing total rule counts before and after the update.
                oldsize = len(includes) + len(excludes) + len(profiles)
                includes.update(iincludes - aincludes)
                excludes.update(iexcludes - aexcludes)
                profiles.update(set(iprofiles) - aprofiles)
                if len(includes) + len(excludes) + len(profiles) > oldsize:
                    changed = True

        profilecount = includecount = excludecount = 0
        fcounts = (0, 0, 0)

        if changed:
            profilecount = len(profiles - aprofiles)
            includecount = len(includes - aincludes)
            excludecount = len(excludes - aexcludes)

            oldstatus = repo.status()
            oldsparsematch = repo.sparsematch()
            repo.writesparseconfig(includes, excludes, profiles)

            try:
                fcounts = map(
                    len, _refresh(ui, repo, oldstatus, oldsparsematch, force))
            except Exception:
                # Restore the pre-import config if the refresh failed.
                repo.writesparseconfig(oincludes, oexcludes, oprofiles)
                raise

        _verbose_output(ui, opts, profilecount, includecount, excludecount,
                        *fcounts)
|
|
|
|
|
|
|
|
def _clear(ui, repo, files, force=False):
    """Drop all local include/exclude rules, keeping enabled profiles.

    ``files`` is accepted for signature compatibility but is not used.
    """
    with repo.wlock():
        raw = repo.vfs.read('sparse') if repo.vfs.exists('sparse') else ''
        includes, excludes, profiles = repo.readsparseconfig(raw)

        # Nothing to do when no local rules exist.
        if not (includes or excludes):
            return

        previousstatus = repo.status()
        previousmatch = repo.sparsematch()
        repo.writesparseconfig(set(), set(), profiles)
        _refresh(ui, repo, previousstatus, previousmatch, force)
|
|
|
|
|
|
|
|
def _refresh(ui, repo, origstatus, origsparsematch, force):
    """Refreshes which files are on disk by comparing the old status and
    sparsematch with the new sparsematch.

    Will raise an exception if a file with pending changes is being excluded
    or included (unless force=True).

    Returns a tuple of (added, dropped, lookup) file lists.
    """
    modified, added, removed, deleted, unknown, ignored, clean = origstatus

    # Verify there are no pending changes
    pending = set()
    pending.update(modified)
    pending.update(added)
    pending.update(removed)
    sparsematch = repo.sparsematch()
    abort = False
    if len(pending) > 0:
        ui.note(_('verifying pending changes for refresh\n'))
    for file in pending:
        if not sparsematch(file):
            ui.warn(_("pending changes to '%s'\n") % file)
            # with force=True we still warn about every dirty file but
            # never abort
            abort = not force
    if abort:
        raise error.Abort(_("could not update sparseness due to " +
                            "pending changes"))

    # Calculate actions
    ui.note(_('calculating actions for refresh\n'))
    with progress.spinner(ui, 'populating file set'):
        dirstate = repo.dirstate
        ctx = repo['.']
        added = []
        lookup = []
        dropped = []
        mf = ctx.manifest()
        files = set(mf)

    # merge-style action map: file -> (action code, args, message),
    # consumed by mergemod.applyupdates below
    actions = {}

    with progress.bar(ui, _('calculating'), total=len(files)) as prog:
        for file in files:
            prog.value += 1

            old = origsparsematch(file)
            new = sparsematch(file)
            # Add files that are newly included, or that don't exist in
            # the dirstate yet.
            if (new and not old) or (old and new and not file in dirstate):
                fl = mf.flags(file)
                if repo.wvfs.exists(file):
                    # file already present on disk: record it for a later
                    # pending-changes check instead of overwriting it
                    actions[file] = ('e', (fl,), '')
                    lookup.append(file)
                else:
                    actions[file] = ('g', (fl, False), '')
                    added.append(file)
            # Drop files that are newly excluded, or that still exist in
            # the dirstate.
            elif ((old and not new)
                  or (not (old or new) and file in dirstate)):
                dropped.append(file)
                if file not in pending:
                    actions[file] = ('r', [], '')

    # Verify there are no pending changes in newly included files
    if len(lookup) > 0:
        ui.note(_('verifying no pending changes in newly included files\n'))
    abort = False
    for file in lookup:
        ui.warn(_("pending changes to '%s'\n") % file)
        abort = not force
    if abort:
        raise error.Abort(_("cannot change sparseness due to " +
                            "pending changes (delete the files or use --force " +
                            "to bring them back dirty)"))

    # Check for files that were only in the dirstate.
    for file, state in dirstate.iteritems():
        if not file in files:
            old = origsparsematch(file)
            new = sparsematch(file)
            if old and not new:
                dropped.append(file)

    # Apply changes to disk
    if len(actions) > 0:
        ui.note(_('applying changes to disk (%d actions)\n') % len(actions))
    # one bucket per merge action code expected by applyupdates
    typeactions = dict((m, [])
                       for m in 'a f g am cd dc r dm dg m e k p pr'.split())

    with progress.bar(ui, _('applying'), total=len(actions)) as prog:
        for f, (m, args, msg) in actions.iteritems():
            prog.value += 1
            if m not in typeactions:
                typeactions[m] = []
            typeactions[m].append((f, args, msg))
        mergemod.applyupdates(repo, typeactions, repo[None], repo['.'], False)

    # Fix dirstate
    filecount = len(added) + len(dropped) + len(lookup)
    if filecount > 0:
        ui.note(_('updating dirstate\n'))
    with progress.bar(ui, _('recording'), _('files'), filecount) as prog:
        for file in added:
            prog.value += 1
            dirstate.normal(file)

        for file in dropped:
            prog.value += 1
            dirstate.drop(file)

        for file in lookup:
            prog.value += 1
            # File exists on disk, and we're bringing it back in an unknown
            # state.
            dirstate.normallookup(file)

    return added, dropped, lookup
|
|
|
|
|
|
|
|
def _verbose_output(ui, opts, profilecount, includecount, excludecount, added,
                    dropped, lookup):
    """Produce --verbose and templatable output

    This specifically enables -Tjson, providing machine-readable stats on how
    the sparse profile changed.
    """
    verbose = ui.verbose
    with ui.formatter('sparse', opts) as fm:
        fm.startitem()
        fm.condwrite(verbose, 'profiles_added', 'Profile # change: %d\n',
                     profilecount)
        fm.condwrite(verbose, 'include_rules_added',
                     'Include rule # change: %d\n', includecount)
        fm.condwrite(verbose, 'exclude_rules_added',
                     'Exclude rule # change: %d\n', excludecount)
        # In 'plain' verbose mode, mergemod.applyupdates already outputs what
        # files are added or removed outside of the templating formatter
        # framework. No point in repeating ourselves in that case.
        if fm.isplain():
            return
        fm.condwrite(verbose, 'files_added', 'Files added: %d\n', added)
        fm.condwrite(verbose, 'files_dropped', 'Files dropped: %d\n', dropped)
        fm.condwrite(verbose, 'files_conflicting',
                     'Files conflicting: %d\n', lookup)
|
|
|
|
|
|
|
|
def _cwdlist(repo):
    """ List the contents in the current directory. Annotate
    the files in the sparse profile.
    """
    ctx = repo['.']
    mf = ctx.manifest()

    # Get the root of the repo so that we remove the content of
    # the root from the current working directory
    root = repo.root
    cwd = util.normpath(pycompat.getcwd())
    cwd = os.path.relpath(cwd, root)
    # Normalize to '' for the repo root, otherwise keep a trailing
    # separator so the prefix-stripping below is clean.
    cwd = '' if cwd == os.curdir else cwd + pycompat.ossep
    if cwd.startswith(os.pardir + pycompat.ossep):
        # the cwd lies outside the repository
        raise error.Abort(
            _("the current working directory should begin "
              "with the root %s") % root)

    # All manifest files that live under the current directory.
    matcher = matchmod.match(
        repo.root, repo.getcwd(),
        patterns=['path:' + cwd])
    files = mf.matches(matcher)

    sparsematch = repo.sparsematch(ctx.rev())
    checkedoutentries = set()
    allentries = set()
    cwdlength = len(cwd)

    for filepath in files:
        # First path component relative to cwd: either a file name or an
        # immediate subdirectory name.
        entryname = filepath[cwdlength:].partition(pycompat.ossep)[0]

        allentries.add(entryname)
        if sparsematch(filepath):
            checkedoutentries.add(entryname)

    ui = repo.ui
    for entry in sorted(allentries):
        # '-' marks entries excluded by the current sparse checkout
        marker = ' ' if entry in checkedoutentries else '-'
        ui.status("%s %s\n" % (marker, entry))
|
|
|
|
|
2018-03-29 12:34:01 +03:00
|
|
|
class forceincludematcher(matchmod.basematcher):
    """A matcher that returns true for any of the forced includes before
    testing against the actual matcher."""

    def __init__(self, matcher, includes):
        super(forceincludematcher, self).__init__(matcher._root, matcher._cwd)
        self._matcher = matcher
        # `includes` is a collection of paths that always match.
        self._includes = includes

    def __call__(self, value):
        return value in self._includes or self._matcher(value)

    def visitdir(self, dir):
        # Visit any directory that is a prefix of a forced include so the
        # forced paths stay reachable during tree traversal.
        # (Was `any(True for path ... if path.startswith(dir))` —
        # simplified to the direct idiom with identical semantics.)
        if any(path.startswith(dir) for path in self._includes):
            return True
        return self._matcher.visitdir(dir)

    def hash(self):
        # Stable identity for caching: the wrapped matcher's hash combined
        # with the sorted forced includes, NUL-terminated to avoid
        # concatenation ambiguity.
        sha1 = hashlib.sha1()
        sha1.update(_hashmatcher(self._matcher))
        for include in sorted(self._includes):
            sha1.update(include + '\0')
        return sha1.hexdigest()
|
|
|
|
|
2018-03-29 12:34:01 +03:00
|
|
|
class unionmatcher(matchmod.unionmatcher):
    """Union matcher extended with a stable content hash for caching."""

    def hash(self):
        hasher = hashlib.sha1()
        for submatcher in self._matchers:
            hasher.update(_hashmatcher(submatcher))
        return hasher.hexdigest()
|
|
|
|
|
2018-03-29 12:34:01 +03:00
|
|
|
class negatematcher(matchmod.basematcher):
    """Matcher that inverts the result of the matcher it wraps."""

    def __init__(self, matcher):
        super(negatematcher, self).__init__(matcher._root, matcher._cwd)
        self._matcher = matcher

    def __call__(self, value):
        matched = self._matcher(value)
        return not matched

    def hash(self):
        # Prefix with 'negate' so this hash differs from the wrapped
        # matcher's own hash.
        hasher = hashlib.sha1()
        hasher.update('negate')
        hasher.update(_hashmatcher(self._matcher))
        return hasher.hexdigest()
|
|
|
|
|
|
|
|
def _hashmatcher(matcher):
    """Return a stable hex digest identifying *matcher*.

    Matchers implementing their own ``hash()`` method are delegated to;
    anything else falls back to hashing its repr().
    """
    if util.safehasattr(matcher, 'hash'):
        return matcher.hash()

    hasher = hashlib.sha1()
    hasher.update(repr(matcher))
    return hasher.hexdigest()
|