mirror of
https://github.com/facebook/sapling.git
synced 2024-10-11 09:17:30 +03:00
05edd145db
Summary: Update `fbsparse.reposetup()` to avoid attempting to wrap the localrepo object in Eden repositories. The fbsparse extension is normally disabled in Eden repositories, but this ensures the code does the correct thing even if the fbsparse extension has been explicitly loaded. Reviewed By: ryanmce Differential Revision: D7642679 fbshipit-source-id: d0a229091d794fb326ab1db1c01e5a0a271092c4
1991 lines
74 KiB
Python
1991 lines
74 KiB
Python
# fbsparse.py - allow sparse checkouts of the working directory
|
|
#
|
|
# Copyright 2014 Facebook, Inc.
|
|
#
|
|
# This software may be used and distributed according to the terms of the
|
|
# GNU General Public License version 2 or any later version.
|
|
|
|
"""allow sparse checkouts of the working directory
|
|
"""
|
|
|
|
from __future__ import division
|
|
|
|
from mercurial import (
|
|
util,
|
|
cmdutil,
|
|
extensions,
|
|
context,
|
|
dirstate,
|
|
commands,
|
|
fancyopts,
|
|
progress,
|
|
localrepo,
|
|
error,
|
|
help,
|
|
hg,
|
|
hintutil,
|
|
minirst,
|
|
pathutil,
|
|
registrar,
|
|
patch,
|
|
pycompat,
|
|
scmutil,
|
|
)
|
|
from mercurial import match as matchmod
|
|
from mercurial import merge as mergemod
|
|
from mercurial.node import nullid
|
|
from mercurial.i18n import _
|
|
from mercurial.thirdparty import attr
|
|
import os, collections, functools, hashlib
|
|
import re
|
|
|
|
# Command registration table for this extension; populated via @command below.
cmdtable = {}
command = registrar.command(cmdtable)
# Marker telling Mercurial's extension loader this ships with fb-hgext and
# should not emit "untested extension" warnings.
testedwith = 'ships-with-fb-hgext'

# Pattern kinds that are interpreted relative to the cwd rather than the
# repo root.  (Name has a typo — "realtive" — but is kept as-is because it
# is referenced elsewhere in this module.)
cwdrealtivepatkinds = ('glob', 'relpath')
|
|
|
|
def uisetup(ui):
    """Install the update- and commit-time hooks for sparse checkouts."""
    for installer in (_setupupdates, _setupcommit):
        installer(ui)
|
|
|
|
def extsetup(ui):
    """Wire sparse support into commands and cooperating extensions."""
    _setupclone(ui)
    _setuplog(ui)
    _setupadd(ui)
    _setupdirstate(ui)
    _setupdiff(ui)
    _setupsubcommands(ui)
    # If fsmonitor (or its old name, hgwatchman) is enabled, teach it to use
    # our matcher hash so its ignore cache tracks the sparse config.
    for extname in ('fsmonitor', 'hgwatchman'):
        try:
            ext = extensions.find(extname)
        except KeyError:
            continue

        def _hashignore(orig, ignore):
            return _hashmatcher(ignore)

        extensions.wrapfunction(ext, '_hashignore', _hashignore)
|
|
|
|
def reposetup(ui, repo):
    """Wrap a local repository object with sparse-checkout support.

    Skips repositories without a dirstate (e.g. remote peers) and Eden
    repositories, where Eden itself fetches only what is needed and the
    sparse machinery must not be layered on top.
    """
    if not util.safehasattr(repo, 'dirstate'):
        return

    # The sparse extension should never be enabled in Eden repositories;
    # Eden automatically only fetches the parts of the repository that are
    # actually required.
    if 'eden' in repo.requirements:
        return

    _wraprepo(ui, repo)
|
|
|
|
def replacefilecache(cls, propname, replacement):
    """Replace a filecache property with a new class. This allows changing the
    cache invalidation condition.

    Walks the (single-inheritance) base chain of ``cls`` until the class that
    actually defines ``propname`` is found, then replaces the descriptor on
    that class with ``replacement(original_descriptor)``.

    Raises AttributeError if no class in the chain defines ``propname``.
    """
    origcls = cls
    assert callable(replacement)
    current = cls
    while current is not object:
        existing = current.__dict__
        if propname in existing:
            setattr(current, propname, replacement(existing[propname]))
            break
        # filecache properties live on single-inheritance repo classes, so
        # only the first base needs to be followed.
        current = current.__bases__[0]

    if current is object:
        raise AttributeError(_("type '%s' has no property '%s'") % (origcls,
                             propname))
|
|
|
|
def _setupupdates(ui):
    """Wrap merge/update entry points so checkouts obey the sparse config."""

    def _calculateupdates(orig, repo, wctx, mctx, ancestors, branchmerge, *arg,
                          **kwargs):
        """Filter updates to only lay out files that match the sparse rules.
        """
        actions, diverge, renamedelete = orig(repo, wctx, mctx, ancestors,
                                              branchmerge, *arg, **kwargs)

        # If the working context is in memory (virtual), there's no need to
        # apply the user's sparse rules at all (and in fact doing so would
        # cause unexpected behavior in the real working copy).
        if not util.safehasattr(repo, 'sparsematch') or wctx.isinmemory():
            return actions, diverge, renamedelete

        files = set()
        prunedactions = {}
        oldrevs = [pctx.rev() for pctx in wctx.parents()]
        oldsparsematch = repo.sparsematch(*oldrevs)

        if branchmerge:
            # If we're merging, use the wctx filter, since we're merging into
            # the wctx.
            sparsematch = repo.sparsematch(wctx.parents()[0].rev())
        else:
            # If we're updating, use the target context's filter, since we're
            # moving to the target context.
            sparsematch = repo.sparsematch(mctx.rev())

        # Files outside the sparse config are dropped from the action list;
        # merge-related actions instead force the file in temporarily.
        temporaryfiles = []
        for file, action in actions.iteritems():
            type, args, msg = action
            files.add(file)
            if sparsematch(file):
                prunedactions[file] = action
            elif type == 'm':
                # merge action: must be materialized to merge at all
                temporaryfiles.append(file)
                prunedactions[file] = action
            elif branchmerge:
                # during a branch merge, keep every non-'k' (keep) action and
                # temporarily include the file so the merge can complete
                if type != 'k':
                    temporaryfiles.append(file)
                    prunedactions[file] = action
            elif type == 'f':
                # forget actions are always safe to apply
                prunedactions[file] = action
            elif file in wctx:
                # file exists in the working copy but is now outside the
                # sparse config: remove it from disk
                prunedactions[file] = ('r', args, msg)

        if len(temporaryfiles) > 0:
            ui.status(_("temporarily included %d file(s) in the sparse checkout"
                " for merging\n") % len(temporaryfiles))
            repo.addtemporaryincludes(temporaryfiles)

            # Add the new files to the working copy so they can be merged, etc
            actions = []
            message = 'temporarily adding to sparse checkout'
            wctxmanifest = repo[None].manifest()
            for file in temporaryfiles:
                if file in wctxmanifest:
                    fctx = repo[None][file]
                    actions.append((file, (fctx.flags(), False), message))

            typeactions = collections.defaultdict(list)
            typeactions['g'] = actions
            mergemod.applyupdates(repo, typeactions, repo[None], repo['.'],
                                  False)

            # mark the just-materialized files clean in the dirstate
            dirstate = repo.dirstate
            for file, flags, msg in actions:
                dirstate.normal(file)

        profiles = repo.getactiveprofiles()
        changedprofiles = profiles & files
        # If an active profile changed during the update, refresh the checkout.
        # Don't do this during a branch merge, since all incoming changes should
        # have been handled by the temporary includes above.
        if changedprofiles and not branchmerge:
            mf = mctx.manifest()
            for file in mf:
                old = oldsparsematch(file)
                new = sparsematch(file)
                if not old and new:
                    # newly inside the sparse config: get it
                    flags = mf.flags(file)
                    prunedactions[file] = ('g', (flags, False), '')
                elif old and not new:
                    # newly outside the sparse config: remove it
                    prunedactions[file] = ('r', [], '')

        return prunedactions, diverge, renamedelete

    extensions.wrapfunction(mergemod, 'calculateupdates', _calculateupdates)

    def _update(orig, repo, node, branchmerge, *args, **kwargs):
        """Drop stale temporary includes after a plain (non-merge) update."""
        results = orig(repo, node, branchmerge, *args, **kwargs)

        # If we're updating to a location, clean up any stale temporary includes
        # (ex: this happens during hg rebase --abort).
        if not branchmerge and util.safehasattr(repo, 'sparsematch'):
            repo.prunetemporaryincludes()
        return results

    extensions.wrapfunction(mergemod, 'update', _update)

    def _checkcollision(orig, repo, wmf, actions):
        """Restrict case-collision checking to the sparse profile."""
        if util.safehasattr(repo, 'sparsematch'):
            # Only check for collisions on files and directories in the
            # sparse profile
            wmf = wmf.matches(repo.sparsematch())
        return orig(repo, wmf, actions)

    extensions.wrapfunction(mergemod, '_checkcollision', _checkcollision)
|
|
|
|
def _setupcommit(ui):
    """Refresh the sparse checkout after commits that modify a profile."""

    def _refreshoncommit(orig, self, node):
        """Refresh the checkout when commits touch .hgsparse
        """
        orig(self, node)

        # Use unfiltered to avoid computing hidden commits
        repo = self._repo.unfiltered()

        if util.safehasattr(repo, 'getsparsepatterns'):
            ctx = repo[node]
            profiles = repo.getsparsepatterns(ctx.rev()).profiles
            # only refresh if the commit actually changed an active profile
            if set(profiles) & set(ctx.files()):
                origstatus = repo.status()
                origsparsematch = repo.sparsematch()
                _refresh(repo.ui, repo, origstatus, origsparsematch, True)

            # temporary includes are no longer needed once the commit lands
            repo.prunetemporaryincludes()

    extensions.wrapfunction(context.committablectx, 'markcommitted',
                            _refreshoncommit)
|
|
|
|
def _setuplog(ui):
    """Add --sparse to `hg log` to restrict output to the sparse checkout."""
    entry = commands.table['^log|history']
    entry[1].append(('', 'sparse', None,
        "limit to changesets affecting the sparse checkout"))

    def _logrevs(orig, repo, opts):
        revs = orig(repo, opts)
        if not opts.get('sparse'):
            return revs
        sparsematch = repo.sparsematch()

        def touchessparse(rev):
            # keep revisions that modify at least one in-profile file
            return any(sparsematch(f) for f in repo[rev].files())

        return revs.filter(touchessparse)

    extensions.wrapfunction(cmdutil, '_logrevs', _logrevs)
|
|
|
|
def _clonesparsecmd(orig, ui, repo, *args, **opts):
    """Wrapper for `hg clone` handling --include/--exclude/--enable-profile.

    At most one of the three sparse flags may be given; when one is present,
    hg.updaterepo is wrapped so the sparse config is written before the
    initial checkout is laid out on disk.
    """
    include_pat = opts.get('include')
    exclude_pat = opts.get('exclude')
    enableprofile_pat = opts.get('enable_profile')
    include = exclude = enableprofile = False
    if include_pat:
        pat = include_pat
        include = True
    if exclude_pat:
        pat = exclude_pat
        exclude = True
    if enableprofile_pat:
        pat = enableprofile_pat
        enableprofile = True
    # the three flags are mutually exclusive
    if sum([include, exclude, enableprofile]) > 1:
        raise error.Abort(_("too many flags specified."))
    if include or exclude or enableprofile:
        def clone_sparse(orig, self, node, overwrite, *args, **kwargs):
            # sparse clone is a special snowflake as in that case always
            # are outside of the repo's dir hierarchy, yet we always want
            # to name our includes/excludes/enables using repo-root
            # relative paths
            overrides = {
                ('sparse', 'includereporootpaths'): True,
                ('sparse', 'enablereporootpaths'): True,
            }
            with self.ui.configoverride(overrides, 'sparse'):
                # write the sparse config before the first checkout happens
                _config(self.ui, self.unfiltered(), pat, {}, include=include,
                        exclude=exclude, enableprofile=enableprofile)
            return orig(self, node, overwrite, *args, **kwargs)
        extensions.wrapfunction(hg, 'updaterepo', clone_sparse)
    return orig(ui, repo, *args, **opts)
|
|
|
|
def _setupclone(ui):
    """Register sparse options on `hg clone` and install its wrapper."""
    entry = commands.table['^clone']
    newflags = (
        ('enable-profile', 'enable a sparse profile'),
        ('include', 'include sparse pattern'),
        ('exclude', 'exclude sparse pattern'),
    )
    for flagname, flagdesc in newflags:
        entry[1].append(('', flagname, [], flagdesc))
    extensions.wrapcommand(commands.table, 'clone', _clonesparsecmd)
|
|
|
|
def _setupadd(ui):
    """Add -s/--sparse to `hg add` to pull added files' dirs into the config."""
    entry = commands.table['^add']
    entry[1].append(('s', 'sparse', None,
        'also include directories of added files in sparse config'))

    def _add(orig, ui, repo, *pats, **opts):
        if opts.get('sparse'):
            # include each added file's containing directory before adding
            dirnames = set(util.split(pat)[0] for pat in pats)
            _config(ui, repo, list(dirnames), opts, include=True)
        return orig(ui, repo, *pats, **opts)

    extensions.wrapcommand(commands.table, 'add', _add)
|
|
|
|
def _setupdirstate(ui):
    """Modify the dirstate to prevent stat'ing excluded files,
    and to prevent modifications to files outside the checkout.
    """

    def _dirstate(orig, repo):
        # stash the repo on the dirstate so the wrappers below can reach the
        # sparse matcher from a dirstate instance
        dirstate = orig(repo)
        dirstate.repo = repo
        return dirstate
    extensions.wrapfunction(
        localrepo.localrepository.dirstate, 'func', _dirstate)

    # The atrocity below is needed to wrap dirstate._ignore. It is a cached
    # property, which means normal function wrapping doesn't work.
    class ignorewrapper(object):
        """Descriptor that unions the real ignore matcher with 'not sparse'.

        Files outside the sparse config are treated as ignored so status
        walks never stat them.  The combined matcher is rebuilt only when
        the underlying ignore matcher or the sparse matcher changes.
        """
        def __init__(self, orig):
            self.orig = orig
            self.origignore = None
            self.func = None
            self.sparsematch = None

        def __get__(self, obj, type=None):
            repo = obj.repo
            origignore = self.orig.__get__(obj)
            if not util.safehasattr(repo, 'sparsematch'):
                # repo not wrapped (e.g. Eden): behave like the original
                return origignore

            sparsematch = repo.sparsematch()
            if self.sparsematch != sparsematch or self.origignore != origignore:
                # cache invalidated: rebuild "ignored OR outside sparse"
                self.func = unionmatcher([origignore,
                                          negatematcher(sparsematch)])
                self.sparsematch = sparsematch
                self.origignore = origignore
            return self.func

        def __set__(self, obj, value):
            return self.orig.__set__(obj, value)

        def __delete__(self, obj):
            return self.orig.__delete__(obj)

    replacefilecache(dirstate.dirstate, '_ignore', ignorewrapper)

    # dirstate.rebuild should not add non-matching files
    def _rebuild(orig, self, parent, allfiles, changedfiles=None):
        if util.safehasattr(self.repo, 'sparsematch'):
            matcher = self.repo.sparsematch()
            allfiles = allfiles.matches(matcher)
            if changedfiles:
                changedfiles = [f for f in changedfiles if matcher(f)]

            if changedfiles is not None:
                # In _rebuild, these files will be deleted from the dirstate
                # when they are not found to be in allfiles
                dirstatefilestoremove = set(f for f in self if not matcher(f))
                changedfiles = dirstatefilestoremove.union(changedfiles)

        return orig(self, parent, allfiles, changedfiles)
    extensions.wrapfunction(dirstate.dirstate, 'rebuild', _rebuild)

    # Prevent adding files that are outside the sparse checkout
    editfuncs = ['normal', 'add', 'normallookup', 'copy', 'remove', 'merge']
    hint = _('include file with `hg sparse include <pattern>` or use ' +
             '`hg add -s <file>` to include file directory while adding')
    for func in editfuncs:
        # NOTE: _wrapper does not reference `func`, so the late-binding
        # closure pitfall does not apply here.
        def _wrapper(orig, self, *args):
            repo = self.repo
            if util.safehasattr(repo, 'sparsematch'):
                dirstate = repo.dirstate
                sparsematch = repo.sparsematch()
                for f in args:
                    # only reject files that are both outside the sparse
                    # config and not already tracked
                    if (f is not None and not sparsematch(f) and
                            f not in dirstate):
                        raise error.Abort(_("cannot add '%s' - it is outside "
                                            "the sparse checkout") % f,
                                          hint=hint)
            return orig(self, *args)
        extensions.wrapfunction(dirstate.dirstate, func, _wrapper)
|
|
|
|
def _setupdiff(ui):
    """Add -s/--sparse to `hg diff` to restrict output to the sparse config."""
    entry = commands.table['^diff']
    entry[1].append(('s', 'sparse', None,
        'only show changes in files in the sparse config'))

    def workingfilectxdata(orig, self):
        try:
            # Try lookup working copy first.
            return orig(self)
        except IOError:
            # Then try working copy parent if the file is outside sparse.
            if util.safehasattr(self._repo, 'sparsematch'):
                sparsematch = self._repo.sparsematch()
                if not sparsematch(self._path):
                    basectx = self._changectx._parents[0]
                    return basectx[self._path].data()
            raise

    extensions.wrapfunction(context.workingfilectx, 'data', workingfilectxdata)

    # wrap trydiff to filter diffs if '--sparse' is set
    def trydiff(orig, repo, revs, ctx1, ctx2, modified, added, removed,
                copy, getfilectx, opts, losedatafn, prefix, relroot):
        sparsematch = repo.sparsematch()
        # Python 2 filter() returns lists here, matching trydiff's expectation
        modified = filter(sparsematch, modified)
        added = filter(sparsematch, added)
        removed = filter(sparsematch, removed)
        copy = dict((d, s) for d, s in copy.items() if sparsematch(s))
        return orig(repo, revs, ctx1, ctx2, modified, added, removed,
                    copy, getfilectx, opts, losedatafn, prefix, relroot)

    def diff(orig, ui, repo, *pats, **opts):
        issparse = bool(opts.get('sparse'))
        # wrap only for the duration of this diff invocation, and always
        # unwrap afterwards, even if the diff raises
        if issparse:
            extensions.wrapfunction(patch, 'trydiff', trydiff)
        try:
            orig(ui, repo, *pats, **opts)
        finally:
            if issparse:
                extensions.unwrapfunction(patch, 'trydiff', trydiff)
    extensions.wrapcommand(commands.table, 'diff', diff)
|
|
|
|
def _setupsubcommands(ui):
    """Teach help/command dispatch about `hg sparse <subcommand>` syntax."""

    # hg help sparse <subcommand> needs to be acceptable
    def helpacceptmultiplenames(orig, ui, *names, **opts):
        name = ' '.join(names) if names else None
        return orig(ui, name, **opts)

    # hg help should include subcommands
    def helpsubcommands(orig, self, name, subtopic=None):
        rst = orig(self, name, subtopic)

        cmd, hassub, sub = name.partition(' ')
        if cmd == 'sparse':
            if hassub and rst[0] == 'hg %s\n' % sub:
                # subcommand help, patch first line
                rst[0] = 'hg %s\n' % name

            if not self.ui.quiet:
                # `subcmd` here is the module-level subcommand helper object
                subcmdsrst = subcmd.subcmdsrst(self.ui.verbose, self.ui.quiet)
                # in verbose mode there is an extra line we want to keep at the
                # end.
                pos = len(rst) if self.ui.verbose else -1
                rst[pos:pos] = [subcmdsrst]

        return rst

    # when looking for subcommands, have cmdutil.findpossible find them
    def findpossible(orig, cmd, table, strict=False):
        # note: this local `subcmd` (the subcommand name string) shadows the
        # module-level helper used above; they are unrelated
        maincmd, hassub, subcmd = cmd.partition(' ')
        if hassub and maincmd == 'sparse':
            res = orig(subcmd, subcmdtable, strict)
            if subcmd in res[0]:
                # reslot as the full command, including the first alias
                res[0][cmd] = res[0].pop(subcmd)
                res[0][cmd][0][0] = cmd
            return res
        return orig(cmd, table, strict)

    # when parsing shelve command options, add the switches from subcommands
    def subcommandopts(orig, args, options, *posargs, **kwargs):
        sparseopts = cmdtable['^sparse'][1]
        if options[:len(sparseopts)] == sparseopts: # parsing sparse options
            return subcmd.parseargs(orig, args, options, *posargs, **kwargs)
        return orig(args, options, *posargs, **kwargs)

    extensions.wrapcommand(commands.table, 'help', helpacceptmultiplenames)
    extensions.wrapfunction(help._helpdispatch, 'helpcmd', helpsubcommands)
    extensions.wrapfunction(cmdutil, 'findpossible', findpossible)
    extensions.wrapfunction(fancyopts, 'fancyopts', subcommandopts)
|
|
|
|
@attr.s(frozen=True, slots=True, cmp=False)
class SparseConfig(object):
    """Immutable parsed sparse configuration.

    Unpacks as a 3-tuple of (includes, excludes, profiles); the metadata
    mapping is only reachable as an attribute.
    """
    # path of the config source, or a descriptive placeholder string
    path = attr.ib()
    # include/exclude pattern sets, frozen at construction
    includes = attr.ib(convert=frozenset)
    excludes = attr.ib(convert=frozenset)
    # names of %include'd profiles, in file order
    profiles = attr.ib(convert=tuple)
    # [metadata] section key -> joined string value
    metadata = attr.ib(default=attr.Factory(dict))

    def __iter__(self):
        # The metadata field is deliberately not included
        for field in (self.includes, self.excludes, self.profiles):
            yield field
|
|
|
|
def _wraprepo(ui, repo):
    """Mix sparse-checkout support into ``repo`` by swapping its class.

    Defines a per-repo SparseRepo subclass providing sparse-config parsing,
    cached sparse matchers, and temporary-include management, then replaces
    ``repo.__class__`` with it.
    """
    # metadata parsing expression: INI-style "key: value" / "key = value"
    metadata_key_value = re.compile(r'(?P<key>.*)\s*[:=]\s*(?P<value>.*)')

    class SparseRepo(repo.__class__):
        def readsparseconfig(self, raw, filename=None):
            """Takes a string sparse config and returns a SparseConfig

            This object contains the includes, excludes, and profiles from the
            raw profile.

            The filename is used to report errors and warnings.

            """
            filename = filename or '<sparse profile>'
            metadata = {}
            last_key = None
            includes = set()
            excludes = set()

            sections = {
                '[include]': includes,
                '[exclude]': excludes,
                '[metadata]': metadata,
            }
            # lines before any section header count as includes
            current = includes

            profiles = []

            for i, line in enumerate(raw.splitlines(), start=1):
                stripped = line.strip()
                if not stripped or stripped.startswith(('#', ';')):
                    # empty or comment line, skip
                    continue

                if stripped.startswith('%include '):
                    # include another profile
                    stripped = stripped[9:].strip()
                    if stripped:
                        profiles.append(stripped)
                    continue

                if stripped in sections:
                    if sections[stripped] is includes and current is excludes:
                        raise error.Abort(_(
                            'A sparse file cannot have includes after excludes '
                            'in %s:%i') % (filename, i))
                    current = sections[stripped]
                    continue

                if current is metadata:
                    # Metadata parsing, INI-style format
                    if line.startswith((' ', '\t')):  # continuation
                        if last_key is None:
                            self.ui.warn(_(
                                'warning: sparse profile [metadata] section '
                                'indented lines that do not belong to a '
                                'multi-line entry, ignoring, in %s:%i\n') % (
                                filename, i))
                            continue
                        key, value = last_key, stripped
                    else:
                        match = metadata_key_value.match(stripped)
                        if match is None:
                            self.ui.warn(_(
                                'warning: sparse profile [metadata] section '
                                'does not appear to have a valid option '
                                'definition, ignoring, in %s:%i\n') % (
                                filename, i))
                            last_key = None
                            continue
                        key, value = (
                            s.strip() for s in match.group('key', 'value'))
                        metadata[key] = []

                    metadata[key].append(value)
                    last_key = key
                    continue

                # inclusion or exclusion line
                if stripped.startswith('/'):
                    self.ui.warn(_(
                        'warning: sparse profile cannot use paths starting '
                        'with /, ignoring %s, in %s:%i\n') % (
                        line, filename, i))
                    continue
                current.add(line)

            # multi-line metadata values are joined into a single string
            metadata = {key: '\n'.join(value).strip()
                        for key, value in metadata.items()}
            return SparseConfig(
                filename, includes, excludes, profiles, metadata)

        def getsparsepatterns(self, rev, config=None):
            """Produce the full sparse config for a revision as a SparseConfig

            This includes all patterns from included profiles, transitively.

            if config is None, use the active profile, in .hg/sparse

            """
            # Use unfiltered to avoid computing hidden commits
            if rev is None:
                raise error.Abort(
                    _("cannot parse sparse patterns from working copy"))

            repo = self.unfiltered()
            if config is None:
                if not self.vfs.exists('sparse'):
                    # no sparse config: everything is included
                    return SparseConfig(None, set(), set(), [])

                raw = self.vfs.read('sparse')
                config = self.readsparseconfig(
                    raw, filename=self.vfs.join('sparse'))

            # create copies, as these datastructures are updated further on
            includes, excludes, profiles = (
                set(config.includes), set(config.excludes),
                list(config.profiles)
            )

            ctx = repo[rev]
            if profiles:
                # resolve %include'd profiles transitively, guarding against
                # cycles with the `visited` set
                visited = set()
                while profiles:
                    profile = profiles.pop()
                    if profile in visited:
                        continue
                    visited.add(profile)

                    try:
                        raw = self.getrawprofile(profile, ctx.hex())
                    except error.ManifestLookupError:
                        msg = (
                            "warning: sparse profile '%s' not found "
                            "in rev %s - ignoring it\n" % (profile, ctx))
                        if self.ui.configbool('sparse', 'missingwarning'):
                            self.ui.warn(msg)
                        else:
                            self.ui.debug(msg)
                        continue
                    pincludes, pexcludes, subprofs = (
                        self.readsparseconfig(raw, filename=profile))
                    includes.update(pincludes)
                    excludes.update(pexcludes)
                    for subprofile in subprofs:
                        profiles.append(subprofile)

                profiles = visited

            if includes:
                # always keep repo-administrative files
                includes.add('.hg*')
            return SparseConfig(
                # BUG FIX: the original used '<aggregated from %s>'.format(...),
                # which never substitutes a %-style placeholder and left the
                # literal '%s' in the path; use %-interpolation instead.
                '<aggregated from %s>' % config.path,
                includes, excludes, profiles)

        def getrawprofile(self, profile, changeid):
            """Return the raw text of ``profile`` at ``changeid``.

            Uses the simplecache extension as a memoization layer when it is
            available; otherwise reads straight from the filelog.
            """
            repo = self.unfiltered()
            try:
                simplecache = extensions.find('simplecache')

                # Use unfiltered to avoid computing hidden commits
                node = repo[changeid].hex()
                def func():
                    return repo.filectx(profile, changeid=changeid).data()
                key = 'sparseprofile:%s:%s' % (profile.replace('/', '__'), node)
                return simplecache.memoize(func, key,
                                           simplecache.stringserializer,
                                           self.ui)
            except KeyError:
                # simplecache not enabled
                return repo.filectx(profile, changeid=changeid).data()

        def _sparsesignature(self, includetemp=True, config=None, revs=()):
            """Returns the signature string representing the contents of the
            current project sparse configuration. This can be used to cache the
            sparse matcher for a given set of revs."""
            signaturecache = self.signaturecache
            sigkey = config.path if config else '.hg/sparse'
            signature = signaturecache.get(sigkey)
            if includetemp:
                tempsignature = signaturecache.get('tempsignature')
            else:
                tempsignature = 0

            if signature is None or (includetemp and tempsignature is None):
                signature = 0
                if config is None:
                    try:
                        sparsedata = self.vfs.read('sparse')
                        signature = hashlib.sha1(sparsedata).hexdigest()
                    except (OSError, IOError):
                        # no sparse file: signature stays 0
                        pass
                else:
                    # hash the profile contents across all requested revs
                    sha1 = hashlib.sha1()
                    for r in revs:
                        try:
                            sha1.update(self.getrawprofile(config.path, r))
                            signature = sha1.hexdigest()
                        except KeyError:
                            # profile missing in this rev: keep last signature
                            pass
                signaturecache[sigkey] = signature

            tempsignature = 0
            if includetemp:
                try:
                    tempsparsepath = self.vfs.read('tempsparse')
                    tempsignature = hashlib.sha1(tempsparsepath).hexdigest()
                except (OSError, IOError):
                    pass
                signaturecache['tempsignature'] = tempsignature
            return '%s:%s' % (signature, tempsignature)

        def invalidatecaches(self):
            self.invalidatesignaturecache()
            return super(SparseRepo, self).invalidatecaches()

        def invalidatesignaturecache(self):
            self.signaturecache.clear()

        def sparsematch(self, *revs, **kwargs):
            """Returns the sparse match function for the given revs

            If multiple revs are specified, the match function is the union
            of all the revs.

            `includetemp` is used to indicate if the temporarily included file
            should be part of the matcher.

            `config` can be used to specify a different sparse profile
            from the default .hg/sparse active profile

            """
            return self._sparsematch_and_key(*revs, **kwargs)[0]

        def _sparsematch_and_key(self, *revs, **kwargs):
            """Implementation of sparsematch() with the cache key included.

            This lets us reuse the key elsewhere without having to hit each
            profile file twice.

            """
            if not revs or revs == (None,):
                # default to the working copy parents
                revs = [self.changelog.rev(node) for node in
                        self.dirstate.parents() if node != nullid]

            includetemp = kwargs.get('includetemp', True)
            config = kwargs.get('config')
            signature = self._sparsesignature(
                includetemp=includetemp, config=config, revs=revs)

            key = '%s:%s' % (signature, ':'.join([str(r) for r in revs]))

            result = self.sparsecache.get(key, None)
            if result:
                return result, key

            matchers = []
            for rev in revs:
                try:
                    includes, excludes, profiles = self.getsparsepatterns(
                        rev, config)

                    if includes or excludes:
                        # Explicitly include subdirectories of includes so
                        # status will walk them down to the actual include.
                        subdirs = set()
                        for include in includes:
                            dirname = os.path.dirname(include)
                            # basename is used to avoid issues with absolute
                            # paths (which on Windows can include the drive).
                            while os.path.basename(dirname):
                                subdirs.add(dirname)
                                dirname = os.path.dirname(dirname)

                        matcher = matchmod.match(self.root, '', [],
                            include=includes, exclude=excludes,
                            default='relpath')
                        if subdirs:
                            matcher = forceincludematcher(matcher, subdirs)
                        matchers.append(matcher)
                except IOError:
                    pass

            result = None
            if not matchers:
                # no config at all: match everything
                result = matchmod.always(self.root, '')
            elif len(matchers) == 1:
                result = matchers[0]
            else:
                result = unionmatcher(matchers)

            if kwargs.get('includetemp', True):
                tempincludes = self.gettemporaryincludes()
                result = forceincludematcher(result, tempincludes)

            self.sparsecache[key] = result

            return result, key

        def getactiveprofiles(self):
            """Return the set of profiles active in the working copy parents."""
            # Use unfiltered to avoid computing hidden commits
            repo = self.unfiltered()
            revs = [repo.changelog.rev(node) for node in
                    repo.dirstate.parents() if node != nullid]

            activeprofiles = set()
            for rev in revs:
                profiles = self.getsparsepatterns(rev).profiles
                activeprofiles.update(profiles)

            return activeprofiles

        def writesparseconfig(self, include, exclude, profiles):
            """Serialize and persist the active sparse config to .hg/sparse."""
            raw = '%s[include]\n%s\n[exclude]\n%s\n' % (
                ''.join(['%%include %s\n' % p for p in sorted(profiles)]),
                '\n'.join(sorted(include)),
                '\n'.join(sorted(exclude)))
            self.vfs.write("sparse", raw)
            self.invalidatesignaturecache()

        def addtemporaryincludes(self, files):
            """Force-include ``files`` until the next prune (e.g. for merges)."""
            includes = self.gettemporaryincludes()
            for file in files:
                includes.add(file)
            self._writetemporaryincludes(includes)

        def gettemporaryincludes(self):
            """Return the set of temporarily included paths from .hg/tempsparse."""
            existingtemp = set()
            if self.vfs.exists('tempsparse'):
                raw = self.vfs.read('tempsparse')
                existingtemp.update(raw.split('\n'))
            return existingtemp

        def _writetemporaryincludes(self, includes):
            raw = '\n'.join(sorted(includes))
            self.vfs.write('tempsparse', raw)
            self.invalidatesignaturecache()

        def prunetemporaryincludes(self):
            """Remove temporary includes that fell out of the sparse config.

            Skipped when the working copy has pending changes, since removing
            files could destroy uncommitted work.
            """
            # (was repo.vfs via the closure; self IS that repo, so use self
            # for consistency with the rest of the class)
            if self.vfs.exists('tempsparse'):
                origstatus = self.status()
                modified, added, removed, deleted, a, b, c = origstatus
                if modified or added or removed or deleted:
                    # Still have pending changes. Don't bother trying to prune.
                    return

                sparsematch = self.sparsematch(includetemp=False)
                dirstate = self.dirstate
                actions = []
                dropped = []
                tempincludes = self.gettemporaryincludes()
                for file in tempincludes:
                    if file in dirstate and not sparsematch(file):
                        message = 'dropping temporarily included sparse files'
                        actions.append((file, None, message))
                        dropped.append(file)

                typeactions = collections.defaultdict(list)
                typeactions['r'] = actions
                mergemod.applyupdates(self, typeactions, self[None], self['.'],
                                      False)

                # Fix dirstate
                for file in dropped:
                    dirstate.drop(file)

                self.vfs.unlink('tempsparse')
                self.invalidatesignaturecache()
                msg = _("cleaned up %d temporarily added file(s) from the "
                        "sparse checkout\n")
                ui.status(msg % len(tempincludes))

    if 'dirstate' in repo._filecache:
        # an existing dirstate needs the back-reference _setupdirstate relies on
        repo.dirstate.repo = repo
    # caches consulted by _sparsematch_and_key / _sparsesignature
    repo.sparsecache = {}
    repo.signaturecache = {}
    repo.__class__ = SparseRepo
|
|
|
|
# A profile is either active, inactive or included; the latter is a profile
|
|
# included (transitively) by an active profile.
|
|
PROFILE_INACTIVE, PROFILE_ACTIVE, PROFILE_INCLUDED = _profile_flags = range(3)
|
|
|
|
@attr.s(slots=True, frozen=True)
class ProfileInfo(collections.Mapping):
    """Immutable descriptor of a sparse profile.

    Behaves as a read-only mapping over the profile's [metadata] entries.
    """
    # repository-root-relative path of the profile file
    path = attr.ib()
    # one of PROFILE_INACTIVE / PROFILE_ACTIVE / PROFILE_INCLUDED
    active = attr.ib()
    # profile [metadata] section, key -> string value
    _metadata = attr.ib(default=attr.Factory(dict))

    @active.validator
    def checkactive(self, attribute, value):
        # identity check (`is`) because the flags are small ints used as enums
        if not any(value is flag for flag in _profile_flags):
            raise ValueError('Invalid active flag value')

    # Mapping methods for metadata access
    def __getitem__(self, key):
        return self._metadata[key]
    def __iter__(self):
        return iter(self._metadata)
    def __len__(self):
        return len(self._metadata)
|
|
|
|
def _discover(ui, repo, include_hidden=False):
    """Generate a list of available profiles with metadata

    Returns a generator yielding ProfileInfo objects, paths are relative to the
    repository root, the sequence is sorted by path.

    If no sparse.profile_directory path is configured, will only
    yield active and included profiles.

    README(.*) files are filtered out.

    If `include_hidden` is False, we filter out any profile with a 'hidden'
    entry in the profile metadata (unless it is currently active).

    """
    # profiles pulled in transitively by the active config
    included = repo.getactiveprofiles()
    # profiles named directly in .hg/sparse
    sparse = repo.vfs.read('sparse')
    active = repo.readsparseconfig(sparse).profiles
    active = set(active)

    profile_directory = ui.config('sparse', 'profile_directory')
    available = set()
    if profile_directory is not None:
        # the directory must stay inside the repository
        if (os.path.isabs(profile_directory) or
                profile_directory.startswith('../')):
            raise error.Abort(
                _('sparse.profile_directory must be relative to the '
                  'repository root'))
        if not profile_directory.endswith('/'):
            profile_directory += '/'

        ctx = repo['.']
        mf = ctx.manifest()

        matcher = matchmod.match(
            repo.root, repo.getcwd(),
            patterns=['path:' + profile_directory],
            exclude=['relglob:README.*', 'relglob:README'])
        available.update(mf.matches(matcher))

    # sort profiles and read profile metadata as we iterate
    for p in sorted(available | included):
        raw = repo.getrawprofile(p, '.')
        md = repo.readsparseconfig(raw, filename=p).metadata
        # hidden profiles are suppressed unless requested or currently in use
        if 'hidden' not in md or include_hidden or p in active or p in included:
            yield ProfileInfo(
                p, (PROFILE_ACTIVE if p in active else
                    PROFILE_INCLUDED if p in included else
                    PROFILE_INACTIVE),
                md)
|
|
|
|
def _profilesizeinfo(ui, repo, *config, **kwargs):
    """Get size stats for a given set of profiles

    Returns a dictionary of config -> (count, bytes) tuples. The
    special key `None` represents the total manifest count and
    bytecount. bytes is the total size of the files.

    Note: for performance reasons we don't calculate the total repository size
    and the value for the `None` key is always set to (count, None) to reflect
    this.

    Keyword arguments:
        rev: revision to measure against (default: '.')
        collectsize: also sum per-profile file sizes (default: False; slow)
    """
    # Use the simplecache extension when it is loaded; otherwise fall back to
    # no-op getters/setters so the rest of the code is cache-agnostic.
    try:
        cache = extensions.find('simplecache')
        cacheget = functools.partial(
            cache.cacheget, serializer=cache.jsonserializer, ui=ui)
        cacheset = functools.partial(
            cache.cacheset, serializer=cache.jsonserializer, ui=ui)
    except KeyError:
        cacheget = cacheset = lambda *args: None

    collectsize = kwargs.get('collectsize', False)

    results = {}    # config (or None) -> [filecount, bytecount or None]
    matchers = {}   # configs that missed the cache and still need a walk
    to_store = {}   # config -> cache key, for entries to write back

    rev = kwargs.get('rev', '.')
    ctx = scmutil.revsingle(repo, rev)

    templ = 'sparseprofilestats:%s:{}' % util.split(repo.root)[-1]
    def _genkey(path, *parts):
        # cache key components need to be ascii-safe; '/' is replaced
        path = path.replace('/', '__')
        return templ.format(':'.join((path,) + parts))

    key = _genkey('unfiltered', ctx.hex())
    cached = cacheget(key)
    results[None] = cached if cached else [0, None]
    if cached is None:
        # gather complete working copy data
        matchers[None] = matchmod.always(repo.root, repo.root)
        to_store[None] = key

    for c in config:
        matcher, key = repo._sparsematch_and_key(
            ctx.hex(), includetemp=False, config=c)
        key = _genkey(c.path, key, str(collectsize))
        cached = cacheget(key)
        if not cached and not collectsize:
            # if not collecting the full size, but we have a cached copy
            # for a full run, use the file count from that
            cached = cacheget(_genkey(c.path, key, 'True'))
            cached = cached and [cached[0], 0]
        results[c] = cached or [0, 0]
        if cached is None:
            matchers[c] = matcher
            to_store[c] = key

    if matchers:
        mf = ctx.manifest()
        if results[None][0]:
            # use cached working copy size
            totalfiles = results[None][0]
        else:
            with progress.spinner(ui, 'calculating total manifest size'):
                try:
                    totalfiles = len(mf)
                except TypeError:
                    # treemanifest does not implement __len__ :-(
                    totalfiles = sum(1 for __ in mf)

        if collectsize and len(matchers) - (None in matchers):
            # we may need to prefetch file data, to calculate the size of each
            # profile
            try:
                remotefilelog = extensions.find('remotefilelog')
            except KeyError:
                pass
            else:
                if remotefilelog.shallowrepo.requirement in repo.requirements:
                    profilematchers = unionmatcher(
                        [matchers[k] for k in matchers if k])
                    repo.prefetch(
                        repo.revs(ctx.hex()), matcher=profilematchers)

        with progress.bar(ui, _('calculating'), total=totalfiles) as prog:
            # only matchers for which there was no cache are processed
            for file in ctx.walk(unionmatcher(matchers.values())):
                prog.value += 1
                for c, matcher in matchers.items():
                    if matcher(file):
                        results[c][0] += 1
                        if collectsize and c is not None:
                            results[c][1] += ctx.filectx(file).size()

    # Freeze the mutable accumulators into tuples and persist cache misses.
    results = {k: tuple(v) for k, v in results.items()}
    for c, key in to_store.items():
        cacheset(key, results[c])

    return results
|
|
|
|
# hints

hint = registrar.hint()

@hint('sparse-explain-verbose')
def hintexplainverbose(*profiles):
    """Hint suggesting --verbose to get full size data for the profiles."""
    # Format *after* the gettext lookup: previously .format() was applied to
    # the literal inside _(), so the catalog key varied with the profile
    # names and the message could never be translated. Also fixes the
    # "a give profile" typo.
    return _("use 'hg sparse explain --verbose {}' to include the total file "
             "size for a given profile").format(' '.join(profiles))
|
|
|
|
def _deprecate(o, l=_('(DEPRECATED)')):
    """Mark an options-table tuple as deprecated.

    When *l* is not already part of the option's last field (index 4),
    return the tuple with that field rewritten to "<field> <l>".
    Otherwise return *l* unchanged.
    NOTE(review): returning the label rather than the original tuple in
    the already-marked case looks suspicious -- confirm against callers.
    """
    if l in o[4]:
        return l
    marked = ' '.join([o[4], l])
    return o[:3] + (marked,) + o[4:]
|
|
# NOTE(review): the '--force' help string lacks a space before
# '(DEPRECATED)' -- confirm whether that is intentional.
@command('^sparse', [
    ('f', 'force', False, _('allow changing rules even with pending changes'
                            '(DEPRECATED)')),
    ('I', 'include', False, _('include files in the sparse checkout '
                              '(DEPRECATED)')),
    ('X', 'exclude', False, _('exclude files in the sparse checkout '
                              '(DEPRECATED)')),
    ('d', 'delete', False, _('delete an include/exclude rule '
                             '(DEPRECATED)')),
    ('', 'enable-profile', False, _('enables the specified profile '
                                    '(DEPRECATED)')),
    ('', 'disable-profile', False, _('disables the specified profile '
                                     '(DEPRECATED)')),
    ('', 'import-rules', False, _('imports rules from a file (DEPRECATED)')),
    ('', 'clear-rules', False, _('clears local include/exclude rules '
                                 '(DEPRECATED)')),
    ('', 'refresh', False, _('updates the working after sparseness changes '
                             '(DEPRECATED)')),
    ('', 'reset', False, _('makes the repo full again (DEPRECATED)')),
    ('', 'cwd-list', False, _('list the full contents of the current '
                              'directory (DEPRECATED)')),
    ] + [_deprecate(o) for o in commands.templateopts],
    _('[OPTION] SUBCOMMAND ...'))
def sparse(ui, repo, *pats, **opts):
    """make the current checkout sparse, or edit the existing checkout

    The sparse command is used to make the current checkout sparse.
    This means files that don't meet the sparse condition will not be
    written to disk, or show up in any working copy operations. It does
    not affect files in history in any way.

    All the work is done in subcommands such as `hg sparse enableprofile`;
    passing no subcommand prints the currently applied sparse rules.

    The `include` and `exclude` subcommands are used to add and remove files
    from the sparse checkout, while delete removes an existing include/exclude
    rule.

    Sparse profiles can also be shared with other users of the repository by
    committing a file with include and exclude rules in a separate file. Use the
    `enableprofile` and `disableprofile` subcommands to enable or disable
    such profiles. Changes to shared profiles are not applied until they have
    been committed.

    See :hg:`help sparse [subcommand]` to get additional information.

    .. container:: verbose

        Sparse file format
        ------------------

        Structure
        .........

        Shared sparse profile files comprise 4 sections: `%include` directives
        that pull in another sparse profile, and `[metadata]`, `[include]` and
        `[exclude]` sections.

        Any line starting with a `;` or `#` character is a comment and is
        ignored.

        Extending existing profiles
        ...........................

        `%include <absolute path>` directives (one per line) let you extend
        an existing profile file, adding more include and exclude rules.
        Although this directive can appear anywhere in the file, it is
        recommended you keep these at the top of the file.

        Metadata
        ........

        The `[metadata]` section lets you specify key-value pairs for the
        profile. Anything before the first `:` or `=` is the key, everything
        after is the value. Values can be extended over multiple lines by
        indenting additional lines.

        Only the `title`, `description` and `hidden` keys carry meaning for
        `hg sparse`, these are used in the `hg sparse list` and
        `hg sparse explain` commands. Profiles with the `hidden` key (regardless
        of its value) are excluded from the `hg sparse list` listing unless
        the `-v` / `--verbose` switch is given.

        Include and exclude rules
        .........................

        Each line in the `[include]` and `[exclude]` sections is treated as a
        standard pattern, see :hg:`help patterns`. Exclude rules override
        include rules.

        Example
        .......

        ::

          # this profile extends another profile, incorporating all its rules
          %include some/base/profile

          [metadata]
          title: This is an example sparse profile
          description: You can include as much metadata as makes sense for your
            setup, and values can extend over multiple lines.
          lorem ipsum = Keys and values are separated by a : or =
          ; hidden: the hidden key lets you mark profiles that should not
          ; generally be discoverable. The value doesn't matter, use it to
          ; motivate why it is hidden.

          [include]
          foo/bar/baz
          bar/python_project/**/*.py

          [exclude]
          ; exclude rules override include rules, so all files with the
          ; extension .ignore are excluded from this sparse profile.
          foo/bar/baz/*.ignore

        Configuration options
        ---------------------

        The following config option defines whether sparse treats supplied
        paths as relative to repo root or to the current working dir for
        include and exclude options:

          [sparse]
          includereporootpaths = off

        The following config option defines whether sparse treats supplied
        paths as relative to repo root or to the current working dir for
        enableprofile and disableprofile options:

          [sparse]
          enablereporootpaths = on

        You can configure a path to find sparse profiles in; this path is
        used to discover available sparse profiles. Nested directories are
        reflected in the UI.

          [sparse]
          profile_directory = tools/scm/sparse

        It is not set by default.

    """
    if not util.safehasattr(repo, 'sparsematch'):
        raise error.Abort(_('this is not a sparse repository'))

    # Subcommand dispatch: if the first positional argument names a
    # registered subcommand, hand off to it entirely.
    cmd = subcmd.parse(pats, opts)
    if cmd is not None:
        return cmd(ui, repo)

    # Legacy flag-based interface (all flags are DEPRECATED in favor of
    # the subcommands above).
    include = opts.get('include')
    exclude = opts.get('exclude')
    force = opts.get('force')
    enableprofile = opts.get('enable_profile')
    disableprofile = opts.get('disable_profile')
    importrules = opts.get('import_rules')
    clearrules = opts.get('clear_rules')
    delete = opts.get('delete')
    refresh = opts.get('refresh')
    reset = opts.get('reset')
    cwdlist = opts.get('cwd_list')
    # The mode flags are mutually exclusive.
    count = sum([include, exclude, enableprofile, disableprofile, delete,
                 importrules, refresh, clearrules, reset, cwdlist])
    if count > 1:
        raise error.Abort(_("too many flags specified"))

    if count == 0:
        # No flags: print the current sparse configuration.
        if repo.vfs.exists('sparse'):
            ui.status(repo.vfs.read("sparse") + "\n")
            temporaryincludes = repo.gettemporaryincludes()
            if temporaryincludes:
                ui.status(_("Temporarily Included Files (for merge/rebase):\n"))
                ui.status(("\n".join(temporaryincludes) + "\n"))
        else:
            ui.status(_('repo is not sparse\n'))
        return

    if include or exclude or delete or reset or enableprofile or disableprofile:
        _config(ui, repo, pats, opts, include=include, exclude=exclude,
                reset=reset, delete=delete, enableprofile=enableprofile,
                disableprofile=disableprofile, force=force)

    if importrules:
        _import(ui, repo, pats, opts, force=force)

    if clearrules:
        _clear(ui, repo, pats, force=force)

    if refresh:
        with repo.wlock():
            c = _refresh(ui, repo, repo.status(), repo.sparsematch(), force)
            fcounts = map(len, c)
            _verbose_output(ui, opts, 0, 0, 0, *fcounts)

    if cwdlist:
        _cwdlist(repo)
|
|
# subcommands for the hg sparse command line
|
|
class subcmdfunc(registrar._funcregistrarbase):
    """Register a function to be invoked for "hg sparse <thing>" subcommands

    Help info is taken from the function docstring, or can be set explicitly
    with the help='...' keyword argument.

    Per-subcommand options are specified with the options keyword, which
    takes the same format as the options table for commands.
    """

    def __init__(self, table=None):
        if table is None:
            # List commands in registration order
            table = util.sortdict()
        super(subcmdfunc, self).__init__(table)

    def _doregister(self, func, name, options=(), synopsis=None, help=None):
        # Store a (dispatcher, options[, synopsis]) entry for the subcommand.
        if name in self._table:
            msg = 'duplicate registration for name: "%s"' % name
            raise error.ProgrammingError(msg)

        @functools.wraps(func)
        def dispatch(*args, **kwargs):
            # The registered function receives the subcommand name as its
            # first argument.
            return func(name, *args, **kwargs)

        if help is not None:
            dispatch.__doc__ = help

        registration = dispatch, tuple(options)
        if synopsis:
            registration += (synopsis,)

        self._table[name] = registration

        # Return the undecorated function so decorators can be stacked.
        return func

    def subcmdsrst(self, verbose=False, quiet=False):
        """Produce a table of subcommands"""
        def cmdhelp():
            # Yield (name, one-line doc) pairs; in verbose mode append the
            # synopsis and the rest of the docstring.
            for name, entry in self._table.items():
                doc = pycompat.getdoc(entry[0])
                doc, __, rest = doc.strip().partition('\n')
                if verbose and rest.strip():
                    if len(entry) > 2:  # synopsis
                        name = '{} {}'.format(name, entry[2])
                    doc = '{} - {}'.format(doc, rest.strip())
                yield (name, doc)
        rst = ['\n%s:\n\n' % _('subcommands')]
        rst += minirst.maketable(list(cmdhelp()), 1)
        if not quiet:
            rst.append(_('\n(Use hg help sparse [subcommand] '
                         'to show complete subcommand help)\n'))
        return ''.join(rst)

    def parseargs(self, parser, args, options, *posargs, **kwargs):
        # Parse args with the per-subcommand options merged in when the first
        # argument names a registered subcommand.
        subcmd = args[0] if args else None
        if subcmd in self._table:
            options = options + list(self._table[subcmd][1])
        try:
            return parser(args, options, *posargs, **kwargs)
        except pycompat.getopt.GetoptError as ex:
            if subcmd in self._table:
                # Attribute the error to the subcommand, not to 'sparse'.
                raise error.CommandError('sparse {}'.format(subcmd), ex)
            raise

    def parse(self, args, opts):
        # Return a callable that runs the named subcommand, or None when the
        # first argument does not name a registered subcommand.
        if not args or args[0] not in self._table:
            return

        name, args = args[0], args[1:]
        def callsubcmd(ui, repo, *moreargs, **kw):
            opts.update(kw)
            return self._table[name][0](ui, repo, *(moreargs + args), **opts)
        return callsubcmd
|
|
|
|
# Table of "hg sparse" subcommands, populated by the @subcmd decorators below.
subcmdtable = util.sortdict()
subcmd = subcmdfunc(subcmdtable)
|
|
|
|
@subcmd('list', commands.templateopts, '[OPTION]')
def _listprofiles(cmd, ui, repo, *pats, **opts):
    """List available sparse profiles

    Show all available sparse profiles, with the active profiles marked.
    However, if a profile has a key named `hidden` in its metadata, the profile
    is excluded from this list unless explicitly active or included in an active
    profile, or when the `--verbose` switch is used.

    """
    # Per-state marker characters and label names for the formatter.
    chars = {PROFILE_INACTIVE: '', PROFILE_INCLUDED: '~', PROFILE_ACTIVE: '*'}
    labels = {
        PROFILE_INACTIVE: 'inactive',
        PROFILE_INCLUDED: 'included',
        PROFILE_ACTIVE: 'active',
    }
    ui.pager('sparse list')
    with ui.formatter('sparse', opts) as fm:
        if fm.isplain():
            # Legend goes to stderr so plain stdout stays machine-parseable.
            ui.write_err(
                _('symbols: * = active profile, ~ = transitively '
                  'included\n'),
                label='sparse.profile.legend')

        profiles = list(_discover(ui, repo, include_hidden=ui.verbose))
        # Pad paths to the longest one so titles line up.
        max_width = max(len(p.path) for p in profiles)

        for info in profiles:
            fm.startitem()
            label = 'sparse.profile.' + labels[info.active]
            fm.plain('%-1s ' % chars[info.active], label=label)
            fm.data(active=labels[info.active], metadata=dict(info))
            fm.write(b'path', '%-{}s'.format(max_width), info.path, label=label)
            if 'title' in info:
                fm.plain(' - %s' % info.get('title', b''), label=label)
            fm.plain('\n')
|
|
|
|
@subcmd('explain', [
    ('r', 'rev', '', _('explain the profile(s) against the specified revision'),
     _('REV')),
    ] + commands.templateopts,
    _('[OPTION]... [PROFILE]...'))
def _explainprofile(cmd, ui, repo, *profiles, **opts):
    """Show information on individual profiles

    If --verbose is given, calculates the file size impact of a profile (slow).

    Returns 0 on success, 255 if any requested profile was not found.
    """
    if ui.plain() and not opts.get('template'):
        hint = _('invoke with -T/--template to control output format')
        raise error.Abort(_('must specify a template in plain mode'), hint=hint)

    if not profiles:
        raise error.Abort(_('no profiles specified'))

    rev = scmutil.revrange(repo, [opts.get('rev') or '.']).last()
    if rev is None:
        raise error.Abort(_('empty revision set'))

    # BUGFIX: initialize the exit code *before* the profile-reading loop.
    # Previously `exitcode = 0` was executed after the loop, silently
    # discarding the 255 recorded for missing profiles.
    exitcode = 0

    configs = []
    for p in profiles:
        try:
            raw = repo.getrawprofile(p, rev)
        except KeyError:
            ui.warn(_('The profile %s was not found\n') % p)
            exitcode = 255
            continue
        profile = repo.readsparseconfig(raw, p)
        configs.append(profile)

    # Full per-profile size data is only collected in verbose mode (slow).
    stats = _profilesizeinfo(
        ui, repo, *configs, rev=rev, collectsize=ui.verbose)
    filecount, totalsize = stats[None]

    def sortedsets(d):
        # Sets are unordered; sort them so templated output is stable.
        return {
            k: sorted(v) if isinstance(v, collections.Set) else v
            for k, v in d.items()}

    ui.pager('sparse explain')
    with ui.formatter('sparse', opts) as fm:
        for i, profile in enumerate(configs):
            if i:
                fm.plain('\n')
            fm.startitem()

            fm.write('path', '%s\n\n', profile.path)

            pfilecount, ptotalsize = stats.get(profile, (-1, -1))
            pfileperc = 0.0
            if pfilecount > -1 and filecount > 0:
                # float division via `from __future__ import division`
                pfileperc = (pfilecount / filecount) * 100
            profilestats = {
                'filecount': pfilecount, 'filecountpercentage': pfileperc
            }
            if ptotalsize:
                profilestats['totalsize'] = ptotalsize
            fm.data(
                stats=profilestats,
                **sortedsets(attr.asdict(profile, retain_collection_types=True))
            )

            if fm.isplain():
                # Render a human-readable minirst document for plain output.
                md = profile.metadata
                title = md.get('title', _('(untitled)'))
                lines = [
                    minirst.section(title)
                ]
                description = md.get('description')
                if description:
                    lines.append('%s\n\n' % description)

                if pfileperc or ptotalsize:
                    lines.append(minirst.subsection(
                        _('Size impact compared to a full checkout')))

                    if pfileperc:
                        lines.append(':file count: {:d} ({:.2f}%)\n'.format(
                            pfilecount, pfileperc))
                    if ptotalsize:
                        lines.append(':total size: {:s}\n'.format(
                            util.bytecount(ptotalsize)))
                    lines.append('\n')

                # Any metadata keys beyond title/description are listed
                # verbatim as a field list.
                other = md.viewkeys() - {'title', 'description'}
                if other:
                    lines += (
                        minirst.subsection(_('Additional metadata')),
                        ''.join(
                            [':%s: %s\n' % (
                                key, '\n '.join(md[key].splitlines()))
                             for key in sorted(other)]),
                        '\n')

                sections = (
                    ('profiles', _('Profiles included')),
                    ('includes', _('Inclusion rules')),
                    ('excludes', _('Exclusion rules')),
                )

                for attrib, label in sections:
                    section = getattr(profile, attrib)
                    if not section:
                        continue
                    lines += (minirst.subsection(label), '::\n\n')
                    lines += (' %s\n' % entry for entry in sorted(section))
                    lines += ('\n',)

                textwidth = ui.configint('ui', 'textwidth')
                termwidth = ui.termwidth() - 2
                if not (0 < textwidth <= termwidth):
                    textwidth = termwidth
                fm.plain(minirst.format(''.join(lines), textwidth))

    if not ui.verbose:
        hintutil.trigger('sparse-explain-verbose', *profiles)

    return exitcode
|
|
|
|
@subcmd('files', commands.templateopts, _('[OPTION]...'))
def _listfilessubcmd(cmd, ui, repo, *profiles, **opts):
    """List all files included in a profile

    If files are given to match, this command only prints the names of the
    files in a profile that match those patterns.

    Returns 0 if at least one file was listed, 1 otherwise.
    """
    if not profiles:
        raise error.Abort(_('no profiles specified'))

    # First argument is the profile; any remaining ones are file patterns.
    profile, files = profiles[0], profiles[1:]
    try:
        raw = repo.getrawprofile(profile, '.')
    except KeyError:
        raise error.Abort(_('The profile %s was not found\n') % profile)

    config = repo.readsparseconfig(raw, profile)
    ctx = repo['.']
    # Restrict the profile's matcher to the user-supplied patterns (if any).
    matcher = matchmod.intersectmatchers(
        matchmod.match(repo.root, repo.getcwd(), files),
        repo.sparsematch(ctx.hex(), includetemp=False, config=config))

    exitcode = 1
    ui.pager('sparse listfiles')
    with ui.formatter('files', opts) as fm:
        for f in ctx.matches(matcher):
            fm.startitem()
            fm.data(abspath=f)
            fm.write('path', '%s\n', matcher.rel(f))
            exitcode = 0
    return exitcode
|
|
|
|
# Shared help-text snippet appended to the config-editing subcommands below.
_details = '''\n
The effects of adding or deleting an include or exclude rule are applied
immediately. If applying the new rule would cause a file with pending
changes to be added or removed, the command will fail. Pass --force to
force a rule change even with pending changes (the changes on disk will
be preserved).
'''

# Options shared by all config-editing subcommands.
_common_config_opts = [
    ('f', 'force', False, _('allow changing rules even with pending changes')),
]
|
|
@subcmd('reset', _common_config_opts, help=_('makes the repo full again'))
@subcmd('disableprofile', _common_config_opts, '[PROFILE]...',
        help=_('disables the specified profile'))
@subcmd('enableprofile', _common_config_opts, '[PROFILE]...',
        help=_('enables the specified profile'))
@subcmd('delete', _common_config_opts, '[RULE]...',
        help=_('delete an include/exclude rule' + _details))
@subcmd('exclude', _common_config_opts, '[RULE]...',
        help=_('exclude files in the sparse checkout' + _details))
@subcmd('include', _common_config_opts, '[RULE]...',
        help=_('include files in the sparse checkout' + _details))
def _configsubcmd(cmd, ui, repo, *pats, **opts):
    # One function backs all six config-editing subcommands; the subcommand
    # name (`cmd`) selects which _config keyword argument is enabled.
    if cmd == 'reset' and pats:
        raise error.CommandError('sparse ' + cmd, 'invalid arguments')
    _config(ui, repo, pats, opts, force=opts.get('force'), **{cmd: True})
|
|
|
|
@subcmd('importrules', _common_config_opts, _('[OPTION]... [FILE]...'))
def _importsubcmd(cmd, ui, repo, *pats, **opts):
    """Directly import sparse profile rules

    Accepts a path to a file containing rules in the .hgsparse format.

    This allows you to add *include*, *exclude* and *enable* rules
    in bulk. Like the include, exclude and enable subcommands, the
    changes are applied immediately.

    """
    # Thin wrapper: all the work happens in _import.
    _import(ui, repo, pats, opts, force=opts.get('force'))
|
|
|
|
@subcmd('clear', _common_config_opts, _('[OPTION]...'))
def _clearsubcmd(cmd, ui, repo, *pats, **opts):
    """Clear local sparse rules

    Removes all local include and exclude rules, while leaving
    any enabled profiles in place.

    """
    # Thin wrapper: all the work happens in _clear.
    _clear(ui, repo, pats, force=opts.get('force'))
|
|
|
|
@subcmd('refresh', _common_config_opts, _('[OPTION]...'))
def _refreshsubcmd(cmd, ui, repo, *pats, **opts):
    """Refreshes the files on disk based on the sparse rules

    This is only necessary if .hg/sparse was changed by hand.

    """
    force = opts.get('force')
    with repo.wlock():
        c = _refresh(ui, repo, repo.status(), repo.sparsematch(), force)
        # Report file counts only; rule counts did not change (zeros).
        fcounts = map(len, c)
        _verbose_output(ui, opts, 0, 0, 0, *fcounts)
|
|
|
|
@subcmd('cwd')
def _cwdsubcmd(cmd, ui, repo, *pats, **opts):
    """List all names in this directory

    The list includes any names that are excluded by the current sparse
    checkout; these are annotated with a hyphen ('-') before the name.

    """
    # Thin wrapper: all the work happens in _cwdlist.
    _cwdlist(repo)
|
|
|
|
def _config(ui, repo, pats, opts, include=False, exclude=False, reset=False,
            delete=False, enableprofile=False, disableprofile=False,
            force=False):
    """
    Perform a sparse config update. Only one of the kwargs may be specified.

    On failure the previous on-disk configuration is restored before the
    exception propagates.
    """
    # Use the context-manager form of wlock for consistency with _import
    # and _clear; the previous wlock()/try/finally/release() form behaved
    # identically.
    with repo.wlock():
        oldsparsematch = repo.sparsematch()

        # Read the current (old) sparse configuration, if any.
        if repo.vfs.exists('sparse'):
            raw = repo.vfs.read('sparse')
            oldinclude, oldexclude, oldprofiles = map(
                set, repo.readsparseconfig(raw))
        else:
            oldinclude = set()
            oldexclude = set()
            oldprofiles = set()

        try:
            if reset:
                newinclude = set()
                newexclude = set()
                newprofiles = set()
            else:
                newinclude = set(oldinclude)
                newexclude = set(oldexclude)
                newprofiles = set(oldprofiles)

            if any(os.path.isabs(pat) for pat in pats):
                err = _('paths cannot be absolute')
                raise error.Abort(err)

            # Decide whether user-supplied patterns are cwd-relative or
            # repo-root-relative, governed by the sparse.* config options.
            adjustpats = ((include or exclude or delete) and
                not ui.configbool('sparse', 'includereporootpaths', False))
            adjustpats |= ((enableprofile or disableprofile) and
                not ui.configbool('sparse', 'enablereporootpaths', True))
            if adjustpats:
                # supplied file patterns should be treated as relative
                # to current working dir, so we need to convert them first
                root, cwd = repo.root, repo.getcwd()
                abspats = []
                for kindpat in pats:
                    kind, pat = matchmod._patsplit(kindpat, None)
                    if kind in cwdrealtivepatkinds or kind is None:
                        kindpat = ((kind + ':' if kind else '') +
                                   pathutil.canonpath(root, cwd, pat))
                    abspats.append(kindpat)
                pats = abspats

            # Snapshot status before rewriting the config so _refresh can
            # detect pending changes against the old state.
            oldstatus = repo.status()
            if include:
                newinclude.update(pats)
            elif exclude:
                newexclude.update(pats)
            elif enableprofile:
                newprofiles.update(pats)
            elif disableprofile:
                newprofiles.difference_update(pats)
            elif delete:
                newinclude.difference_update(pats)
                newexclude.difference_update(pats)

            repo.writesparseconfig(newinclude, newexclude, newprofiles)
            fcounts = map(
                len, _refresh(ui, repo, oldstatus, oldsparsematch, force))

            # Net change in rule counts, for --verbose / templated output.
            profilecount = (len(newprofiles - oldprofiles) -
                            len(oldprofiles - newprofiles))
            includecount = (len(newinclude - oldinclude) -
                            len(oldinclude - newinclude))
            excludecount = (len(newexclude - oldexclude) -
                            len(oldexclude - newexclude))
            _verbose_output(
                ui, opts, profilecount, includecount, excludecount, *fcounts)
        except Exception:
            # Roll the on-disk config back before propagating the error.
            repo.writesparseconfig(oldinclude, oldexclude, oldprofiles)
            raise
|
|
|
|
def _import(ui, repo, files, opts, force=False):
    """Import rules from the given .hgsparse-format files.

    Only rules that are not already part of the active configuration
    (the working copy parents' rules) are added on top of the local
    config. On refresh failure the previous config is restored.
    """
    with repo.wlock():
        # load union of current active profile
        revs = [repo.changelog.rev(node) for node in
                repo.dirstate.parents() if node != nullid]

        # read current configuration
        raw = ''
        if repo.vfs.exists('sparse'):
            raw = repo.vfs.read('sparse')
        oincludes, oexcludes, oprofiles = repo.readsparseconfig(raw)
        includes, excludes, profiles = map(
            set, (oincludes, oexcludes, oprofiles))

        # all active rules
        aincludes, aexcludes, aprofiles = set(), set(), set()
        for rev in revs:
            rincludes, rexcludes, rprofiles = repo.getsparsepatterns(rev)
            aincludes.update(rincludes)
            aexcludes.update(rexcludes)
            aprofiles.update(rprofiles)

        # import rules on top; only take in rules that are not yet
        # part of the active rules.
        changed = False
        for file in files:
            with util.posixfile(util.expandpath(file)) as importfile:
                iincludes, iexcludes, iprofiles = repo.readsparseconfig(
                    importfile.read(), filename=file)
                # Size comparison detects whether this file added anything.
                oldsize = len(includes) + len(excludes) + len(profiles)
                includes.update(iincludes - aincludes)
                excludes.update(iexcludes - aexcludes)
                profiles.update(set(iprofiles) - aprofiles)
                if len(includes) + len(excludes) + len(profiles) > oldsize:
                    changed = True

        profilecount = includecount = excludecount = 0
        fcounts = (0, 0, 0)

        if changed:
            profilecount = len(profiles - aprofiles)
            includecount = len(includes - aincludes)
            excludecount = len(excludes - aexcludes)

            oldstatus = repo.status()
            oldsparsematch = repo.sparsematch()
            repo.writesparseconfig(includes, excludes, profiles)

            try:
                fcounts = map(
                    len, _refresh(ui, repo, oldstatus, oldsparsematch, force))
            except Exception:
                # Restore the previous config before propagating the error.
                repo.writesparseconfig(oincludes, oexcludes, oprofiles)
                raise

        _verbose_output(ui, opts, profilecount, includecount, excludecount,
                        *fcounts)
|
|
|
|
def _clear(ui, repo, files, force=False):
    """Drop all local include/exclude rules, keeping enabled profiles."""
    with repo.wlock():
        if repo.vfs.exists('sparse'):
            raw = repo.vfs.read('sparse')
        else:
            raw = ''
        includes, excludes, profiles = repo.readsparseconfig(raw)

        # Nothing to do when there are no local rules.
        if not (includes or excludes):
            return

        oldstatus = repo.status()
        oldsparsematch = repo.sparsematch()
        repo.writesparseconfig(set(), set(), profiles)
        _refresh(ui, repo, oldstatus, oldsparsematch, force)
|
|
|
|
def _refresh(ui, repo, origstatus, origsparsematch, force):
    """Refreshes which files are on disk by comparing the old status and
    sparsematch with the new sparsematch.

    Will raise an exception if a file with pending changes is being excluded
    or included (unless force=True).

    Returns a tuple of file-name lists: (added, dropped, lookup), where
    lookup holds files brought back in an unknown (dirty) state.
    """
    modified, added, removed, deleted, unknown, ignored, clean = origstatus

    # Verify there are no pending changes
    pending = set()
    pending.update(modified)
    pending.update(added)
    pending.update(removed)
    sparsematch = repo.sparsematch()
    abort = False
    if len(pending) > 0:
        ui.note(_('verifying pending changes for refresh\n'))
        for file in pending:
            if not sparsematch(file):
                ui.warn(_("pending changes to '%s'\n") % file)
                abort = not force
        if abort:
            raise error.Abort(_("could not update sparseness due to " +
                                "pending changes"))

    # Calculate actions
    ui.note(_('calculating actions for refresh\n'))
    with progress.spinner(ui, 'populating file set'):
        dirstate = repo.dirstate
        ctx = repo['.']
        added = []
        lookup = []
        dropped = []
        mf = ctx.manifest()
        files = set(mf)

    # file -> merge-style action tuple, applied via mergemod below.
    actions = {}

    with progress.bar(ui, _('calculating'), total=len(files)) as prog:
        for file in files:
            prog.value += 1

            old = origsparsematch(file)
            new = sparsematch(file)
            # Add files that are newly included, or that don't exist in
            # the dirstate yet.
            if (new and not old) or (old and new and not file in dirstate):
                fl = mf.flags(file)
                if repo.wvfs.exists(file):
                    # 'e': file already on disk, just fix its exec flag.
                    actions[file] = ('e', (fl,), '')
                    lookup.append(file)
                else:
                    # 'g': get a clean copy from the manifest.
                    actions[file] = ('g', (fl, False), '')
                    added.append(file)
            # Drop files that are newly excluded, or that still exist in
            # the dirstate.
            elif ((old and not new)
                  or (not (old or new) and file in dirstate)):
                dropped.append(file)
                if file not in pending:
                    # 'r': remove from disk (pending files are kept).
                    actions[file] = ('r', [], '')

    # Verify there are no pending changes in newly included files
    if len(lookup) > 0:
        ui.note(_('verifying no pending changes in newly included files\n'))
        abort = False
        for file in lookup:
            ui.warn(_("pending changes to '%s'\n") % file)
            abort = not force
        if abort:
            raise error.Abort(_("cannot change sparseness due to " +
                                "pending changes (delete the files or use --force " +
                                "to bring them back dirty)"))

    # Check for files that were only in the dirstate.
    for file, state in dirstate.iteritems():
        if not file in files:
            old = origsparsematch(file)
            new = sparsematch(file)
            if old and not new:
                dropped.append(file)

    # Apply changes to disk
    if len(actions) > 0:
        ui.note(_('applying changes to disk (%d actions)\n') % len(actions))
        # Pre-seed every merge action type mergemod.applyupdates expects.
        typeactions = dict((m, [])
                           for m in 'a f g am cd dc r dm dg m e k p pr'.split())

        with progress.bar(ui, _('applying'), total=len(actions)) as prog:
            for f, (m, args, msg) in actions.iteritems():
                prog.value += 1
                if m not in typeactions:
                    typeactions[m] = []
                typeactions[m].append((f, args, msg))
        mergemod.applyupdates(repo, typeactions, repo[None], repo['.'], False)

    # Fix dirstate
    filecount = len(added) + len(dropped) + len(lookup)
    if filecount > 0:
        ui.note(_('updating dirstate\n'))
        with progress.bar(ui, _('recording'), _('files'), filecount) as prog:
            for file in added:
                prog.value += 1
                dirstate.normal(file)

            for file in dropped:
                prog.value += 1
                dirstate.drop(file)

            for file in lookup:
                prog.value += 1
                # File exists on disk, and we're bringing it back in an unknown
                # state.
                dirstate.normallookup(file)

    return added, dropped, lookup
|
|
|
|
def _verbose_output(ui, opts, profilecount, includecount, excludecount, added,
                    dropped, lookup):
    """Produce --verbose and templatable output

    This specifically enables -Tjson, providing machine-readable stats on how
    the sparse profile changed.

    profilecount/includecount/excludecount are net rule-count changes;
    added/dropped/lookup are counts of files affected on disk.
    """
    with ui.formatter('sparse', opts) as fm:
        fm.startitem()
        fm.condwrite(ui.verbose, 'profiles_added', 'Profile # change: %d\n',
                     profilecount)
        fm.condwrite(ui.verbose, 'include_rules_added',
                     'Include rule # change: %d\n', includecount)
        fm.condwrite(ui.verbose, 'exclude_rules_added',
                     'Exclude rule # change: %d\n', excludecount)
        # In 'plain' verbose mode, mergemod.applyupdates already outputs what
        # files are added or removed outside of the templating formatter
        # framework. No point in repeating ourselves in that case.
        if not fm.isplain():
            fm.condwrite(ui.verbose, 'files_added', 'Files added: %d\n',
                         added)
            fm.condwrite(ui.verbose, 'files_dropped', 'Files dropped: %d\n',
                         dropped)
            fm.condwrite(ui.verbose, 'files_conflicting',
                         'Files conflicting: %d\n', lookup)
|
|
|
|
def _cwdlist(repo):
    """ List the contents in the current directory. Annotate
    the files in the sparse profile.

    Entries excluded by the current sparse checkout are prefixed
    with '-'; included entries with a space.
    """
    ctx = repo['.']
    mf = ctx.manifest()

    # Get the root of the repo so that we remove the content of
    # the root from the current working directory
    root = repo.root
    cwd = util.normpath(pycompat.getcwd())
    cwd = os.path.relpath(cwd, root)
    cwd = '' if cwd == os.curdir else cwd + pycompat.ossep
    if cwd.startswith(os.pardir + pycompat.ossep):
        raise error.Abort(
            _("the current working directory should begin "
              "with the root %s") % root)

    matcher = matchmod.match(
        repo.root, repo.getcwd(),
        patterns=['path:' + cwd])
    files = mf.matches(matcher)

    sparsematch = repo.sparsematch(ctx.rev())
    checkedoutentries = set()
    allentries = set()
    cwdlength = len(cwd)

    for filepath in files:
        # First path component below cwd: a direct file or directory name.
        entryname = filepath[cwdlength:].partition(pycompat.ossep)[0]

        allentries.add(entryname)
        if sparsematch(filepath):
            checkedoutentries.add(entryname)

    ui = repo.ui
    for entry in sorted(allentries):
        marker = ' ' if entry in checkedoutentries else '-'
        ui.status("%s %s\n" % (marker, entry))
|
|
|
|
class forceincludematcher(matchmod.basematcher):
    """Wrap a matcher so that a fixed set of paths always matches.

    Any path in ``includes`` matches unconditionally; every other path
    is delegated to the wrapped matcher.
    """
    def __init__(self, matcher, includes):
        super(forceincludematcher, self).__init__(matcher._root, matcher._cwd)
        self._matcher = matcher
        self._includes = includes

    def __call__(self, value):
        if value in self._includes:
            return True
        return self._matcher(value)

    def __repr__(self):
        return '<forceincludematcher matcher=%r includes=%r>' % (
            self._matcher, self._includes)

    def visitdir(self, dir):
        # Descend into any directory that is a prefix of a forced include.
        for path in self._includes:
            if path.startswith(dir):
                return True
        return self._matcher.visitdir(dir)

    def hash(self):
        # Stable digest: wrapped matcher's hash plus the sorted includes.
        digest = hashlib.sha1()
        digest.update(_hashmatcher(self._matcher))
        for include in sorted(self._includes):
            digest.update(include + '\0')
        return digest.hexdigest()
|
|
|
|
class unionmatcher(matchmod.unionmatcher):
    """Union matcher that can contribute to sparse config hashing."""
    def hash(self):
        # Digest the member matchers' hashes in order.
        digest = hashlib.sha1()
        for member in self._matchers:
            digest.update(_hashmatcher(member))
        return digest.hexdigest()
|
|
|
|
class negatematcher(matchmod.basematcher):
    """Matcher matching exactly the paths its wrapped matcher rejects."""
    def __init__(self, matcher):
        super(negatematcher, self).__init__(matcher._root, matcher._cwd)
        self._matcher = matcher

    def __call__(self, value):
        return not self._matcher(value)

    def __repr__(self):
        return '<negatematcher matcher=%r>' % self._matcher

    def hash(self):
        # Prefix with 'negate' so a negated matcher never hashes the same
        # as the matcher it wraps.
        digest = hashlib.sha1()
        digest.update('negate')
        digest.update(_hashmatcher(self._matcher))
        return digest.hexdigest()
|
|
|
|
def _hashmatcher(matcher):
    """Return a stable hex digest identifying *matcher*.

    Matchers exposing a hash() method provide their own digest; for all
    others the repr() is hashed as a fallback.
    """
    if util.safehasattr(matcher, 'hash'):
        return matcher.hash()

    digest = hashlib.sha1()
    digest.update(repr(matcher))
    return digest.hexdigest()
|