# shallowrepo.py - shallow repository that uses remote filelogs
#
# Copyright 2013 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from hgext3rd.extutil import runshellcommand

from mercurial.i18n import _
from mercurial.node import hex, nullid, nullrev
from mercurial import error, localrepo, match, scmutil, util

from . import (
    connectionpool,
    constants,
    fileserverclient,
    remotefilectx,
    remotefilelog,
    shallowutil,
)
from .contentstore import (
    remotecontentstore,
    remotefilelogcontentstore,
    unioncontentstore,
)
from .metadatastore import (
    remotefilelogmetadatastore,
    remotemetadatastore,
    unionmetadatastore,
)
from .datapack import datapackstore
from .historypack import historypackstore

import os

requirement = "remotefilelog"
_prefetching = _('prefetching')
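
# Example configuration for the options read in this module (all values are
# illustrative only):
#
#   [remotefilelog]
#   reponame = myrepo
#   fallbackpath = ssh://hg.example.com/myrepo
#   fetchpacks = True
#   fastdatapack = True
#   includepattern = foo/**, bar/**
#   excludepattern = foo/baz/**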

# These make*stores functions are global so that other extensions can replace
# them.
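# For example, another extension could wrap one of these along the following
# lines (a sketch only; the module path and the custom store class are
# hypothetical):
#
#   from remotefilelog import shallowrepo
#
#   origmakeunionstores = shallowrepo.makeunionstores
#   def mymakeunionstores(repo):
#       origmakeunionstores(repo)
#       repo.contentstore = myloggingstore(repo.contentstore)
#   shallowrepo.makeunionstores = mymakeunionstores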

def makelocalstores(repo):
    """In-repo stores, like .hg/store/data; cannot be discarded."""
    localpath = os.path.join(repo.svfs.vfs.base, 'data')
    if not os.path.exists(localpath):
        os.makedirs(localpath)

    # Instantiate local data stores
    localcontent = remotefilelogcontentstore(repo, localpath, repo.name,
                                             shared=False)
    localmetadata = remotefilelogmetadatastore(repo, localpath, repo.name,
                                               shared=False)
    return localcontent, localmetadata

def makecachestores(repo):
    """Typically machine-wide, cache of remote data; can be discarded."""
    # Instantiate shared cache stores
    cachepath = shallowutil.getcachepath(repo.ui)
    cachecontent = remotefilelogcontentstore(repo, cachepath, repo.name,
                                             shared=True)
    cachemetadata = remotefilelogmetadatastore(repo, cachepath, repo.name,
                                               shared=True)

    repo.sharedstore = cachecontent
    repo.shareddatastores.append(cachecontent)
    repo.sharedhistorystores.append(cachemetadata)

    return cachecontent, cachemetadata

def makeremotestores(repo, cachecontent, cachemetadata):
    """These stores fetch data from a remote server."""
    # Instantiate remote stores
    repo.fileservice = fileserverclient.fileserverclient(repo)
    remotecontent = remotecontentstore(repo.ui, repo.fileservice,
                                       cachecontent)
    remotemetadata = remotemetadatastore(repo.ui, repo.fileservice,
                                         cachemetadata)
    return remotecontent, remotemetadata

def makepackstores(repo):
    """Packs are more efficient (to read from) cache stores."""
    # Instantiate pack stores
    packpath = shallowutil.getcachepackpath(repo,
                                            constants.FILEPACK_CATEGORY)
    packcontentstore = datapackstore(
        repo.ui,
        packpath,
        usecdatapack=repo.ui.configbool('remotefilelog', 'fastdatapack'))
    packmetadatastore = historypackstore(repo.ui, packpath)

    repo.shareddatastores.append(packcontentstore)
    repo.sharedhistorystores.append(packmetadatastore)
    shallowutil.reportpackmetrics(repo.ui, 'filestore', packcontentstore,
                                  packmetadatastore)
    return packcontentstore, packmetadatastore

def makeunionstores(repo):
    """Union stores iterate the other stores and return the first result."""
    repo.shareddatastores = []
    repo.sharedhistorystores = []

    packcontentstore, packmetadatastore = makepackstores(repo)
    cachecontent, cachemetadata = makecachestores(repo)
    localcontent, localmetadata = makelocalstores(repo)
    remotecontent, remotemetadata = makeremotestores(repo, cachecontent,
                                                     cachemetadata)

    # Instantiate union stores
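    # Argument order determines lookup priority: pack files are tried first,
    # then the shared cache, then local data, with the remote store last.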
    repo.contentstore = unioncontentstore(packcontentstore, cachecontent,
                                          localcontent, remotecontent,
                                          writestore=localcontent)
    repo.metadatastore = unionmetadatastore(packmetadatastore, cachemetadata,
                                            localmetadata, remotemetadata,
                                            writestore=localmetadata)

    fileservicedatawrite = cachecontent
    fileservicehistorywrite = cachemetadata
    if repo.ui.configbool('remotefilelog', 'fetchpacks'):
        fileservicedatawrite = packcontentstore
        fileservicehistorywrite = packmetadatastore
    repo.fileservice.setstore(repo.contentstore, repo.metadatastore,
                              fileservicedatawrite, fileservicehistorywrite)
    shallowutil.reportpackmetrics(repo.ui, 'filestore',
                                  packcontentstore, packmetadatastore)

def wraprepo(repo):
    class shallowrepository(repo.__class__):
        @util.propertycache
        def name(self):
            return self.ui.config('remotefilelog', 'reponame', '')

        @util.propertycache
        def fallbackpath(self):
            path = repo.ui.config(
                "remotefilelog", "fallbackpath",
                # fallbackrepo is the old, deprecated name
                repo.ui.config("remotefilelog", "fallbackrepo",
                               repo.ui.config("paths", "default")))
            if not path:
                raise error.Abort("no remotefilelog server "
                                  "configured - is your .hg/hgrc trusted?")

            return path

        def maybesparsematch(self, *revs, **kwargs):
            '''
            A wrapper that allows remotefilelog to invoke sparsematch() if
            this is a sparse repository, and returns None otherwise.
            '''
            if util.safehasattr(self, 'sparsematch'):
                return self.sparsematch(*revs, **kwargs)

            return None

        def file(self, f):
            if f[0] == '/':
                f = f[1:]

            if self.shallowmatch(f):
                return remotefilelog.remotefilelog(self.svfs, f, self)
            else:
                return super(shallowrepository, self).file(f)

        def filectx(self, path, changeid=None, fileid=None):
            if self.shallowmatch(path):
                return remotefilectx.remotefilectx(self, path, changeid,
                                                   fileid)
            else:
                return super(shallowrepository, self).filectx(path, changeid,
                                                              fileid)

        @localrepo.unfilteredmethod
        def commitctx(self, ctx, error=False):
            """Add a new revision to the current repository.

            Revision information is passed via the context argument.
            """

            # Some contexts already have manifest nodes and don't need any
            # prefetching (for example, if we're just editing a commit
            # message we can reuse the manifest).
            if not ctx.manifestnode():
                # prefetch files that will likely be compared
                m1 = ctx.p1().manifest()
                files = []
                for f in ctx.modified() + ctx.added():
                    fparent1 = m1.get(f, nullid)
                    if fparent1 != nullid:
                        files.append((f, hex(fparent1)))
                self.fileservice.prefetch(files)
            return super(shallowrepository, self).commitctx(ctx,
                                                            error=error)

        def backgroundprefetch(self, revs, base=None, repack=False, pats=None,
                               opts=None):
            """Runs prefetch in the background with an optional repack."""
            cmd = util.hgcmd() + ['-R', repo.origroot, 'prefetch']
            if repack:
                cmd.append('--repack')
            if revs:
                cmd += ['-r', revs]
            cmd = ' '.join(map(util.shellquote, cmd))
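
            # runshellcommand spawns the assembled hg command as a detached
            # background process, so the current command does not have to
            # wait for the prefetch to finish.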
            runshellcommand(cmd, os.environ)

        def prefetch(self, revs, base=None, pats=None, opts=None):
            """Prefetches all the necessary file revisions for the given revs.

            Optionally runs repack in the background.
            """
            with repo._lock(repo.svfs, 'prefetchlock', True, None, None,
                            _('prefetching in %s') % repo.origroot):
                self._prefetch(revs, base, pats, opts)

        def _prefetch(self, revs, base=None, pats=None, opts=None):
            fallbackpath = self.fallbackpath
            if fallbackpath:
                # If we know a rev is on the server, we should fetch the
                # server version of those files, since our local file
                # versions might become obsolete if the local commits are
                # stripped.
                localrevs = repo.revs('outgoing(%s)', fallbackpath)
                if base is not None and base != nullrev:
                    serverbase = list(repo.revs('first(reverse(::%s) - %ld)',
                                                base, localrevs))
                    if serverbase:
                        base = serverbase[0]
            else:
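                # Without a configured fallback server every revision in the
                # repo counts as local, so no files are routed to the
                # server-only fetch below.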
                localrevs = repo

            mfl = repo.manifestlog
            mfrevlog = mfl._revlog
            if base is not None:
                mfdict = mfl[repo[base].manifestnode()].read()
                skip = set(mfdict.iteritems())
            else:
                skip = set()

            # Copy the skip set to start large and avoid constant resizing,
            # and since it's likely to be very similar to the prefetch set.
            files = skip.copy()
            serverfiles = skip.copy()
            visited = set()
            visited.add(nullrev)
            revnum = 0
            revcount = len(revs)
            self.ui.progress(_prefetching, revnum, total=revcount)
            for rev in sorted(revs):
                ctx = repo[rev]
                if pats:
                    m = scmutil.match(ctx, pats, opts)
                sparsematch = repo.maybesparsematch(rev)

                mfnode = ctx.manifestnode()
                mfrev = mfrevlog.rev(mfnode)

                # Decompressing manifests is expensive.
                # When possible, only read the deltas.
                p1, p2 = mfrevlog.parentrevs(mfrev)
                if p1 in visited and p2 in visited:
                    mfdict = mfl[mfnode].readfast()
                else:
                    mfdict = mfl[mfnode].read()

                diff = mfdict.iteritems()
                if pats:
                    diff = (pf for pf in diff if m(pf[0]))
                if sparsematch:
                    diff = (pf for pf in diff if sparsematch(pf[0]))
                if rev not in localrevs:
                    serverfiles.update(diff)
                else:
                    files.update(diff)

                visited.add(mfrev)
                revnum += 1
                self.ui.progress(_prefetching, revnum, total=revcount)

            files.difference_update(skip)
            serverfiles.difference_update(skip)
            self.ui.progress(_prefetching, None)

            # Fetch files known to be on the server
            if serverfiles:
                results = [(path, hex(fnode)) for (path, fnode) in serverfiles]
                repo.fileservice.prefetch(results, force=True)

            # Fetch files that may or may not be on the server
            if files:
                results = [(path, hex(fnode)) for (path, fnode) in files]
                repo.fileservice.prefetch(results)

        def close(self):
            super(shallowrepository, self).close()
            self.connectionpool.close()

    repo.__class__ = shallowrepository

    repo.shallowmatch = match.always(repo.root, '')
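    # By default every file is treated as shallow; the include/exclude
    # patterns read below can narrow the set of shallow files.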

    makeunionstores(repo)

    repo.includepattern = repo.ui.configlist("remotefilelog", "includepattern",
                                             None)
    repo.excludepattern = repo.ui.configlist("remotefilelog", "excludepattern",
                                             None)
    if not util.safehasattr(repo, 'connectionpool'):
        repo.connectionpool = connectionpool.connectionpool(repo)

    if repo.includepattern or repo.excludepattern:
        repo.shallowmatch = match.match(repo.root, '', None,
            repo.includepattern, repo.excludepattern)