# fileserverclient.py - client for communicating with the cache process
#
# Copyright 2013 Facebook, Inc.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from mercurial.i18n import _
from mercurial import util, sshpeer
import os, socket, lz4, time, grp
# Statistics for debugging, accumulated across all fetches in this process.
fetchcost = 0    # total wall-clock seconds spent fetching (updated in prefetch)
fetches = 0      # number of fetch round trips issued (updated in prefetch)
fetched = 0      # number of file/node pairs requested (updated in prefetch)
fetchmisses = 0  # number of ids the cache process did not have (updated in request)

# Progress topic passed to ui.progress while downloading.
_downloading = _('downloading')

# Module-level client singleton.
# NOTE(review): never assigned in this file -- presumably set up by the
# extension's setup code elsewhere; confirm before relying on it.
client = None
def makedirs(root, path, owner):
    """Create *path* (including intermediate directories), then walk back
    up toward *root* (exclusive), marking every directory owned by *owner*
    as group-writable with the setgid bit (mode 02775).
    """
    os.makedirs(path)

    current = path
    while current != root:
        # Only adjust permissions on directories we actually own; other
        # components may belong to a different cache user.
        if os.stat(current).st_uid == owner:
            os.chmod(current, 0o2775)
        current = os.path.dirname(current)
def getcachekey(file, id):
    """Return the cache-relative path for a file/node pair: the sha1 of
    the file path as the directory, with the node id as the filename.
    """
    return os.path.join(util.sha1(file).hexdigest(), id)
class fileserverclient(object):
    """A client for requesting files from the remote file server.

    Requests are first sent to a local cache process (spawned lazily by
    connect()); ids the cache process reports as missing are fetched over
    an ssh 'getfiles' stream from the fallback repo, written into
    self.cachepath, and pushed back to the cache process.
    """
    def __init__(self, ui):
        self.ui = ui
        self.cachepath = ui.config("remotefilelog", "cachepath")
        self.cacheprocess = ui.config("remotefilelog", "cacheprocess")
        self.debugoutput = ui.configbool("remotefilelog", "debug")

        # Pipes to the cache process; created lazily by connect().
        self.pipeo = self.pipei = self.pipee = None

        if not os.path.exists(self.cachepath):
            # Create the cache root group-writable (setgid) so it can be
            # shared between users in the configured cache group.
            oldumask = os.umask(0o002)
            try:
                os.makedirs(self.cachepath)

                groupname = ui.config("remotefilelog", "cachegroup")
                if groupname:
                    gid = grp.getgrnam(groupname).gr_gid
                    if gid:
                        os.chown(self.cachepath, os.getuid(), gid)
                        os.chmod(self.cachepath, 0o2775)
            finally:
                os.umask(oldumask)

    def request(self, repo, fileids):
        """Takes a list of filename/node pairs and fetches them from the
        server. Files are stored in the self.cachepath.
        A list of nodes that the server couldn't find is returned.
        If the connection fails, an exception is raised.
        """
        if not self.pipeo:
            self.connect()

        count = len(fileids)
        request = "get\n%d\n" % count
        idmap = {}
        for file, id in fileids:
            pathhash = util.sha1(file).hexdigest()
            fullid = "%s/%s" % (pathhash, id)
            request += fullid + "\n"
            idmap[fullid] = file

        self.pipei.write(request)
        self.pipei.flush()

        missing = []
        total = count
        self.ui.progress(_downloading, 0, total=count)

        fallbackrepo = repo.ui.config("remotefilelog", "fallbackrepo",
                                      repo.ui.config("paths", "default"))

        remote = None
        missed = []
        count = 0
        while True:
            # The cache process streams back one line per miss, "_hits_"
            # progress reports, and a terminating "0".
            missingid = self.pipeo.readline()[:-1]
            if not missingid:
                raise util.Abort(_("error downloading file contents: " +
                                   "connection closed early"))
            if missingid == "0":
                break
            if missingid.startswith("_hits_"):
                # receive progress reports
                parts = missingid.split("_")
                count += int(parts[2])
                self.ui.progress(_downloading, count, total=total)
                continue

            missed.append(missingid)

            # fetch from the master
            if not remote:
                verbose = self.ui.verbose
                try:
                    # When verbose is true, sshpeer prints 'running ssh...'
                    # to stdout, which can interfere with some command
                    # outputs
                    self.ui.verbose = False
                    remote = sshpeer.sshpeer(self.ui, fallbackrepo)
                    remote._callstream("getfiles")
                finally:
                    self.ui.verbose = verbose

            # missingid is "<40-char pathhash>/<40-char node>"; send the
            # node followed by the file name to the getfiles stream.
            id = missingid[-40:]
            file = idmap[missingid]
            sshrequest = "%s%s\n" % (id, file)
            remote.pipeo.write(sshrequest)
            remote.pipeo.flush()

        count = total - len(missed)
        self.ui.progress(_downloading, count, total=total)

        uid = os.getuid()
        oldumask = os.umask(0o002)
        try:
            # receive cache misses from master
            if missed:
                global fetchmisses
                fetchmisses += len(missed)

                # process remote
                pipei = remote.pipei
                for id in missed:
                    # Each reply is a size line followed by that many
                    # bytes of lz4-compressed file data.
                    size = int(pipei.readline()[:-1])
                    data = pipei.read(size)

                    count += 1
                    self.ui.progress(_downloading, count, total=total)

                    idcachepath = os.path.join(self.cachepath, id)
                    dirpath = os.path.dirname(idcachepath)
                    if not os.path.exists(dirpath):
                        makedirs(self.cachepath, dirpath, uid)
                    # Write in binary mode: the payload is arbitrary
                    # decompressed bytes, not text ("w" would corrupt it
                    # on platforms with newline translation).
                    f = open(idcachepath, "wb")
                    try:
                        f.write(lz4.decompress(data))
                    finally:
                        f.close()
                    stat = os.stat(idcachepath)
                    if stat.st_uid == uid:
                        os.chmod(idcachepath, 0o0664)

                remote.cleanup()
                remote = None

                # send to memcache
                count = len(missed)
                request = "set\n%d\n%s\n" % (count, "\n".join(missed))
                self.pipei.write(request)
                self.pipei.flush()

            self.ui.progress(_downloading, None)

            # mark ourselves as a user of this cache
            repospath = os.path.join(self.cachepath, "repos")
            reposfile = open(repospath, 'a')
            try:
                reposfile.write(os.path.dirname(repo.path) + "\n")
            finally:
                reposfile.close()
            stat = os.stat(repospath)
            if stat.st_uid == uid:
                os.chmod(repospath, 0o0664)
        finally:
            os.umask(oldumask)

        # NOTE(review): nothing is ever appended to this list here; server
        # failures raise instead. Kept for interface compatibility.
        return missing

    def connect(self):
        """Spawn the cache process and attach its stdio pipes."""
        cmd = "%s %s" % (self.cacheprocess, self.cachepath)
        self.pipei, self.pipeo, self.pipee, self.subprocess = util.popen4(cmd)

    def close(self):
        """Report fetch statistics (if debug is on) and shut down the
        cache process cleanly."""
        if fetches and self.debugoutput:
            self.ui.warn(("%s files fetched over %d fetches - " +
                          "(%d misses, %0.2f%% hit ratio) over %0.2fs\n") % (
                fetched,
                fetches,
                fetchmisses,
                float(fetched - fetchmisses) / float(fetched) * 100.0,
                fetchcost))

        # if the process is still open, close the pipes
        if self.pipeo and self.subprocess.poll() is None:
            self.pipei.write("exit\n")
            self.pipei.close()
            self.pipeo.close()
            self.pipee.close()
            self.subprocess.wait()
            del self.subprocess
            self.pipeo = None
            self.pipei = None
            self.pipee = None

    def prefetch(self, repo, fileids):
        """downloads the given file versions to the cache
        """
        storepath = repo.sopener.vfs.base

        missingids = []
        for file, id in fileids:
            # hack
            # - we don't use .hgtags
            # - workingctx produces ids with length 42,
            #   which we skip since they aren't in any cache
            if file == '.hgtags' or len(id) == 42:
                continue

            # Skip anything already present in the shared cache or in the
            # repo's local store.
            key = getcachekey(file, id)
            idcachepath = os.path.join(self.cachepath, key)
            idlocalpath = os.path.join(storepath, 'data', key)
            if os.path.exists(idcachepath) or os.path.exists(idlocalpath):
                continue

            missingids.append((file, id))

        if missingids:
            global fetches, fetched, fetchcost
            fetches += 1
            fetched += len(missingids)
            start = time.time()
            missingids = self.request(repo, missingids)
            if missingids:
                raise util.Abort(_("unable to download %d files") %
                                 len(missingids))
            fetchcost += time.time() - start