#!/usr/bin/env python
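"""Unit tests for remotefilelog history packs.

These exercise the history pack reader and writer exposed through the Rust
revisionstore bindings: adding single and multiple entries, large packs that
spill into the large fanout table, rejection of unknown version headers, and
reading from a mutable pack before it is flushed.
"""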
from __future__ import absolute_import
import hashlib
import os
import random
import shutil
import stat
import struct
import tempfile
import unittest
import silenttestrunner
from bindings import revisionstore
from edenscm.hgext.remotefilelog.metadatastore import unionmetadatastore
from edenscm.mercurial import error, pycompat, ui as uimod, util
from edenscm.mercurial.node import nullid
from hghave import require
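
# Packs with more than SMALLFANOUTCUTOFF entries use the large (2^16-entry)
# fanout table, and their params.fanoutprefix is LARGEFANOUTPREFIX (see
# testLargePack below).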
SMALLFANOUTCUTOFF = int(2 ** 16 / 8)
LARGEFANOUTPREFIX = 2
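
# Python 3 has no xrange; alias it to range for compatibility.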
try:
xrange(0)
except NameError:
xrange = range
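

# Test cases shared by all historypack implementations; subclasses supply the
# concrete reader and writer (see rusthistpacktests below).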
class histpacktestsbase(object):
    def __init__(self, historypackreader, historypackwriter):
        self.historypackreader = historypackreader
        self.historypackwriter = historypackwriter

    def setUp(self):
        self.tempdirs = []

    def tearDown(self):
        for d in self.tempdirs:
            shutil.rmtree(d)

    def makeTempDir(self):
        tempdir = tempfile.mkdtemp()
        self.tempdirs.append(tempdir)
        return tempdir

def getHash(self, content):
        return hashlib.sha1(content).digest()

def getFakeHash(self):
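        # 20 random bytes: the length of a sha1 digest, standing in for a node.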
        return os.urandom(20)

def createPack(self, revisions=None):
"""Creates and returns a historypack containing the specified revisions.
`revisions` is a list of tuples, where each tuple contains a filanem,
node, p1node, p2node, and linknode.
"""
if revisions is None:
revisions = [
(
"filename",
self.getFakeHash(),
nullid,
nullid,
self.getFakeHash(),
None,
)
]
packdir = self.makeTempDir()
packer = self.historypackwriter(packdir)
for filename, node, p1, p2, linknode, copyfrom in revisions:
packer.add(filename, node, p1, p2, linknode, copyfrom)
path = packer.flush()
        return self.historypackreader(path)

def testAddSingle(self):
"""Test putting a single entry into a pack and reading it out.
"""
filename = "foo"
node = self.getFakeHash()
p1 = self.getFakeHash()
p2 = self.getFakeHash()
linknode = self.getFakeHash()
revisions = [(filename, node, p1, p2, linknode, None)]
pack = self.createPack(revisions)
actual = pack.getnodeinfo(filename, node)
self.assertEqual(p1, actual[0])
self.assertEqual(p2, actual[1])
        self.assertEqual(linknode, actual[2])

def testAddMultiple(self):
"""Test putting multiple unrelated revisions into a pack and reading
them out.
"""
revisions = []
for i in range(10):
filename = "foo-%s" % i
node = self.getFakeHash()
p1 = self.getFakeHash()
p2 = self.getFakeHash()
linknode = self.getFakeHash()
revisions.append((filename, node, p1, p2, linknode, None))
pack = self.createPack(revisions)
for filename, node, p1, p2, linknode, copyfrom in revisions:
actual = pack.getnodeinfo(filename, node)
self.assertEqual(p1, actual[0])
self.assertEqual(p2, actual[1])
self.assertEqual(linknode, actual[2])
            self.assertEqual(copyfrom, actual[3])

def testPackMany(self):
"""Pack many related and unrelated ancestors.
"""
# Build a random pack file
allentries = {}
ancestorcounts = {}
revisions = []
random.seed(0)
for i in range(100):
filename = "filename-%s" % i
entries = []
p2 = nullid
linknode = nullid
for j in range(random.randint(1, 100)):
node = self.getFakeHash()
p1 = nullid
if len(entries) > 0:
p1 = entries[random.randint(0, len(entries) - 1)]
entries.append(node)
revisions.append((filename, node, p1, p2, linknode, None))
allentries[(filename, node)] = (p1, p2, linknode)
if p1 == nullid:
ancestorcounts[(filename, node)] = 1
else:
newcount = ancestorcounts[(filename, p1)] + 1
                    ancestorcounts[(filename, node)] = newcount

# Must add file entries in reverse topological order
revisions = list(reversed(revisions))
pack = self.createPack(revisions)
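        # Read the entries back through a unionmetadatastore layered over the
        # pack, rather than querying the pack directly.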
store = unionmetadatastore(pack)
# Verify the pack contents
        for (filename, node), (ep1, ep2, elinknode) in pycompat.iteritems(allentries):
            ap1, ap2, alinknode, acopyfrom = store.getnodeinfo(filename, node)
            self.assertEqual(ap1, ep1)
            self.assertEqual(ap2, ep2)
            self.assertEqual(alinknode, elinknode)
            self.assertEqual(acopyfrom, None)

def testGetNodeInfo(self):
revisions = []
filename = "foo"
lastnode = nullid
for i in range(10):
node = self.getFakeHash()
revisions.append((filename, node, lastnode, nullid, nullid, None))
lastnode = node
pack = self.createPack(revisions)
# Test that getnodeinfo returns the expected results
for filename, node, p1, p2, linknode, copyfrom in revisions:
ap1, ap2, alinknode, acopyfrom = pack.getnodeinfo(filename, node)
self.assertEqual(ap1, p1)
self.assertEqual(ap2, p2)
self.assertEqual(alinknode, linknode)
            self.assertEqual(acopyfrom, copyfrom)

def testGetMissing(self):
"""Test the getmissing() api.
"""
revisions = []
filename = "foo"
for i in range(10):
node = self.getFakeHash()
p1 = self.getFakeHash()
p2 = self.getFakeHash()
linknode = self.getFakeHash()
revisions.append((filename, node, p1, p2, linknode, None))
pack = self.createPack(revisions)
missing = pack.getmissing([(filename, revisions[0][1])])
self.assertFalse(missing)
missing = pack.getmissing(
[(filename, revisions[0][1]), (filename, revisions[1][1])]
)
self.assertFalse(missing)
fakenode = self.getFakeHash()
missing = pack.getmissing([(filename, revisions[0][1]), (filename, fakenode)])
self.assertEqual(missing, [(filename, fakenode)])

        # Test getmissing on a non-existent filename
        missing = pack.getmissing([("bar", fakenode)])
        self.assertEqual(missing, [("bar", fakenode)])

def testBadVersionThrows(self):
pack = self.createPack()
path = pack.path() + ".histpack"
with open(path, "rb") as f:
raw = f.read()
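        # Overwrite the single version byte at the start of the pack with an
        # unsupported value.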
raw = struct.pack("!B", 255) + raw[1:]
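        # The flushed pack file is read-only on disk; add the write bit so the
        # corrupted bytes can be written back.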
os.chmod(path, os.stat(path).st_mode | stat.S_IWRITE)
with open(path, "wb+") as f:
f.write(raw)
        # A pack with an unrecognized version number must refuse to load.
        with self.assertRaises(error.RustError):
            self.historypackreader(pack.path())

def testLargePack(self):
"""Test creating and reading from a large pack with over X entries.
This causes it to use a 2^16 fanout table instead."""
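        # One entry past the small-fanout cutoff forces the large fanout table.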
total = SMALLFANOUTCUTOFF + 1
revisions = []
for i in xrange(total):
filename = "foo-%s" % i
node = self.getFakeHash()
p1 = self.getFakeHash()
p2 = self.getFakeHash()
linknode = self.getFakeHash()
revisions.append((filename, node, p1, p2, linknode, None))
pack = self.createPack(revisions)
if util.safehasattr(pack, "params"):
self.assertEqual(pack.params.fanoutprefix, LARGEFANOUTPREFIX)
for filename, node, p1, p2, linknode, copyfrom in revisions:
actual = pack.getnodeinfo(filename, node)
self.assertEqual(p1, actual[0])
self.assertEqual(p2, actual[1])
self.assertEqual(linknode, actual[2])
            self.assertEqual(copyfrom, actual[3])

def testReadingMutablePack(self):
"""Tests that the data written into a mutablehistorypack can be read out
before it has been finalized."""
packdir = self.makeTempDir()
packer = self.historypackwriter(packdir)
revisions = []
filename = "foo"
lastnode = nullid
for i in range(5):
node = self.getFakeHash()
revisions.append((filename, node, lastnode, nullid, nullid, None))
            lastnode = node

filename = "bar"
lastnode = nullid
for i in range(5):
node = self.getFakeHash()
revisions.append((filename, node, lastnode, nullid, nullid, None))
lastnode = node
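
        # Add all entries without ever calling packer.flush(), so the reads
        # below hit the in-memory mutable store.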
for filename, node, p1, p2, linknode, copyfrom in revisions:
            packer.add(filename, node, p1, p2, linknode, copyfrom)

# Test getnodeinfo()
for filename, node, p1, p2, linknode, copyfrom in revisions:
entry = packer.getnodeinfo(filename, node)
            self.assertEqual(entry, (p1, p2, linknode, copyfrom))

# Test getmissing()
missingcheck = [(revisions[0][0], revisions[0][1]), ("foo", self.getFakeHash())]
missing = packer.getmissing(missingcheck)
        self.assertEqual(missing, missingcheck[1:])

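
# Run the shared test cases against the Rust revisionstore implementations.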
class rusthistpacktests(histpacktestsbase, unittest.TestCase):
def __init__(self, *args, **kwargs):
histpacktestsbase.__init__(
self, revisionstore.historypack, revisionstore.mutablehistorystore
)
        unittest.TestCase.__init__(self, *args, **kwargs)


# TODO:
# histpack store:
# - repack two packs into one


if __name__ == "__main__":
silenttestrunner.main(__name__)