2017-02-24 01:03:02 +03:00
|
|
|
#!/usr/bin/env python
|
2017-11-16 21:56:44 +03:00
|
|
|
from __future__ import absolute_import
|
|
|
|
|
2016-06-16 01:48:16 +03:00
|
|
|
import hashlib
|
2017-01-13 20:42:25 +03:00
|
|
|
import os
|
2016-04-29 03:37:03 +03:00
|
|
|
import random
|
|
|
|
import shutil
|
2017-01-13 20:42:25 +03:00
|
|
|
import stat
|
2016-04-29 03:37:03 +03:00
|
|
|
import struct
|
2017-01-13 20:42:25 +03:00
|
|
|
import sys
|
2016-04-29 03:37:03 +03:00
|
|
|
import tempfile
|
|
|
|
import unittest
|
|
|
|
|
|
|
|
import silenttestrunner
|
|
|
|
|
2017-01-13 20:42:25 +03:00
|
|
|
# Load the local remotefilelog, not the system one
|
|
|
|
sys.path[0:0] = [os.path.join(os.path.dirname(__file__), '..')]
|
2017-11-16 21:56:44 +03:00
|
|
|
import pythonpath
|
|
|
|
pythonpath.setcstorepath()
|
|
|
|
|
2018-01-10 02:23:52 +03:00
|
|
|
from hgext.remotefilelog.historypack import historypack, mutablehistorypack
|
2016-04-29 03:37:03 +03:00
|
|
|
|
2016-11-25 03:23:21 +03:00
|
|
|
from mercurial.node import nullid
|
2016-09-28 21:53:29 +03:00
|
|
|
import mercurial.ui
|
2016-04-29 03:37:03 +03:00
|
|
|
|
2018-01-10 02:23:52 +03:00
|
|
|
from hgext.remotefilelog.basepack import (
|
2016-05-24 12:15:58 +03:00
|
|
|
SMALLFANOUTCUTOFF,
|
|
|
|
LARGEFANOUTPREFIX,
|
|
|
|
)
|
|
|
|
|
2016-04-29 03:37:03 +03:00
|
|
|
class histpacktests(unittest.TestCase):
    # Unit tests for reading/writing remotefilelog history packs.

    def setUp(self):
        # Directories created via makeTempDir; removed in tearDown.
        self.tempdirs = []
|
|
|
|
|
|
|
|
def tearDown(self):
|
|
|
|
for d in self.tempdirs:
|
|
|
|
shutil.rmtree(d)
|
|
|
|
|
|
|
|
def makeTempDir(self):
|
|
|
|
tempdir = tempfile.mkdtemp()
|
|
|
|
self.tempdirs.append(tempdir)
|
|
|
|
return tempdir
|
|
|
|
|
|
|
|
def getHash(self, content):
|
2016-06-16 01:48:16 +03:00
|
|
|
return hashlib.sha1(content).digest()
|
2016-04-29 03:37:03 +03:00
|
|
|
|
|
|
|
def getFakeHash(self):
|
|
|
|
return ''.join(chr(random.randint(0, 255)) for _ in range(20))
|
|
|
|
|
|
|
|
def createPack(self, revisions=None):
|
|
|
|
"""Creates and returns a historypack containing the specified revisions.
|
|
|
|
|
|
|
|
`revisions` is a list of tuples, where each tuple contains a filanem,
|
|
|
|
node, p1node, p2node, and linknode.
|
|
|
|
"""
|
|
|
|
if revisions is None:
|
|
|
|
revisions = [("filename", self.getFakeHash(), nullid, nullid,
|
2016-05-16 20:59:09 +03:00
|
|
|
self.getFakeHash(), None)]
|
2016-04-29 03:37:03 +03:00
|
|
|
|
|
|
|
packdir = self.makeTempDir()
|
2017-05-03 20:19:46 +03:00
|
|
|
packer = mutablehistorypack(mercurial.ui.ui(), packdir,
|
|
|
|
version=1)
|
2016-04-29 03:37:03 +03:00
|
|
|
|
2016-05-16 20:59:09 +03:00
|
|
|
for filename, node, p1, p2, linknode, copyfrom in revisions:
|
|
|
|
packer.add(filename, node, p1, p2, linknode, copyfrom)
|
2016-04-29 03:37:03 +03:00
|
|
|
|
|
|
|
path = packer.close()
|
|
|
|
return historypack(path)
|
|
|
|
|
|
|
|
def testAddSingle(self):
|
|
|
|
"""Test putting a single entry into a pack and reading it out.
|
|
|
|
"""
|
|
|
|
filename = "foo"
|
|
|
|
node = self.getFakeHash()
|
|
|
|
p1 = self.getFakeHash()
|
|
|
|
p2 = self.getFakeHash()
|
|
|
|
linknode = self.getFakeHash()
|
|
|
|
|
2016-05-16 20:59:09 +03:00
|
|
|
revisions = [(filename, node, p1, p2, linknode, None)]
|
2016-04-29 03:37:03 +03:00
|
|
|
pack = self.createPack(revisions)
|
|
|
|
|
history: remove getparents and getlinknode apis
Summary:
These APIs weren't actually used, and the questions can be answered via
the existing getancestors() api anyway.
They were originally put in place because they are the type of question that
doesn't require the full ancestor tree, so we could answer them without doing in
traversal. In an upcoming patch we add the concept of copyfrom back into the
historypack, and getparents becomes confusing since it doesn't expose knowledge
of copy information. So I just decided to delete it all until we need it.
In the future we may want a 'gethistoryinfo(filename, node)' api that just
returns (p1, p2, linknode, copyfrom), to fulfill that original need of history
information without a full ancestor traversal.
Test Plan: Ran the tests
Reviewers: #mercurial, ttung, mitrandir
Reviewed By: mitrandir
Differential Revision: https://phabricator.intern.facebook.com/D3261734
Signature: t1:3261734:1462413665:987c4703e53468a75346aa323188107a5c070fde
2016-05-16 20:59:09 +03:00
|
|
|
actual = pack.getancestors(filename, node)[node]
|
|
|
|
self.assertEquals(p1, actual[0])
|
|
|
|
self.assertEquals(p2, actual[1])
|
|
|
|
self.assertEquals(linknode, actual[2])
|
2016-04-29 03:37:03 +03:00
|
|
|
|
|
|
|
def testAddMultiple(self):
|
|
|
|
"""Test putting multiple unrelated revisions into a pack and reading
|
|
|
|
them out.
|
|
|
|
"""
|
|
|
|
revisions = []
|
|
|
|
for i in range(10):
|
|
|
|
filename = "foo-%s" % i
|
|
|
|
node = self.getFakeHash()
|
|
|
|
p1 = self.getFakeHash()
|
|
|
|
p2 = self.getFakeHash()
|
|
|
|
linknode = self.getFakeHash()
|
2016-05-16 20:59:09 +03:00
|
|
|
revisions.append((filename, node, p1, p2, linknode, None))
|
2016-04-29 03:37:03 +03:00
|
|
|
|
|
|
|
pack = self.createPack(revisions)
|
|
|
|
|
2016-05-16 20:59:09 +03:00
|
|
|
for filename, node, p1, p2, linknode, copyfrom in revisions:
|
history: remove getparents and getlinknode apis
Summary:
These APIs weren't actually used, and the questions can be answered via
the existing getancestors() api anyway.
They were originally put in place because they are the type of question that
doesn't require the full ancestor tree, so we could answer them without doing in
traversal. In an upcoming patch we add the concept of copyfrom back into the
historypack, and getparents becomes confusing since it doesn't expose knowledge
of copy information. So I just decided to delete it all until we need it.
In the future we may want a 'gethistoryinfo(filename, node)' api that just
returns (p1, p2, linknode, copyfrom), to fulfill that original need of history
information without a full ancestor traversal.
Test Plan: Ran the tests
Reviewers: #mercurial, ttung, mitrandir
Reviewed By: mitrandir
Differential Revision: https://phabricator.intern.facebook.com/D3261734
Signature: t1:3261734:1462413665:987c4703e53468a75346aa323188107a5c070fde
2016-05-16 20:59:09 +03:00
|
|
|
actual = pack.getancestors(filename, node)[node]
|
|
|
|
self.assertEquals(p1, actual[0])
|
|
|
|
self.assertEquals(p2, actual[1])
|
|
|
|
self.assertEquals(linknode, actual[2])
|
2016-05-16 20:59:09 +03:00
|
|
|
self.assertEquals(copyfrom, actual[3])
|
2016-04-29 03:37:03 +03:00
|
|
|
|
|
|
|
def testAddAncestorChain(self):
|
|
|
|
"""Test putting multiple revisions in into a pack and read the ancestor
|
|
|
|
chain.
|
|
|
|
"""
|
|
|
|
revisions = []
|
|
|
|
filename = "foo"
|
|
|
|
lastnode = nullid
|
|
|
|
for i in range(10):
|
|
|
|
node = self.getFakeHash()
|
2016-05-16 20:59:09 +03:00
|
|
|
revisions.append((filename, node, lastnode, nullid, nullid, None))
|
2016-04-29 03:37:03 +03:00
|
|
|
lastnode = node
|
|
|
|
|
|
|
|
# revisions must be added in topological order, newest first
|
|
|
|
revisions = list(reversed(revisions))
|
|
|
|
pack = self.createPack(revisions)
|
|
|
|
|
|
|
|
# Test that the chain has all the entries
|
|
|
|
ancestors = pack.getancestors(revisions[0][0], revisions[0][1])
|
2016-05-16 20:59:09 +03:00
|
|
|
for filename, node, p1, p2, linknode, copyfrom in revisions:
|
|
|
|
ap1, ap2, alinknode, acopyfrom = ancestors[node]
|
2016-04-29 03:37:03 +03:00
|
|
|
self.assertEquals(ap1, p1)
|
|
|
|
self.assertEquals(ap2, p2)
|
|
|
|
self.assertEquals(alinknode, linknode)
|
2016-05-16 20:59:09 +03:00
|
|
|
self.assertEquals(acopyfrom, copyfrom)
|
2016-04-29 03:37:03 +03:00
|
|
|
|
|
|
|
def testPackMany(self):
|
|
|
|
"""Pack many related and unrelated ancestors.
|
|
|
|
"""
|
|
|
|
# Build a random pack file
|
|
|
|
allentries = {}
|
|
|
|
ancestorcounts = {}
|
|
|
|
revisions = []
|
|
|
|
random.seed(0)
|
|
|
|
for i in range(100):
|
|
|
|
filename = "filename-%s" % i
|
|
|
|
entries = []
|
|
|
|
p2 = nullid
|
|
|
|
linknode = nullid
|
|
|
|
for j in range(random.randint(1, 100)):
|
|
|
|
node = self.getFakeHash()
|
|
|
|
p1 = nullid
|
|
|
|
if len(entries) > 0:
|
|
|
|
p1 = entries[random.randint(0, len(entries) - 1)]
|
|
|
|
entries.append(node)
|
2016-05-16 20:59:09 +03:00
|
|
|
revisions.append((filename, node, p1, p2, linknode, None))
|
2016-04-29 03:37:03 +03:00
|
|
|
allentries[(filename, node)] = (p1, p2, linknode)
|
|
|
|
if p1 == nullid:
|
|
|
|
ancestorcounts[(filename, node)] = 1
|
|
|
|
else:
|
|
|
|
newcount = ancestorcounts[(filename, p1)] + 1
|
|
|
|
ancestorcounts[(filename, node)] = newcount
|
|
|
|
|
|
|
|
# Must add file entries in reverse topological order
|
|
|
|
revisions = list(reversed(revisions))
|
|
|
|
pack = self.createPack(revisions)
|
|
|
|
|
|
|
|
# Verify the pack contents
|
|
|
|
for (filename, node), (p1, p2, lastnode) in allentries.iteritems():
|
|
|
|
ancestors = pack.getancestors(filename, node)
|
|
|
|
self.assertEquals(ancestorcounts[(filename, node)],
|
|
|
|
len(ancestors))
|
|
|
|
for anode, (ap1, ap2, alinknode, copyfrom) in ancestors.iteritems():
|
|
|
|
ep1, ep2, elinknode = allentries[(filename, anode)]
|
|
|
|
self.assertEquals(ap1, ep1)
|
|
|
|
self.assertEquals(ap2, ep2)
|
|
|
|
self.assertEquals(alinknode, elinknode)
|
|
|
|
self.assertEquals(copyfrom, None)
|
|
|
|
|
2017-04-27 20:44:34 +03:00
|
|
|
def testGetNodeInfo(self):
|
|
|
|
revisions = []
|
|
|
|
filename = "foo"
|
|
|
|
lastnode = nullid
|
|
|
|
for i in range(10):
|
|
|
|
node = self.getFakeHash()
|
|
|
|
revisions.append((filename, node, lastnode, nullid, nullid, None))
|
|
|
|
lastnode = node
|
|
|
|
|
|
|
|
pack = self.createPack(revisions)
|
|
|
|
|
|
|
|
# Test that getnodeinfo returns the expected results
|
|
|
|
for filename, node, p1, p2, linknode, copyfrom in revisions:
|
|
|
|
ap1, ap2, alinknode, acopyfrom = pack.getnodeinfo(filename, node)
|
|
|
|
self.assertEquals(ap1, p1)
|
|
|
|
self.assertEquals(ap2, p2)
|
|
|
|
self.assertEquals(alinknode, linknode)
|
|
|
|
self.assertEquals(acopyfrom, copyfrom)
|
|
|
|
|
2016-04-29 03:37:03 +03:00
|
|
|
def testGetMissing(self):
|
|
|
|
"""Test the getmissing() api.
|
|
|
|
"""
|
|
|
|
revisions = []
|
|
|
|
filename = "foo"
|
|
|
|
for i in range(10):
|
|
|
|
node = self.getFakeHash()
|
|
|
|
p1 = self.getFakeHash()
|
|
|
|
p2 = self.getFakeHash()
|
|
|
|
linknode = self.getFakeHash()
|
2016-05-16 20:59:09 +03:00
|
|
|
revisions.append((filename, node, p1, p2, linknode, None))
|
2016-04-29 03:37:03 +03:00
|
|
|
|
|
|
|
pack = self.createPack(revisions)
|
|
|
|
|
|
|
|
missing = pack.getmissing([(filename, revisions[0][1])])
|
|
|
|
self.assertFalse(missing)
|
|
|
|
|
|
|
|
missing = pack.getmissing([(filename, revisions[0][1]),
|
|
|
|
(filename, revisions[1][1])])
|
|
|
|
self.assertFalse(missing)
|
|
|
|
|
|
|
|
fakenode = self.getFakeHash()
|
|
|
|
missing = pack.getmissing([(filename, revisions[0][1]),
|
|
|
|
(filename, fakenode)])
|
|
|
|
self.assertEquals(missing, [(filename, fakenode)])
|
|
|
|
|
2016-05-16 20:59:09 +03:00
|
|
|
# Test getmissing on a non-existant filename
|
|
|
|
missing = pack.getmissing([("bar", fakenode)])
|
|
|
|
self.assertEquals(missing, [("bar", fakenode)])
|
|
|
|
|
2016-04-29 03:37:03 +03:00
|
|
|
def testAddThrows(self):
|
|
|
|
pack = self.createPack()
|
|
|
|
|
|
|
|
try:
|
2016-05-16 20:59:09 +03:00
|
|
|
pack.add('filename', nullid, nullid, nullid, nullid, None)
|
2016-04-29 03:37:03 +03:00
|
|
|
self.assertTrue(False, "historypack.add should throw")
|
|
|
|
except RuntimeError:
|
|
|
|
pass
|
|
|
|
|
|
|
|
def testBadVersionThrows(self):
|
|
|
|
pack = self.createPack()
|
|
|
|
path = pack.path + '.histpack'
|
|
|
|
with open(path) as f:
|
|
|
|
raw = f.read()
|
2017-05-03 20:19:46 +03:00
|
|
|
raw = struct.pack('!B', 255) + raw[1:]
|
2017-01-13 20:42:25 +03:00
|
|
|
os.chmod(path, os.stat(path).st_mode | stat.S_IWRITE)
|
2016-04-29 03:37:03 +03:00
|
|
|
with open(path, 'w+') as f:
|
|
|
|
f.write(raw)
|
|
|
|
|
|
|
|
try:
|
|
|
|
pack = historypack(pack.path)
|
|
|
|
self.assertTrue(False, "bad version number should have thrown")
|
|
|
|
except RuntimeError:
|
|
|
|
pass
|
|
|
|
|
2016-05-24 12:15:58 +03:00
|
|
|
def testLargePack(self):
|
|
|
|
"""Test creating and reading from a large pack with over X entries.
|
|
|
|
This causes it to use a 2^16 fanout table instead."""
|
|
|
|
total = SMALLFANOUTCUTOFF + 1
|
|
|
|
revisions = []
|
|
|
|
for i in xrange(total):
|
|
|
|
filename = "foo-%s" % i
|
|
|
|
node = self.getFakeHash()
|
|
|
|
p1 = self.getFakeHash()
|
|
|
|
p2 = self.getFakeHash()
|
|
|
|
linknode = self.getFakeHash()
|
|
|
|
revisions.append((filename, node, p1, p2, linknode, None))
|
|
|
|
|
|
|
|
pack = self.createPack(revisions)
|
|
|
|
self.assertEquals(pack.params.fanoutprefix, LARGEFANOUTPREFIX)
|
|
|
|
|
|
|
|
for filename, node, p1, p2, linknode, copyfrom in revisions:
|
|
|
|
actual = pack.getancestors(filename, node)[node]
|
|
|
|
self.assertEquals(p1, actual[0])
|
|
|
|
self.assertEquals(p2, actual[1])
|
|
|
|
self.assertEquals(linknode, actual[2])
|
|
|
|
self.assertEquals(copyfrom, actual[3])
|
2016-04-29 03:37:03 +03:00
|
|
|
# TODO:
|
|
|
|
# histpack store:
|
|
|
|
# - repack two packs into one
|
|
|
|
|
|
|
|
if __name__ == '__main__':
    # Run through silenttestrunner so output stays stable for the harness.
    silenttestrunner.main(__name__)
|