hg: make mutablehistorypack implement the history store api

Summary:
This makes the mutable history pack implement the history store read
api so we can add it to the union store and read the contents of things that
have been written but not yet committed.

The mutablehistorypack fileentries variable has been changed to map each
filename to a dict of entries keyed by node, instead of a list of entries, so
individual nodes can be looked up quickly during reads. The list came from a
legacy requirement where we used to preserve the order in which the writer
added entries. We no longer do that (instead we topologically sort whatever
we're given), so switching from a list to a dict should be fine.
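
A minimal standalone sketch of the shape of this change, using placeholder
string hashes and hypothetical byfile_* names (the real entries carry binary
nodes and live inside mutablehistorypack):

import struct

# Placeholder values standing in for real 20-byte binary hashes.
node, p1, p2, linknode, copyfrom = 'n1', 'p1', 'p2', 'l1', ''
copyfromlen = struct.pack('!H', len(copyfrom))

# Before: filename -> list of entry tuples; finding one node means scanning.
byfile_list = {}
byfile_list.setdefault('foo', []).append(
    (node, p1, p2, linknode, copyfromlen, copyfrom))

# After: filename -> {node: entry}; finding one node is a direct dict lookup.
byfile_dict = {}
byfile_dict.setdefault('foo', {})[node] = (
    node, p1, p2, linknode, copyfromlen, copyfrom)

assert byfile_dict['foo'][node][3] == linknode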

Differential Revision: D7083036

fbshipit-source-id: ae511db60ab6432059714a2271c175dc9683b8e1
Durham Goode 2018-02-27 11:10:28 -08:00 committed by Saurabh Singh
parent ac2fc36da3
commit 0cb760c0ea
2 changed files with 66 additions and 7 deletions

Changed file 1 of 2:

@@ -431,18 +431,15 @@ class mutablehistorypack(basepack.mutablebasepack):
     def add(self, filename, node, p1, p2, linknode, copyfrom):
         copyfrom = copyfrom or ''
         copyfromlen = struct.pack('!H', len(copyfrom))
-        self.fileentries.setdefault(filename, []).append((node, p1, p2,
-                                                          linknode,
-                                                          copyfromlen,
-                                                          copyfrom))
+        entrymap = self.fileentries.setdefault(filename, {})
+        entrymap[node] = (node, p1, p2, linknode, copyfromlen, copyfrom)
 
     def _write(self):
         for filename in sorted(self.fileentries):
-            entries = self.fileentries[filename]
+            entrymap = self.fileentries[filename]
             sectionstart = self.packfp.tell()
 
             # Write the file section content
-            entrymap = dict((e[0], e) for e in entries)
             def parentfunc(node):
                 x, p1, p2, x, x, x = entrymap[node]
                 parents = []
@@ -453,7 +450,7 @@ class mutablehistorypack(basepack.mutablebasepack):
                 return parents
 
             sortednodes = list(reversed(shallowutil.sortnodes(
-                (e[0] for e in entries),
+                entrymap.iterkeys(),
                 parentfunc)))
 
             # Write the file section header
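
shallowutil.sortnodes is not shown in this diff; conceptually it is a
topological sort driven by a parent-lookup callback. A rough standalone sketch
of that idea (not the actual remotefilelog implementation, which may differ in
tie-breaking and iteration strategy):

def toposort(nodes, parentfunc):
    """Order nodes so that every known parent appears before its children."""
    nodes = list(nodes)
    inset = set(nodes)
    seen = set()
    result = []

    def visit(node):
        if node in seen:
            return
        seen.add(node)
        for parent in parentfunc(node):
            if parent in inset:       # ignore parents outside this pack
                visit(parent)
        result.append(node)           # emitted after all known ancestors

    for node in nodes:
        visit(node)
    return result

# Tiny linear history: a <- b <- c, with parents listed per node.
parents = {'a': [], 'b': ['a'], 'c': ['b']}
print(toposort(['c', 'a', 'b'], parents.get))  # ['a', 'b', 'c']

The _write code above then reverses whatever order sortnodes produces before
writing the file section.
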
@@ -543,3 +540,28 @@ class mutablehistorypack(basepack.mutablebasepack):
         nodecountraw = struct.pack('!Q', nodecount)
         return (''.join(fileindexentries) + nodecountraw +
                 ''.join(nodeindexentries))
+
+    def getancestors(self, name, node, known=None):
+        entrymap = self.fileentries.get(name)
+        if entrymap is None:
+            raise KeyError((name, hex(node)))
+
+        entry = entrymap.get(node)
+        if entry is not None:
+            enode, p1, p2, linknode, copyfromlen, copyfrom = entry
+            return {node: (p1, p2, linknode, copyfrom)}
+
+        raise KeyError((name, hex(node)))
+
+    def getnodeinfo(self, name, node):
+        return self.getancestors(name, node)[node]
+
+    def getmissing(self, keys):
+        missing = []
+        fileentries = self.fileentries
+        for name, node in keys:
+            entrymap = fileentries.get(name)
+            if node not in entrymap:
+                missing.append((name, node))
+        return missing
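
A usage sketch of the read API added above; 'pack' stands for a
mutablehistorypack that has had add() called but not yet been finalized, and
the helper functions are hypothetical. Note that getmissing indexes entrymap
directly, so as written it assumes every queried filename already has at least
one entry in the pack; the second helper shows one way a caller could guard
against that:

def linknodeof(pack, filename, node):
    # getancestors returns {node: (p1, p2, linknode, copyfrom)} when the
    # pack has the entry, and raises KeyError otherwise.
    try:
        p1, p2, linknode, copyfrom = pack.getancestors(filename, node)[node]
    except KeyError:
        return None
    # getnodeinfo returns the same tuple for a single node.
    assert pack.getnodeinfo(filename, node) == (p1, p2, linknode, copyfrom)
    return linknode

def safegetmissing(pack, keys):
    # Split out filenames the pack has never seen, since getmissing above
    # indexes the per-file entrymap directly.
    unknown = [key for key in keys if key[0] not in pack.fileentries]
    known = [key for key in keys if key[0] in pack.fileentries]
    return unknown + pack.getmissing(known)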

Changed file 2 of 2:

@@ -269,6 +269,43 @@ class histpacktests(unittest.TestCase):
         self.assertEquals(p2, actual[1])
         self.assertEquals(linknode, actual[2])
         self.assertEquals(copyfrom, actual[3])
+
+    def testReadingMutablePack(self):
+        """Tests that the data written into a mutablehistorypack can be read out
+        before it has been finalized."""
+        packdir = self.makeTempDir()
+        packer = mutablehistorypack(mercurial.ui.ui(), packdir, version=1)
+
+        revisions = []
+        filename = "foo"
+        lastnode = nullid
+        for i in range(5):
+            node = self.getFakeHash()
+            revisions.append((filename, node, lastnode, nullid, nullid, ''))
+            lastnode = node
+
+        filename = "bar"
+        lastnode = nullid
+        for i in range(5):
+            node = self.getFakeHash()
+            revisions.append((filename, node, lastnode, nullid, nullid, ''))
+            lastnode = node
+
+        for filename, node, p1, p2, linknode, copyfrom in revisions:
+            packer.add(filename, node, p1, p2, linknode, copyfrom)
+
+        # Test getancestors()
+        for filename, node, p1, p2, linknode, copyfrom in revisions:
+            entry = packer.getancestors(filename, node)
+            self.assertEquals(entry, {node: (p1, p2, linknode, copyfrom)})
+
+        # Test getmissing()
+        missingcheck = [(revisions[0][0], revisions[0][1]),
+                        ('foo', self.getFakeHash())]
+        missing = packer.getmissing(missingcheck)
+        self.assertEquals(missing, missingcheck[1:])
+
 # TODO:
 # histpack store:
 # - repack two packs into one
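
The summary mentions adding the mutable pack to the union store so that
unflushed data becomes readable. A minimal sketch of that idea, assuming a
hypothetical union wrapper (this is not remotefilelog's actual union store,
only an illustration of how stores sharing this read API can be stacked):

class simpleunionhistorystore(object):
    """Hypothetical union over history stores sharing the read API.

    Stores are consulted in order, so placing a mutablehistorypack first
    makes entries readable as soon as add() is called, before the pack is
    written out.
    """
    def __init__(self, *stores):
        self.stores = stores

    def getancestors(self, name, node, known=None):
        for store in self.stores:
            try:
                return store.getancestors(name, node, known=known)
            except KeyError:
                continue
        raise KeyError((name, node))

    def getnodeinfo(self, name, node):
        return self.getancestors(name, node)[node]

    def getmissing(self, keys):
        # Each store narrows the set of keys still considered missing.
        # Note: mutablehistorypack.getmissing above expects filenames it has
        # seen; a caller can pre-filter as in the earlier safegetmissing sketch.
        missing = keys
        for store in self.stores:
            missing = store.getmissing(missing)
            if not missing:
                break
        return missing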