Mirror of https://github.com/facebook/sapling.git — synced 2024-10-10 00:45:18 +03:00, commit d404b0a228.
Summary: One of the main drawbacks of the current version of repack is that it writes the data back to a packfile, making it hard to change the file format. Currently, two file-format changes are ongoing: moving away from packfiles entirely, and moving LFS pointers out of the packfiles into separate storage. While an ad-hoc solution could be designed for this purpose, repack can fulfill this goal easily by simply writing to the ContentStore; the configuration of the ContentStore will then decide where this data is written. The main drawback of this code is the unfortunate added duplication of code. I'm sure there is a way to avoid it by adding new traits, but I decided against that for now for code-readability reasons. Reviewed By: DurhamG Differential Revision: D20567118 fbshipit-source-id: d67282dae31db93739e50f8cc64f9ecce92d2d30
107 lines · 2.9 KiB · Python
# Copyright (c) Facebook, Inc. and its affiliates.
|
|
#
|
|
# This software may be used and distributed according to the terms of the
|
|
# GNU General Public License version 2.
|
|
|
|
from __future__ import absolute_import
|
|
|
|
import os
|
|
import struct
|
|
|
|
from bindings import revisionstore
|
|
from edenscm.mercurial.i18n import _
|
|
from edenscm.mercurial.node import hex, nullid
|
|
|
|
from . import basepack, constants, shallowutil
|
|
from .lz4wrapper import lz4compress, lz4decompress
|
|
|
|
|
|
NODELENGTH = 20
|
|
|
|
# The indicator value in the index for a fulltext entry.
|
|
FULLTEXTINDEXMARK = -1
|
|
NOBASEINDEXMARK = -2
|
|
|
|
INDEXSUFFIX = ".dataidx"
|
|
PACKSUFFIX = ".datapack"
|
|
|
|
|
|
class datapackstore(basepack.basepackstore):
    """A read-only store backed by on-disk datapack files.

    Lookups are served by scanning the available packs (via
    ``runonpacks`` from the base class) and returning the first hit;
    a ``KeyError`` is raised when no pack contains the requested key.
    """

    INDEXSUFFIX = INDEXSUFFIX
    PACKSUFFIX = PACKSUFFIX

    def __init__(self, ui, path, shared, deletecorruptpacks=False):
        super(datapackstore, self).__init__(
            ui, path, shared, deletecorruptpacks=deletecorruptpacks
        )

    def getpack(self, path):
        # Packs are opened through the native (Rust) revisionstore binding.
        return revisionstore.datapack(path)

    def get(self, name, node):
        # Fulltext retrieval is intentionally unsupported; callers must
        # reconstruct content from the delta chain.
        raise RuntimeError("must use getdeltachain with datapackstore")

    def _getfirst(self, attrname, name, node):
        """Call ``pack.<attrname>(name, node)`` across packs, returning
        the first successful result.

        Raises KeyError when no pack contains (name, node).
        """

        def func(pack):
            return getattr(pack, attrname)(name, node)

        # runonpacks yields results only from packs where func succeeds;
        # the first hit wins.
        for result in self.runonpacks(func):
            return result

        raise KeyError((name, hex(node)))

    def getmeta(self, name, node):
        """Return the metadata dict stored for (name, node)."""
        return self._getfirst("getmeta", name, node)

    def getdelta(self, name, node):
        """Return the single delta entry stored for (name, node)."""
        return self._getfirst("getdelta", name, node)

    def getdeltachain(self, name, node):
        """Return the full delta chain for (name, node)."""
        return self._getfirst("getdeltachain", name, node)

    def add(self, name, node, data):
        # Datapacks are immutable once written; writes go elsewhere.
        raise RuntimeError("cannot add to datapackstore")
|
|
def makedatapackstore(ui, path, shared, deletecorruptpacks=False):
    """Build a datapack store, selecting the implementation by config.

    When ``remotefilelog.userustpackstore`` is enabled, the native Rust
    store is used (note it does not take ``ui``/``shared``); otherwise
    the Python ``datapackstore`` wrapper is returned.
    """
    userust = ui.configbool("remotefilelog", "userustpackstore", False)
    if userust:
        return revisionstore.datapackstore(path, deletecorruptpacks)
    return datapackstore(ui, path, shared, deletecorruptpacks=deletecorruptpacks)
|
|
|
|
|
|
class memdatapack(object):
    """In-memory stand-in for a datapack.

    Deltas and metadata are held in plain dicts keyed by ``(name, node)``
    tuples. NOTE: ``add()`` records only the delta; metadata must be
    inserted into ``self.meta`` separately, and ``getdelta()`` raises
    KeyError if the metadata entry is missing.
    """

    def __init__(self):
        # (name, node) -> (deltabase, delta)
        self.data = {}
        # (name, node) -> metadata dict
        self.meta = {}

    def add(self, name, node, deltabase, delta):
        """Record ``delta`` for (name, node), based on ``deltabase``."""
        self.data[(name, node)] = (deltabase, delta)

    def getdelta(self, name, node):
        """Return ``(delta, name, deltabase, meta)`` for (name, node).

        Raises KeyError if either the delta or its metadata is absent.
        """
        deltabase, delta = self.data[(name, node)]
        return (delta, name, deltabase, self.getmeta(name, node))

    def getdeltachain(self, name, node):
        """Return a one-link delta chain:
        ``[(name, node, deltaname, deltabase, delta)]``.
        """
        deltabase, delta = self.data[(name, node)]
        return [(name, node, name, deltabase, delta)]

    def getmeta(self, name, node):
        """Return the metadata dict for (name, node); raises KeyError."""
        return self.meta[(name, node)]

    def getmissing(self, keys):
        """Return the subset of ``keys`` not stored here, in input order."""
        return [key for key in keys if key not in self.data]
|