2018-03-14 19:03:01 +03:00
|
|
|
# Copyright 2018 Facebook, Inc.
|
|
|
|
#
|
|
|
|
# This software may be used and distributed according to the terms of the
|
|
|
|
# GNU General Public License version 2 or any later version.
|
|
|
|
|
|
|
|
from __future__ import absolute_import
|
2018-04-05 00:42:14 +03:00
|
|
|
|
|
|
|
# Standard Library
|
2018-04-05 02:42:31 +03:00
|
|
|
import hashlib
|
2018-03-14 19:03:01 +03:00
|
|
|
import json
|
|
|
|
|
2018-04-05 02:42:31 +03:00
|
|
|
from mercurial.i18n import _
|
|
|
|
|
2018-05-29 21:13:08 +03:00
|
|
|
from . import commitcloudcommon, commitcloudutil
|
|
|
|
|
2018-03-14 19:03:01 +03:00
|
|
|
|
|
|
|
class SyncState(object):
    """
    Stores the local record of what state was stored in the cloud at the
    last sync.

    The state is persisted as a JSON file in the repo's store vfs; values
    read back from JSON are re-encoded to bytes, which is what mercurial
    works with internally.
    """

    # All state files share this prefix so they are identifiable in the store.
    prefix = "commitcloudstate."

    @classmethod
    def _filename(cls, workspacename):
        """Return a unique, filesystem-safe filename for *workspacename*.

        The readable part keeps only alphanumeric characters; a short hash
        suffix disambiguates names that collide after that filtering
        (e.g. "user/default" vs "userdefault").
        """
        # hashlib requires bytes: encode str (unicode) names so non-ASCII
        # workspace names do not raise.  For the ASCII names that worked
        # before, the digest is unchanged.  Bytes names are hashed as-is,
        # matching the historical behaviour.
        if isinstance(workspacename, bytes):
            digest = hashlib.sha256(workspacename).hexdigest()
        else:
            digest = hashlib.sha256(workspacename.encode("utf-8")).hexdigest()
        return (
            cls.prefix
            + "".join(x for x in workspacename if x.isalnum())
            + ".%s" % digest[0:5]
        )

    @classmethod
    def erasestate(cls, repo, workspacename):
        """Remove the local sync state file for *workspacename*.

        Used to clean up the current state in force recover mode.  A missing
        state file is not an error (tryunlink).
        """
        filename = cls._filename(workspacename)
        repo.svfs.tryunlink(filename)

    def __init__(self, repo, workspacename):
        """Load the last-synced state for *workspacename* from repo.svfs.

        If no state file exists yet, initialize to the empty state
        (version 0, no heads/bookmarks).

        Raises:
            commitcloudcommon.InvalidWorkspaceDataError: if the state file
                exists but cannot be parsed as JSON.
        """
        self.filename = self._filename(workspacename)
        self.repo = repo
        if repo.svfs.exists(self.filename):
            with repo.svfs.open(self.filename, "r") as f:
                try:
                    data = json.load(f)
                except Exception:
                    raise commitcloudcommon.InvalidWorkspaceDataError(
                        repo.ui, _("failed to parse %s") % self.filename
                    )
            self.version = data["version"]
            # JSON yields unicode strings; convert back to bytes.
            self.heads = [h.encode() for h in data["heads"]]
            self.bookmarks = {
                n.encode("utf-8"): v.encode() for n, v in data["bookmarks"].items()
            }
            # Older state files may predate the "omitted*" and "maxage"
            # fields, hence the defaults.
            self.omittedheads = [h.encode() for h in data.get("omittedheads", ())]
            self.omittedbookmarks = [
                n.encode("utf-8") for n in data.get("omittedbookmarks", ())
            ]
            self.maxage = data.get("maxage")
        else:
            self.version = 0
            self.heads = []
            self.bookmarks = {}
            self.omittedheads = []
            self.omittedbookmarks = []
            self.maxage = None

    def update(
        self,
        newversion,
        newheads,
        newbookmarks,
        newomittedheads,
        newomittedbookmarks,
        newmaxage,
    ):
        """Persist a new sync state and update the in-memory fields.

        The file is written first (with atomictemp, so readers never see a
        partial file); the in-memory attributes are only updated once the
        state is safely on disk.
        """
        data = {
            "version": newversion,
            "heads": newheads,
            "bookmarks": newbookmarks,
            "omittedheads": newomittedheads,
            "omittedbookmarks": newomittedbookmarks,
            "maxage": newmaxage,
        }
        with self.repo.svfs.open(self.filename, "w", atomictemp=True) as f:
            json.dump(data, f)
        self.version = newversion
        self.heads = newheads
        self.bookmarks = newbookmarks
        self.omittedheads = newomittedheads
        self.omittedbookmarks = newomittedbookmarks
        self.maxage = newmaxage