pyworker: do not try to write LFS pointers to the working copy

Summary:
The ContentStore cannot deserialize LFS pointers stored in packfiles. To avoid
potential damage, let's refuse to update LFS blobs in the working copy. A proper
solution will be built in a separate diff.

Reviewed By: DurhamG

Differential Revision: D20576575

fbshipit-source-id: 4e4ce6a9432157e2ce69881c0079e943ea3f3acd
This commit is contained in:
Xavier Deguillard 2020-03-25 12:26:10 -07:00 committed by Facebook GitHub Bot
parent 6372a4a4fc
commit 226a30236b

View File

@ -32,7 +32,7 @@ use crossbeam::channel::{bounded, Receiver, Sender};
use cpython_ext::{PyNone, PyPath, ResultPyErrExt};
use fsinfo::{fstype, FsType};
use pyrevisionstore::contentstore;
use revisionstore::ContentStore;
use revisionstore::{ContentStore, HgIdDataStore};
use types::{HgId, Key, RepoPath, RepoPathBuf};
use util::path::remove_file;
@ -406,6 +406,15 @@ fn update(
.get_file_content(&key)?
.ok_or_else(|| format_err!("Can't find key: {}", key))?;
let meta = state
.store
.get_meta(&key)?
.ok_or_else(|| format_err!("Can't find metadata for key: {}", key))?;
if meta.is_lfs() {
bail!("LFS pointers cannot be deserialized properly yet");
}
// Fast path: let's try to open the file directly, we'll handle the failure only if this fails.
match state.working_copy.write(path, &content, flag) {
Ok(size) => Ok(size),