types: remove Key::node()

Summary:
This function is difficult to justify in the context of the Rust borrow checker.
The primary concern the getter pattern addresses is preventing mutation when the
object is passed around, and the borrow checker already covers that: code holding
a shared reference to a Key cannot mutate its fields, public or not.

We can always add the function back if it ever needs to do more than just return
the underlying value.
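
For illustration, a minimal sketch with simplified, hypothetical stand-ins for
Node and Key (the array-backed Node and print_node below are not the actual
definitions from the types crate): a caller that receives a shared reference
still cannot mutate the public field, so direct field access loses nothing over
the getter.

    // Hypothetical stand-ins for the real types; illustrative only.
    #[derive(Clone, Debug)]
    pub struct Node([u8; 20]);

    pub struct Key {
        pub name: Vec<u8>,
        // Public field, replacing the removed Key::node() accessor.
        pub node: Node,
    }

    // Callers that receive &Key can read the field but cannot mutate it;
    // writing key.node would require &mut Key.
    fn print_node(key: &Key) {
        let node: &Node = &key.node; // previously: key.node()
        // key.node = Node([0; 20]); // rejected by the borrow checker
        println!("{:?}", node);
    }

    fn main() {
        let key = Key {
            name: b"some/path".to_vec(),
            node: Node([0; 20]),
        };
        print_node(&key);
    }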

Reviewed By: quark-zju

Differential Revision: D14877545

fbshipit-source-id: acdd796e1bee5445c1bce5ce0ceb41a7334e4966
Author: Stefan Filip, 2019-04-14 19:51:33 -07:00 (committed by Facebook Github Bot)
Parent: 9014310969
Commit: 78d11002eb
12 changed files with 54 additions and 57 deletions


@@ -3,8 +3,9 @@
// This software may be used and distributed according to the terms of the
// GNU General Public License version 2 or any later version.
use cpython::{PyBytes, PyDict, PyErr, PyIterator, PyList, PyResult, PyTuple, Python, PythonObject,
ToPyObject};
use cpython::{
PyBytes, PyDict, PyErr, PyIterator, PyList, PyResult, PyTuple, Python, PythonObject, ToPyObject,
};
use revisionstore::historystore::HistoryStore;
use types::{Key, NodeInfo};
@@ -21,12 +22,9 @@ impl<T: HistoryStore> HistoryStorePyExt for T {
fn get_ancestors(&self, py: Python, name: &PyBytes, node: &PyBytes) -> PyResult<PyDict> {
let key = to_key(py, name, node);
let ancestors = self.get_ancestors(&key).map_err(|e| to_pyerr(py, &e))?;
let ancestors = ancestors.iter().map(|(k, v)| {
(
PyBytes::new(py, k.node().as_ref()),
from_node_info(py, k, v),
)
});
let ancestors = ancestors
.iter()
.map(|(k, v)| (PyBytes::new(py, k.node.as_ref()), from_node_info(py, k, v)));
let pyancestors = PyDict::new(py);
for (node, value) in ancestors {
pyancestors.set_item(py, node, value)?;
@@ -37,7 +35,8 @@ impl<T: HistoryStore> HistoryStorePyExt for T {
fn get_missing(&self, py: Python, keys: &mut PyIterator) -> PyResult<PyList> {
// Copy the PyObjects into a vector so we can get a reference iterator.
// This lets us get a Vector of Keys without copying the strings.
let keys = keys.map(|k| k.and_then(|k| from_tuple_to_key(py, &k)))
let keys = keys
.map(|k| k.and_then(|k| from_tuple_to_key(py, &k)))
.collect::<Result<Vec<Key>, PyErr>>()?;
let missing = self.get_missing(&keys[..]).map_err(|e| to_pyerr(py, &e))?;
@@ -59,13 +58,14 @@ impl<T: HistoryStore> HistoryStorePyExt for T {
fn from_node_info(py: Python, key: &Key, info: &NodeInfo) -> PyTuple {
(
PyBytes::new(py, info.parents[0].node().as_ref()),
PyBytes::new(py, info.parents[1].node().as_ref()),
PyBytes::new(py, info.parents[0].node.as_ref()),
PyBytes::new(py, info.parents[1].node.as_ref()),
PyBytes::new(py, info.linknode.as_ref().as_ref()),
if key.name() != info.parents[0].name() {
PyBytes::new(py, info.parents[0].name()).into_object()
} else {
Python::None(py)
},
).into_py_object(py)
)
.into_py_object(py)
}


@@ -36,7 +36,7 @@ impl DataStore for PythonDataStore {
let gil = Python::acquire_gil();
let py = gil.python();
let py_name = PyBytes::new(py, key.name());
let py_node = PyBytes::new(py, key.node().as_ref());
let py_node = PyBytes::new(py, key.node.as_ref());
let py_data = self
.py_store
@@ -52,7 +52,7 @@ impl DataStore for PythonDataStore {
let gil = Python::acquire_gil();
let py = gil.python();
let py_name = PyBytes::new(py, key.name());
let py_node = PyBytes::new(py, key.node().as_ref());
let py_node = PyBytes::new(py, key.node.as_ref());
let py_delta = self
.py_store
.call_method(py, "getdelta", (py_name, py_node), None)
@@ -68,7 +68,7 @@ impl DataStore for PythonDataStore {
let base_key = to_key(py, &py_delta_name, &py_delta_node);
Ok(Delta {
data: py_bytes.data(py).to_vec().into(),
base: if base_key.node().is_null() {
base: if base_key.node.is_null() {
None
} else {
Some(base_key)
@@ -81,7 +81,7 @@ impl DataStore for PythonDataStore {
let gil = Python::acquire_gil();
let py = gil.python();
let py_name = PyBytes::new(py, key.name());
let py_node = PyBytes::new(py, key.node().as_ref());
let py_node = PyBytes::new(py, key.node.as_ref());
let py_chain = self
.py_store
.call_method(py, "getdeltachain", (py_name, py_node), None)
@@ -98,7 +98,7 @@ impl DataStore for PythonDataStore {
let gil = Python::acquire_gil();
let py = gil.python();
let py_name = PyBytes::new(py, key.name());
let py_node = PyBytes::new(py, key.node().as_ref());
let py_node = PyBytes::new(py, key.node.as_ref());
let py_meta = self
.py_store
.call_method(py, "getmeta", (py_name, py_node), None)
@@ -129,7 +129,8 @@ impl Store for PythonDataStore {
py_missing.insert_item(py, py_missing.len(py), py_key.into_object());
}
let py_missing = self.py_store
let py_missing = self
.py_store
.call_method(py, "getmissing", (py_missing,), None)
.map_err(|e| pyerr_to_error(py, e))?;
let py_list = PyList::extract(py, &py_missing).map_err(|e| pyerr_to_error(py, e))?;


@@ -38,7 +38,7 @@ pub fn to_key(py: Python, name: &PyBytes, node: &PyBytes) -> Key {
pub fn from_key(py: Python, key: &Key) -> (PyBytes, PyBytes) {
(
PyBytes::new(py, key.name()),
PyBytes::new(py, key.node().as_ref()),
PyBytes::new(py, key.node.as_ref()),
)
}
@@ -54,7 +54,7 @@ pub fn from_tuple_to_delta<'a>(py: Python, py_delta: &PyObject) -> PyResult<Delt
let base_key = to_key(py, &py_delta_name, &py_delta_node);
Ok(Delta {
data: py_bytes.data(py).to_vec().into(),
base: if base_key.node().is_null() {
base: if base_key.node.is_null() {
None
} else {
Some(base_key)


@@ -169,7 +169,7 @@ fn get_file(
key: Key,
) -> impl Future<Item = (Key, Bytes), Error = Error> {
log::debug!("Fetching file content for key: {}", &key);
let filenode = key.node().to_hex();
let filenode = key.node.to_hex();
url_prefix
.join(&filenode)
.into_future()
@@ -206,7 +206,7 @@ fn get_history(
max_depth: Option<u32>,
) -> impl Stream<Item = HistoryEntry, Error = Error> {
log::debug!("Fetching history for key: {}", &key);
let filenode = key.node().to_hex();
let filenode = key.node.to_hex();
let filename = url_encode(&key.name());
url_prefix
.join(&format!("{}/", &filenode))


@@ -311,7 +311,7 @@ impl DataStore for DataPack {
}
fn get_delta(&self, key: &Key) -> Fallible<Delta> {
let entry = self.index.get_entry(key.node())?;
let entry = self.index.get_entry(&key.node)?;
let data_entry = self.read_entry(entry.pack_entry_offset())?;
Ok(Delta {
@@ -325,7 +325,7 @@ impl DataStore for DataPack {
fn get_delta_chain(&self, key: &Key) -> Fallible<Vec<Delta>> {
let mut chain: Vec<Delta> = Default::default();
let mut next_entry = self.index.get_entry(key.node())?;
let mut next_entry = self.index.get_entry(&key.node)?;
loop {
let data_entry = self.read_entry(next_entry.pack_entry_offset())?;
chain.push(Delta {
@@ -347,7 +347,7 @@ impl DataStore for DataPack {
}
fn get_meta(&self, key: &Key) -> Fallible<Metadata> {
let index_entry = self.index.get_entry(key.node())?;
let index_entry = self.index.get_entry(&key.node)?;
Ok(self.read_entry(index_entry.pack_entry_offset())?.metadata)
}
}
@@ -360,7 +360,7 @@ impl Store for DataPack {
fn get_missing(&self, keys: &[Key]) -> Fallible<Vec<Key>> {
Ok(keys
.iter()
.filter(|k| self.index.get_entry(k.node()).is_err())
.filter(|k| self.index.get_entry(&k.node).is_err())
.map(|k| k.clone())
.collect())
}


@@ -297,11 +297,11 @@ impl HistoryIndex {
})?;
let mut file_nodes: Vec<(&Key, &NodeLocation)> =
file_nodes.iter().collect::<Vec<(&Key, &NodeLocation)>>();
file_nodes.sort_by_key(|x| x.0.node());
file_nodes.sort_by_key(|x| x.0.node);
for &(key, location) in file_nodes.iter() {
NodeIndexEntry {
node: key.node().clone(),
node: key.node.clone(),
offset: location.offset,
}
.write(writer)?;
@@ -330,7 +330,7 @@ impl HistoryIndex {
let end = start + file_entry.node_index_size as usize;
let buf = self.mmap.get_err(start..end)?;
let entry_offset = self.binary_search_nodes(key.node(), &buf)?;
let entry_offset = self.binary_search_nodes(&key.node, &buf)?;
self.read_node_entry((start + entry_offset) - self.index_start)
}
@@ -497,7 +497,7 @@ mod tests {
file_sections.push((name_slice, location.clone()));
let mut node_map: HashMap<Key, NodeLocation> = HashMap::new();
for (key, node_location) in nodes.iter() {
let key = Key::new(name_slice.to_vec(), key.node().clone());
let key = Key::new(name_slice.to_vec(), key.node.clone());
node_map.insert(key, node_location.clone());
}
file_nodes.insert(name_slice, node_map);
@@ -517,7 +517,7 @@ mod tests {
for (ref key, ref location) in node_map.iter() {
assert_eq!(name.as_ref(), key.name());
let entry = index.get_node_entry(key).unwrap();
assert_eq!(key.node(), &entry.node);
assert_eq!(key.node, entry.node);
assert_eq!(location.offset, entry.offset);
}
}


@@ -229,7 +229,7 @@ impl HistoryPack {
fn read_node_info(&self, key: &Key, offset: u64) -> Fallible<NodeInfo> {
let entry = self.read_history_entry(offset)?;
assert_eq!(&entry.node, key.node());
assert_eq!(entry.node, key.node);
let p1 = Key::new(
match entry.copy_from {
Some(value) => value.to_vec(),


@@ -39,7 +39,7 @@ impl Entry {
}
pub fn from_log(key: &Key, log: &LogRotate) -> Fallible<Self> {
let mut log_entry = log.lookup(0, key.node().as_ref())?;
let mut log_entry = log.lookup(0, key.node.as_ref())?;
let buf = log_entry.nth(0).ok_or_else(|| format_err!("Not found"))??;
let mut cur = Cursor::new(buf);
@@ -73,14 +73,14 @@ impl Entry {
pub fn write_to_log(self, log: &mut LogRotate) -> Fallible<()> {
let mut buf = Vec::new();
buf.write_all(self.delta.key.node().as_ref())?;
buf.write_all(self.delta.key.node.as_ref())?;
buf.write_u16::<BigEndian>(self.delta.key.name().len() as u16)?;
buf.write_all(self.delta.key.name())?;
buf.write_all(
self.delta
.base
.as_ref()
.map_or_else(|| Node::null_id(), |k| k.node())
.map_or_else(|| Node::null_id(), |k| &k.node)
.as_ref(),
)?;
self.metadata.write(&mut buf)?;


@@ -177,14 +177,14 @@ mod tests {
);
}
for (key, nodeinfo) in &loose_file.ancestors {
if key.node() == &anode {
assert_eq!(nodeinfo.parents[0].node(), &ap0);
assert_eq!(nodeinfo.parents[1].node(), &ap1);
if key.node == anode {
assert_eq!(nodeinfo.parents[0].node, ap0);
assert_eq!(nodeinfo.parents[1].node, ap1);
assert_eq!(nodeinfo.linknode, alinknode);
} else {
assert_eq!(key.node(), &bnode);
assert_eq!(nodeinfo.parents[0].node(), &bp0);
assert_eq!(nodeinfo.parents[1].node(), &bp1);
assert_eq!(key.node, bnode);
assert_eq!(nodeinfo.parents[0].node, bp0);
assert_eq!(nodeinfo.parents[1].node, bp1);
assert_eq!(nodeinfo.linknode, blinknode);
}
}


@@ -87,13 +87,13 @@ impl MutableDataPack {
let mut buf = Vec::with_capacity(delta.key.name().len() + compressed.len() + 72);
buf.write_u16::<BigEndian>(delta.key.name().len() as u16)?;
buf.write_all(delta.key.name())?;
buf.write_all(delta.key.node().as_ref())?;
buf.write_all(delta.key.node.as_ref())?;
buf.write_all(
delta
.base
.as_ref()
.map_or_else(|| Node::null_id(), |k| k.node())
.map_or_else(|| Node::null_id(), |k| &k.node)
.as_ref(),
)?;
buf.write_u64::<BigEndian>(compressed.len() as u64)?;
@@ -105,17 +105,17 @@ impl MutableDataPack {
self.hasher.input(&buf);
let delta_location = DeltaLocation {
delta_base: delta.base.as_ref().map_or(None, |k| Some(k.node().clone())),
delta_base: delta.base.as_ref().map_or(None, |k| Some(k.node.clone())),
offset,
size: buf.len() as u64,
};
self.mem_index
.insert(delta.key.node().clone(), delta_location);
.insert(delta.key.node.clone(), delta_location);
Ok(())
}
fn read_entry(&self, key: &Key) -> Fallible<(Delta, Metadata)> {
let location: &DeltaLocation = self.mem_index.get(key.node()).ok_or::<Error>(
let location: &DeltaLocation = self.mem_index.get(&key.node).ok_or::<Error>(
MutableDataPackError(format!("Unable to find key {:?} in mutable datapack", key))
.into(),
)?;
@@ -202,7 +202,7 @@ impl Store for MutableDataPack {
fn get_missing(&self, keys: &[Key]) -> Fallible<Vec<Key>> {
Ok(keys
.iter()
.filter(|k| self.mem_index.get(k.node()).is_none())
.filter(|k| self.mem_index.get(&k.node).is_none())
.map(|k| k.clone())
.collect())
}
@@ -287,7 +287,7 @@ mod tests {
mutdatapack.add(&delta, &Default::default()).unwrap();
let delta2 = Delta {
data: Bytes::from(&[0, 1, 2][..]),
base: Some(Key::new(Vec::new(), delta.key.node().clone())),
base: Some(Key::new(Vec::new(), delta.key.node.clone())),
key: Key::new(Vec::new(), Node::random(&mut rng)),
};
mutdatapack.add(&delta2, &Default::default()).unwrap();


@@ -105,7 +105,7 @@ impl MutableHistoryPack {
// Write nodes
for (key, node_info) in node_map.iter() {
let p1 = &node_info.parents[0];
let copyfrom = if !p1.node().is_null() && p1.name() != key.name() {
let copyfrom = if !p1.node.is_null() && p1.name() != key.name() {
Some(p1.name())
} else {
None
@@ -114,9 +114,9 @@ impl MutableHistoryPack {
let node_offset = section_offset + writer.len() as usize;
HistoryEntry::write(
writer,
key.node(),
node_info.parents[0].node(),
node_info.parents[1].node(),
&key.node,
&node_info.parents[0].node,
&node_info.parents[1].node,
&node_info.linknode,
&copyfrom,
)?;


@@ -25,7 +25,7 @@ pub struct Key {
// Name is usually a file or directory path
pub(crate) name: Vec<u8>,
// Node is always a 20 byte hash. This will be changed to a fix length array later.
pub(crate) node: Node,
pub node: Node,
}
impl Key {
@@ -36,10 +36,6 @@ impl Key {
pub fn name(&self) -> &[u8] {
&self.name
}
pub fn node(&self) -> &Node {
&self.node
}
}
impl fmt::Display for Key {