Mirror of https://github.com/enso-org/enso.git (synced 2024-11-22 03:32:23 +03:00)

Extract common libraries to rust-lib (#1191)

This commit is contained in:
parent 3d65ffd3cd
commit f00b187438

Cargo.toml: 24 changed lines
@@ -2,24 +2,30 @@
members = [
    "lib/rust/ast",
    "lib/rust/enso-data",
    "lib/rust/enso-generics",
    "lib/rust/enso-logger",
    "lib/rust/enso-macro-utils",
    "lib/rust/enso-optics",
    "lib/rust/enso-prelude",
    "lib/rust/enso-shapely/impl",
    "lib/rust/enso-shapely/macros",
    "lib/rust/flexer",
    "lib/rust/flexer-testing/definition",
    "lib/rust/flexer-testing/generation",
    "lib/rust/launcher-shims",
    "lib/rust/lazy-reader",
    "lib/rust/lexer/definition",
    "lib/rust/lexer/generation",
    "lib/rust/parser",
]

# These patch versions exist to allow local development of these libraries alongside Enso. It
# assumes you have `rust-lib` in the same directory as `enso`. See:
# https://github.com/enso-org/rust-lib/blob/main/docs/CONTRIBUTING.md#developing-in-conjunction-with-enso--ide
[patch.crates-io]
# enso-automata = { path = '../rust-lib/src/automata' }
# enso-data = { path = '../rust-lib/src/data' }
# enso-generics = { path = '../rust-lib/src/generics' }
# enso-lazy-reader = { path = '../rust-lib/src/lazy-reader' }
# enso-logger = { path = '../rust-lib/src/logger' }
# enso-macro-utils = { path = '../rust-lib/src/macro-utils' }
# enso-optics = { path = '../rust-lib/src/optics' }
# enso-prelude = { path = '../rust-lib/src/prelude' }
# enso-shapely = { path = '../rust-lib/src/shapely/impl' }
# enso-shapely-macros = { path = '../rust-lib/src/shapely/macros' }

[profile.dev]
opt-level = 0
lto = false
@@ -1,19 +0,0 @@
[package]
name = "enso-data"
version = "0.1.0"
authors = ["Enso Team <enso-dev@enso.org>"]
edition = "2018"

description = "Useful data-types."
readme = "README.md"
homepage = "https://github.com/enso-org/enso/lib/rust/enso-data"
repository = "https://github.com/enso-org/enso"
license-file = "../../../LICENSE"

[lib]
crate-type = ["cdylib", "rlib"]

[dependencies]
enso-prelude = { version = "0.1.0" , path = "../enso-prelude" }

serde = { version = "1.0" , features = ["derive"] }
@@ -1,3 +0,0 @@
# Enso Data

Useful data types.
@@ -1,331 +0,0 @@
|
||||
//! A tree structure built on top of the `HashMap`.
|
||||
|
||||
use crate::prelude::*;
|
||||
|
||||
use std::collections::hash_map::RandomState;
|
||||
use std::hash::BuildHasher;
|
||||
|
||||
|
||||
|
||||
// ===================
|
||||
// === HashMapTree ===
|
||||
// ===================
|
||||
|
||||
/// A tree built on top of the `HashMap`. Each node in the tree can have zero or more branches
/// accessible by the given key type.
|
||||
#[derive(Derivative)]
|
||||
#[derivative(Debug (bound="K:Eq+Hash+Debug , V:Debug , S:BuildHasher"))]
|
||||
#[derivative(Default (bound="K:Eq+Hash , V:Default , S:BuildHasher+Default"))]
|
||||
#[derivative(Clone (bound="K:Clone , V:Clone , S:Clone"))]
|
||||
pub struct HashMapTree<K,V,S=RandomState> {
|
||||
/// Value of the current tree node.
|
||||
pub value : V,
|
||||
/// Branches of the current tree node.
|
||||
pub branches : HashMap<K,HashMapTree<K,V,S>,S>
|
||||
}
|
||||
|
||||
impl<K,T,S> HashMapTree<K,T,S>
|
||||
where K : Eq+Hash,
|
||||
S : BuildHasher+Default {
|
||||
/// Constructor.
|
||||
pub fn new() -> Self where T:Default {
|
||||
default()
|
||||
}
|
||||
|
||||
/// Constructor with explicit root value.
|
||||
pub fn from_value(value:T) -> Self {
|
||||
let branches = default();
|
||||
Self {value,branches}
|
||||
}
|
||||
|
||||
/// Sets the value at position described by `path`. In case a required sub-branch does not
|
||||
/// exist, a default instance will be created.
|
||||
#[inline]
|
||||
pub fn set<P,I>(&mut self, path:P, value:T)
|
||||
where P:IntoIterator<Item=I>, T:Default, I:Into<K> {
|
||||
self.get_or_create_node(path).value = value;
|
||||
}
|
||||
|
||||
/// Sets the value at position described by `path`. In case a required sub-branch does not
|
||||
/// exist, uses `cons_missing` to create it.
|
||||
#[inline]
|
||||
pub fn set_with<P,I,F>(&mut self, path:P, value:T, cons_missing:F)
|
||||
where P:IntoIterator<Item=I>, T:Default, I:Into<K>, F:FnMut()->T {
|
||||
self.get_or_create_node_with(path,cons_missing).value = value;
|
||||
}
|
||||
|
||||
/// Gets a reference to a value at the specified path if the path exists in the tree.
|
||||
#[inline]
|
||||
pub fn get<P,I>(&self, segments:P) -> Option<&T>
|
||||
where P:IntoIterator<Item=I>, I:Into<K> {
|
||||
self.get_node(segments).map(|node| &node.value)
|
||||
}
|
||||
|
||||
/// Gets a mutable reference to a value at the specified path if the path exists in the tree.
|
||||
#[inline]
|
||||
pub fn get_mut<P,I>(&mut self, segments:P) -> Option<&mut T>
|
||||
where P:IntoIterator<Item=I>, I:Into<K> {
|
||||
self.get_node_mut(segments).map(|node| &mut node.value)
|
||||
}
|
||||
|
||||
/// Gets a reference to a node at the specified path if the node exists.
|
||||
#[inline]
|
||||
pub fn get_node<P,I>(&self, segments:P) -> Option<&HashMapTree<K,T,S>>
|
||||
where P:IntoIterator<Item=I>, I:Into<K> {
|
||||
segments.into_iter().fold(Some(self),|map,t| {
|
||||
map.and_then(|m| {
|
||||
let key = t.into();
|
||||
m.branches.get(&key)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
/// Gets a mutable reference to a node at the specified path if the node exists.
|
||||
#[inline]
|
||||
pub fn get_node_mut<P,I>(&mut self, segments:P) -> Option<&mut HashMapTree<K,T,S>>
|
||||
where P:IntoIterator<Item=I>, I:Into<K> {
|
||||
segments.into_iter().fold(Some(self),|map,t| {
|
||||
map.and_then(|m| {
|
||||
let key = t.into();
|
||||
m.branches.get_mut(&key)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
/// Removes the node at the specified path.
|
||||
#[inline]
|
||||
pub fn remove<P,I>(&mut self, segments:P) -> Option<T>
|
||||
where P:IntoIterator<Item=I>, I:Into<K> {
|
||||
let mut segments = segments.into_iter().map(|t|t.into()).collect_vec();
|
||||
segments.pop().and_then(|last| {
|
||||
self.get_node_mut(segments).and_then(|node| {
|
||||
node.branches.remove(&last).map(|branch| branch.value)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
/// Iterates over keys in `path`. For each key, traverses into the appropriate branch. In case
|
||||
/// the branch does not exist, a default instance will be created. Returns mutable reference to
|
||||
/// the target tree node.
|
||||
#[inline]
|
||||
pub fn get_or_create_node<P,I>(&mut self, path:P) -> &mut HashMapTree<K,T,S>
|
||||
where P:IntoIterator<Item=I>, T:Default, I:Into<K> {
|
||||
self.get_or_create_node_with(path,default)
|
||||
}
|
||||
|
||||
/// Iterates over keys in `path`. For each key, traverses into the appropriate branch. In case
|
||||
/// the branch does not exist, uses `cons_missing` to construct it. Returns mutable reference to
|
||||
/// the target tree node.
|
||||
#[inline]
|
||||
pub fn get_or_create_node_with<P,I,F>
|
||||
(&mut self, path:P, cons_missing:F) -> &mut HashMapTree<K,T,S>
|
||||
where P:IntoIterator<Item=I>, I:Into<K>, F:FnMut()->T {
|
||||
self.get_or_create_node_traversing_with(path,cons_missing,|_|{})
|
||||
}
|
||||
|
||||
/// Iterates over keys in `path`. For each key, traverses into the appropriate branch. In case
|
||||
/// the branch does not exist, uses `cons_missing` provided with the current path to construct
|
||||
/// it. Returns mutable reference to the target tree node.
|
||||
#[inline]
|
||||
pub fn get_or_create_node_path_with<P,I,F>
|
||||
(&mut self, path:P, cons_missing:F) -> &mut HashMapTree<K,T,S>
|
||||
where K:Clone, P:IntoIterator<Item=I>, I:Into<K>, F:FnMut(&[K])->T {
|
||||
self.get_or_create_node_traversing_path_with(path,cons_missing,|_|{})
|
||||
}
|
||||
|
||||
/// Iterates over keys in `path`. For each key, traverses into the appropriate branch. In case
|
||||
/// the branch does not exist, uses `cons_missing` to construct it. Moreover, for each traversed
|
||||
/// branch the `callback` is evaluated. Returns mutable reference to the target tree node.
|
||||
#[inline]
|
||||
pub fn get_or_create_node_traversing_with<P,I,F,M>
|
||||
(&mut self, segments:P, mut cons_missing:F, mut callback:M) -> &mut HashMapTree<K,T,S>
|
||||
where P:IntoIterator<Item=I>, I:Into<K>, F:FnMut()->T, M:FnMut(&mut HashMapTree<K,T,S>) {
|
||||
segments.into_iter().fold(self,|map,t| {
|
||||
let key = t.into();
|
||||
let entry = map.branches.entry(key);
|
||||
let node = entry.or_insert_with(|| HashMapTree::from_value(cons_missing()));
|
||||
callback(node);
|
||||
node
|
||||
})
|
||||
}
|
||||
|
||||
/// Iterates over keys in `path`. For each key, traverses into the appropriate branch. In case
|
||||
/// the branch does not exist, uses `cons_missing` provided with the current path to construct
|
||||
/// it. Moreover, for each traversed branch the `callback` is evaluated. Returns mutable
|
||||
/// reference to the target tree node.
|
||||
#[inline]
|
||||
pub fn get_or_create_node_traversing_path_with<P,I,F,M>
|
||||
(&mut self, segments:P, mut cons_missing:F, mut callback:M) -> &mut HashMapTree<K,T,S>
|
||||
where K : Clone,
|
||||
P : IntoIterator<Item=I>,
|
||||
I : Into<K>,
|
||||
F : FnMut(&[K])->T,
|
||||
M : FnMut(&mut HashMapTree<K,T,S>) {
|
||||
let mut path = Vec::new();
|
||||
segments.into_iter().fold(self,|map,t| {
|
||||
let key = t.into();
|
||||
path.push(key.clone());
|
||||
let entry = map.branches.entry(key);
|
||||
let node = entry.or_insert_with(|| HashMapTree::from_value(cons_missing(&path)));
|
||||
callback(node);
|
||||
node
|
||||
})
|
||||
}
|
||||
|
||||
/// Zips two trees together into a new tree with cloned values.
|
||||
#[inline]
|
||||
pub fn zip_clone<T2>
|
||||
(&self, other:&HashMapTree<K,T2,S>) -> HashMapTree<K,AtLeastOneOfTwo<T,T2>,S>
|
||||
where K:Clone, T:Clone, T2:Clone {
|
||||
Self::zip_clone_branches(Some(self),Some(other))
|
||||
}
|
||||
|
||||
fn zip_clone_branches<T2>
|
||||
(tree1:Option<&HashMapTree<K,T,S>>, tree2:Option<&HashMapTree<K,T2,S>>)
|
||||
-> HashMapTree<K,AtLeastOneOfTwo<T,T2>,S>
|
||||
where K:Clone, T:Clone, T2:Clone {
|
||||
match (tree1,tree2) {
|
||||
(Some(tree1),Some(tree2)) => {
|
||||
let value = AtLeastOneOfTwo::Both(tree1.value.clone(),tree2.value.clone());
|
||||
let mut keys = tree1.branches.keys().cloned().collect::<HashSet<K>>();
|
||||
keys.extend(tree2.branches.keys().cloned());
|
||||
let branches = keys.into_iter().map(|key| {
|
||||
let branch1 = tree1.branches.get(&key);
|
||||
let branch2 = tree2.branches.get(&key);
|
||||
(key,Self::zip_clone_branches(branch1,branch2))
|
||||
}).collect();
|
||||
HashMapTree {value,branches}
|
||||
}
|
||||
|
||||
(Some(tree),None) => {
|
||||
let value = AtLeastOneOfTwo::First(tree.value.clone());
|
||||
let mut keys = tree.branches.keys().cloned().collect::<HashSet<K>>();
|
||||
keys.extend(tree.branches.keys().cloned());
|
||||
let branches = tree.branches.iter().map(|(key,branch)| {
|
||||
(key.clone(),Self::zip_clone_branches(Some(branch),None))
|
||||
}).collect();
|
||||
HashMapTree {value,branches}
|
||||
}
|
||||
|
||||
(None,Some(tree)) => {
|
||||
let value = AtLeastOneOfTwo::Second(tree.value.clone());
|
||||
let mut keys = tree.branches.keys().cloned().collect::<HashSet<K>>();
|
||||
keys.extend(tree.branches.keys().cloned());
|
||||
let branches = tree.branches.iter().map(|(key,branch)| {
|
||||
(key.clone(),Self::zip_clone_branches(None,Some(branch)))
|
||||
}).collect();
|
||||
HashMapTree {value,branches}
|
||||
}
|
||||
_ => panic!("Impossible")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<K,T,S> HashMapTree<K,Option<T>,S>
|
||||
where K:Eq+Hash {
|
||||
/// Gets the current value or creates new default one if missing.
|
||||
pub fn value_or_default(&mut self) -> &mut T where T:Default {
|
||||
self.value_or_set_with(default)
|
||||
}
|
||||
|
||||
/// Gets the current value or creates new one if missing.
|
||||
pub fn value_or_set(&mut self, val:T) -> &mut T {
|
||||
self.value_or_set_with(move || val)
|
||||
}
|
||||
|
||||
/// Gets the current value or creates new one if missing.
|
||||
pub fn value_or_set_with<F>(&mut self, cons:F) -> &mut T
|
||||
where F:FnOnce()->T {
|
||||
if self.value.is_none() {
|
||||
self.value = Some(cons());
|
||||
};
|
||||
self.value.as_mut().unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// === Impls ===
|
||||
|
||||
impl<K,V,S> PartialSemigroup<HashMapTree<K,V,S>> for HashMapTree<K,V,S>
|
||||
where K : Eq + Hash + Clone,
|
||||
V : Semigroup,
|
||||
S : BuildHasher + Clone {
|
||||
fn concat_mut(&mut self, other:Self) {
|
||||
self.value.concat_mut(&other.value);
|
||||
PartialSemigroup::concat_mut(&mut self.branches, other.branches);
|
||||
}
|
||||
}
|
||||
|
||||
impl<K,V,S> PartialSemigroup<&HashMapTree<K,V,S>> for HashMapTree<K,V,S>
|
||||
where K : Eq + Hash + Clone,
|
||||
V : Semigroup,
|
||||
S : BuildHasher + Clone {
|
||||
fn concat_mut(&mut self, other:&Self) {
|
||||
self.value.concat_mut(&other.value);
|
||||
PartialSemigroup::concat_mut(&mut self.branches, &other.branches);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// === Iterators ===
|
||||
|
||||
macro_rules! define_borrow_iterator {
|
||||
($tp_name:ident $fn_name:ident $($mut:tt)?) => {
|
||||
/// Iterator.
|
||||
pub struct $tp_name<'a,K,V,S> {
|
||||
iters : Vec<std::collections::hash_map::$tp_name<'a,K,HashMapTree<K,V,S>>>,
|
||||
path : Vec<&'a K>,
|
||||
}
|
||||
|
||||
impl<'a,K,V,S> Iterator for $tp_name<'a,K,V,S> {
|
||||
type Item = (Vec<&'a K>, &'a $($mut)? V);
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
loop {
|
||||
match self.iters.pop() {
|
||||
None => break None,
|
||||
Some(mut iter) => {
|
||||
match iter.next() {
|
||||
None => { self.path.pop(); }
|
||||
Some((sub_key,sub_tree)) => {
|
||||
self.iters.push(iter);
|
||||
self.iters.push(sub_tree.branches.$fn_name());
|
||||
self.path.push(sub_key);
|
||||
break Some((self.path.clone(),& $($mut)? sub_tree.value))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a,K,V,S> IntoIterator for &'a $($mut)? HashMapTree<K,V,S> {
|
||||
type Item = (Vec<&'a K>,&'a $($mut)? V);
|
||||
type IntoIter = $tp_name<'a,K,V,S>;
|
||||
|
||||
#[inline]
|
||||
fn into_iter(self) -> Self::IntoIter {
|
||||
let iters = vec![self.branches.$fn_name()];
|
||||
let path = default();
|
||||
$tp_name {iters,path}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a,K,V,S> Debug for $tp_name<'a,K,V,S> {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f,stringify!($tp_name))
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
define_borrow_iterator!(Iter iter);
|
||||
define_borrow_iterator!(IterMut iter_mut mut);
|
||||
|
||||
|
||||
|
||||
// =============
|
||||
// === Tests ===
|
||||
// =============
|
||||
|
||||
// TODO: We should have tests here.
|
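The `hash_map_tree` module deleted above ends with a TODO about missing tests. As a minimal illustration of the API it defines (`set`, `get`, and default-creating traversal), a hypothetical usage sketch could look as follows; the import path assumes the module stays reachable through the `enso-data` crate root:

use enso_data::hash_map_tree::HashMapTree;

fn main() {
    // Keys convert into `String`; `Option<usize>` values make the default
    // (`None`) of implicitly created intermediate nodes easy to observe.
    let mut tree : HashMapTree<String,Option<usize>> = HashMapTree::new();
    tree.set(vec!["usr","bin"], Some(1));
    tree.set(vec!["usr","lib"], Some(2));
    assert_eq!(tree.get(vec!["usr","bin"]), Some(&Some(1)));
    // The `usr` node was created on demand with the default value.
    assert_eq!(tree.get(vec!["usr"]), Some(&None));
    assert_eq!(tree.get(vec!["tmp"]), None);
}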
@@ -1,84 +0,0 @@
//! This module defines a typed index struct. Useful to introduce type safety when using indexes
//! into several indexable containers.

use crate::prelude::*;



// =============
// === Index ===
// =============

/// Typed newtype for `usize` meant to be used as a typed index.
pub struct Index<T> {
    /// Raw value.
    pub raw : usize,
    phantom : PhantomData<T>
}

impl<T> Index<T> {
    /// Constructor.
    pub fn new(raw:usize) -> Self {
        let phantom = default();
        Self {raw,phantom}
    }
}

// === Impls ===

impl<T> Copy for Index<T> {}
impl<T> Eq   for Index<T> {}

impl<T> Clone for Index<T> {
    fn clone(&self) -> Self {
        *self
    }
}

impl<T> Hash for Index<T> {
    fn hash<H:std::hash::Hasher>(&self, state:&mut H) {
        self.raw.hash(state)
    }
}

impl<T> PartialEq for Index<T> {
    fn eq(&self, other:&Self) -> bool {
        self.raw == other.raw
    }
}

impl<T> From<Index<T>> for usize {
    fn from(t:Index<T>) -> Self {
        t.raw
    }
}

impl<T> From<&Index<T>> for usize {
    fn from(t:&Index<T>) -> Self {
        t.raw
    }
}

impl<T> From<usize> for Index<T> {
    fn from(t:usize) -> Self {
        Self::new(t)
    }
}

impl<T> From<&usize> for Index<T> {
    fn from(t:&usize) -> Self {
        Self::new(*t)
    }
}

impl<T> Debug for Index<T> {
    fn fmt(&self, f:&mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f,"{}",self.raw)
    }
}

impl<T> Display for Index<T> {
    fn fmt(&self, f:&mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f,"{}",self.raw)
    }
}
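A short, hypothetical sketch of how this newtype is intended to be used, telling apart indices of different containers at the type level (the marker types below are invented for illustration):

use enso_data::index::Index;

// Hypothetical marker types distinguishing two kinds of indices.
struct Lines;
struct Columns;

fn line_number(ix:Index<Lines>) -> usize { ix.into() }

fn main() {
    let line : Index<Lines>   = 3.into();
    let col  : Index<Columns> = 7.into();
    assert_eq!(line_number(line), 3);
    // `line_number(col)` would not compile: the phantom parameter differs.
    let _ = col;
}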
@@ -1,16 +0,0 @@
//! Library of general data structures.

#![feature(associated_type_bounds)]
#![feature(trait_alias)]

#![warn(unsafe_code)]
#![warn(missing_copy_implementations)]
#![warn(missing_debug_implementations)]
#![warn(missing_docs)]

pub mod hash_map_tree;
pub mod index;
pub mod opt_vec;
pub mod text;

pub use enso_prelude as prelude;
@@ -1,259 +0,0 @@
//! A sparse vector implementation.

use crate::prelude::*;
use std::iter::FilterMap;
use std::slice;



// ==============
// === OptVec ===
// ==============

// === Definition ===

/// A contiguous growable sparse array type. Similar to `Vec<T>`, but allowing missing values.
/// After a value is removed, it remembers the index for reuse in the future. Unlike `Vec`, it is
/// parametrized with an optional `Index` type variable which will be used for indexing the vector.
/// The index type has to implement the `Index` trait.
#[derive(Derivative)]
#[derivative(Default(bound=""))]
#[derive(Clone,Debug,Shrinkwrap)]
pub struct OptVec<T,Index=usize> {
    #[shrinkwrap(main_field)]
    items    : Vec<Option<T>>,
    free_ixs : SmallVec<[Index; 128]>,
}


// === Types ===

/// A trait for any vector index type.
pub trait Index = Debug + Copy + Into<usize> where usize : Into<Self>;

/// Iterator type of this vector.
pub type Iter<'t,T> = FilterMap<slice::Iter<'t,Option<T>>, OptionAsRef<T>>;

/// Mutable iterator type of this vector.
pub type IterMut<'t,T> = FilterMap<slice::IterMut<'t, Option<T>>, OptionAsRefMut<T>>;

/// Subtype of `Iter`.
pub type OptionAsRef <T> = for<'r> fn(&'r Option<T>) -> Option<&'r T>;

/// Subtype of `IterMut`.
pub type OptionAsRefMut <T> = for<'r> fn(&'r mut Option<T>) -> Option<&'r mut T>;


// === Construction ===

impl<T,I:Index> OptVec<T,I> {
    /// Constructs a new, empty `OptVec<T>`. It will not allocate until elements are pushed onto it.
    pub fn new() -> Self {
        default()
    }
}


// === Status Checks ===

impl<T,I:Index> OptVec<T,I> {
    /// Returns the number of elements in the vector, including reserved indexes. Also referred to
    /// as its 'length'.
    pub fn len(&self) -> usize {
        self.items.len() - self.free_ixs.len()
    }

    /// Returns true if the vector contains no elements.
    pub fn is_empty(&self) -> bool {
        self.items.len() == self.free_ixs.len()
    }
}


// === Modifiers ===

impl<T,I:Index> OptVec<T,I> {
    /// Inserts the provided element into the vector. It reuses free indexes if any.
    pub fn insert(&mut self, item: T) -> I {
        self.insert_with_ix(|_| item)
    }

    /// Finds a free index and inserts the element. A free index is reused in case the array is
    /// sparse, or a new one is added at the end in case there are no free places.
    pub fn insert_with_ix<F:FnOnce(I) -> T>(&mut self, f: F) -> I {
        match self.free_ixs.pop() {
            None => {
                let index = self.items.len().into();
                self.items.push(Some(f(index)));
                index
            }
            Some(index) => {
                self.items[index.into()] = Some(f(index));
                index
            }
        }
    }

    /// Reserve an index for further reuse. Please remember that you cannot use the index to read
    /// values unless the value is set.
    pub fn reserve_index(&mut self) -> I {
        self.free_ixs.pop().unwrap_or_else(|| {
            let index = self.items.len().into();
            self.items.push(None);
            index
        })
    }

    /// Sets the value at the given index. Panics if the index was already freed.
    pub fn set(&mut self, index:I, t:T) {
        self.items[index.into()] = Some(t);
    }

    /// Removes the element at the provided index and marks the index to be reused. Does nothing if
    /// the index was already empty. Panics if the index was out of bounds.
    pub fn remove(&mut self, index:I) -> Option<T> {
        let item = self.items[index.into()].take();
        item.iter().for_each(|_| self.free_ixs.push(index));
        item
    }
}


// === Indexing ===

impl<T,I:Index> OptVec<T,I> {
    /// Index into the vector. Returns `None` if the key was already freed.
    pub fn safe_index(&self, index:I) -> Option<&T> {
        self.items[index.into()].as_ref()
    }

    /// Index into the vector. Returns `None` if the key was already freed.
    pub fn safe_index_mut(&mut self, index:I) -> Option<&mut T> {
        self.items[index.into()].as_mut()
    }
}

impl<T,I:Index> std::ops::Index<I> for OptVec<T,I> {
    type Output = T;
    fn index(&self, index:I) -> &Self::Output {
        let error = || panic!(format!("Trying to access removed index `{:?}`.",index));
        self.items.index(index.into()).as_ref().unwrap_or_else(error)
    }
}

impl<T,I:Index> std::ops::IndexMut<I> for OptVec<T,I> {
    fn index_mut(&mut self, index:I) -> &mut Self::Output {
        let error = || panic!(format!("Trying to access removed index `{:?}`.",index));
        self.items.index_mut(index.into()).as_mut().unwrap_or_else(error)
    }
}


// === Iterators ===

impl<T,I:Index> OptVec<T,I> {
    /// Iterator.
    pub fn iter(&self) -> Iter<T> {
        self.items.iter().filter_map(Option::as_ref)
    }

    /// Mutable iterator.
    pub fn iter_mut(&mut self) -> IterMut<T> {
        self.items.iter_mut().filter_map(Option::as_mut)
    }
}

impl<'a,T,I:Index> IntoIterator for &'a OptVec<T,I> {
    type Item = &'a T;
    type IntoIter = Iter<'a,T>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<'a,T,I:Index> IntoIterator for &'a mut OptVec<T,I> {
    type Item = &'a mut T;
    type IntoIter = IterMut<'a,T>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}



// =============
// === Tests ===
// =============

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_add() {
        let mut v = OptVec::<usize>::new();
        assert!(v.is_empty());

        let ix1 = v.insert(1);
        assert_eq!(ix1,0);
        assert_eq!(v.len(),1);
        assert!(!v.is_empty());

        let ix2 = v.insert(2);
        assert_eq!(ix2,1);
        assert_eq!(v.len(),2);

        v.remove(ix1);
        assert_eq!(v.len(),1);

        v.remove(ix2);
        assert_eq!(v.len(),0);
        assert!(v.is_empty());

        let ix3 = v.insert(3);
        assert_eq!(v.len(),1);

        let ix4 = v.insert(4);
        assert_eq!(ix3,1);
        assert_eq!(ix4,0);
        assert_eq!(v.len(),2);
    }

    #[test]
    fn test_iter() {
        let mut v = OptVec::<usize>::new();

        let ix1 = v.insert(0);
        let _ix2 = v.insert(1);
        let _ix3 = v.insert(2);
        assert_eq!(v.len(),3);

        for (i,value) in v.into_iter().enumerate() {
            assert_eq!(i, *value);
        }

        v.remove(ix1);
        assert_eq!(v.len(),2);
        for (i,value) in v.into_iter().enumerate() {
            assert_eq!(i + 1, *value);
        }
    }

    #[test]
    fn test_iter_mut() {
        let mut v = OptVec::<usize>::new();

        let ix1 = v.insert(0);
        let _ix2 = v.insert(1);
        let _ix3 = v.insert(2);
        assert_eq!(v.len(),3);

        v.remove(ix1);
        assert_eq!(v.len(),2);

        for value in &mut v { *value *= 2; }
        for (i, value) in v.into_iter().enumerate() {
            assert_eq!((i + 1) * 2, *value);
        }
    }
}
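Beyond the unit tests above, the `reserve_index`/`set` pair is worth a tiny illustration, since it is the part of the `OptVec` API that allocates a slot before its value exists (import path assumed, as in the other sketches):

use enso_data::opt_vec::OptVec;

fn main() {
    let mut v = OptVec::<String>::new();
    // A reserved slot is counted in `len` but not readable until a value is set.
    let ix = v.reserve_index();
    assert_eq!(v.safe_index(ix), None);
    v.set(ix, "hello".to_string());
    assert_eq!(v[ix], "hello");
    // Removing frees the slot, so the next insert reuses the same index.
    let _ = v.remove(ix);
    let ix2 = v.insert("world".to_string());
    assert_eq!(ix, ix2);
}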
@@ -1,525 +0,0 @@
|
||||
//! The common structures for text location and manipulation.
|
||||
|
||||
use enso_prelude::*;
|
||||
|
||||
use std::ops::Add;
|
||||
use std::ops::AddAssign;
|
||||
use std::ops::Range;
|
||||
use std::ops::Sub;
|
||||
use std::ops::SubAssign;
|
||||
use serde::Serialize;
|
||||
use serde::Deserialize;
|
||||
|
||||
|
||||
|
||||
// ======================================
// === Text Coordinates And Distances ===
// ======================================
|
||||
|
||||
// === Index ===
|
||||
|
||||
/// Strongly typed index into container.
|
||||
#[allow(missing_docs)]
|
||||
#[derive(Clone,Copy,Debug,Default,Hash,PartialEq,Eq,PartialOrd,Ord,Serialize,Deserialize)]
|
||||
pub struct Index { pub value:usize }
|
||||
|
||||
impl Index {
|
||||
/// Initializes Index with given value.
|
||||
pub fn new(value:usize) -> Self {
|
||||
Index {value}
|
||||
}
|
||||
|
||||
/// Create char index from the byte index. It must traverse the content to count chars.
|
||||
pub fn convert_byte_index(content:impl Str, index:ByteIndex) -> Self {
|
||||
let slice = &content.as_ref()[..index.value];
|
||||
Self::new(slice.chars().count())
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// === ByteIndex ===
|
||||
|
||||
/// Strongly typed index of a byte in a `String` (which may differ from the analogous character
/// index, because some chars take more than one byte).
|
||||
//TODO[ao] We should use structures from ensogl::math::topology to represent different quantities
|
||||
// and units.
|
||||
#[allow(missing_docs)]
|
||||
#[derive(Clone,Copy,Debug,Default,Hash,PartialEq,Eq,PartialOrd,Ord,Serialize,Deserialize)]
|
||||
pub struct ByteIndex { pub value:usize }
|
||||
|
||||
impl ByteIndex {
|
||||
/// Initializes Index with given value.
|
||||
pub fn new(value:usize) -> Self {
|
||||
ByteIndex {value}
|
||||
}
|
||||
}
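// --- Illustration (not part of the original module) --------------------------
// The byte-index / char-index distinction handled by `Index::convert_byte_index`
// above: in "żabka" the first char occupies two bytes, so byte index 3 maps to
// char index 2.
//
//     assert_eq!(Index::convert_byte_index("żabka", ByteIndex::new(3)), Index::new(2));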
|
||||
|
||||
|
||||
// === Size ===
|
||||
|
||||
/// Strongly typed size of container.
|
||||
#[allow(missing_docs)]
|
||||
#[derive(Clone,Copy,Debug,Default,Hash,PartialEq,Eq,PartialOrd,Ord,Serialize,Deserialize)]
|
||||
pub struct Size { pub value:usize }
|
||||
|
||||
impl Size {
|
||||
/// Initializes Size with given value.
|
||||
pub fn new(value:usize) -> Self {
|
||||
Size {value}
|
||||
}
|
||||
|
||||
/// Checks if this is a non-empty size (more than zero elements).
|
||||
pub fn non_empty(self) -> bool {
|
||||
self.value > 0
|
||||
}
|
||||
|
||||
/// Checks if this is an empty size (zero elements).
|
||||
pub fn is_empty(self) -> bool {
|
||||
self.value == 0
|
||||
}
|
||||
}
|
||||
|
||||
impl Add for Size {
|
||||
type Output = Size;
|
||||
fn add(self, rhs:Size) -> Size {
|
||||
Size {value:self.value + rhs.value}
|
||||
}
|
||||
}
|
||||
|
||||
impl AddAssign for Size {
|
||||
fn add_assign(&mut self, rhs: Size) {
|
||||
*self = *self + rhs;
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub for Size {
|
||||
type Output = Size;
|
||||
fn sub(self, rhs:Size) -> Size {
|
||||
Size{value: self.value - rhs.value}
|
||||
}
|
||||
}
|
||||
|
||||
impl SubAssign for Size {
|
||||
fn sub_assign(&mut self, rhs: Size) {
|
||||
*self = *self - rhs;
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Size {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f,"{}",self.value)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&str> for Size {
|
||||
fn from(text:&str) -> Self {
|
||||
Size::new(text.len())
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// === Span ===
|
||||
|
||||
/// Strongly typed span into container with index and size.
|
||||
#[allow(missing_docs)]
|
||||
#[derive(Clone,Copy,Debug,Default,Hash,PartialEq,Eq,PartialOrd,Ord,Serialize,Deserialize)]
|
||||
pub struct Span { pub index:Index, pub size:Size }
|
||||
|
||||
impl Span {
|
||||
/// Initializes Span with given values.
|
||||
pub fn new(index:Index, size:Size) -> Self {
|
||||
Span {index,size}
|
||||
}
|
||||
|
||||
/// Creates a span describing a range between two indices.
|
||||
pub fn from_indices(begin:Index, end:Index) -> Self {
|
||||
if end < begin {
|
||||
Self::from_indices(end,begin)
|
||||
} else {
|
||||
let index = begin;
|
||||
let size = end - begin;
|
||||
Span {index,size}
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a span from zero index with given length.
|
||||
pub fn from_beginning(size:Size) -> Self {
|
||||
Span {index:Index::new(0), size}
|
||||
}
|
||||
|
||||
/// Get the character after last character of this span.
|
||||
///
|
||||
/// If span has size 0, it returns the `index` field.
|
||||
pub fn end(&self) -> Index {
|
||||
self.index + self.size
|
||||
}
|
||||
|
||||
/// Check if this span contains character under `index`.
|
||||
pub fn contains(&self, index:Index) -> bool {
|
||||
self.index <= index && self.end() > index
|
||||
}
|
||||
|
||||
/// Check if this span contains the whole another span.
|
||||
pub fn contains_span(&self, span:&Span) -> bool {
|
||||
self.index <= span.index && self.end() >= span.end()
|
||||
}
|
||||
|
||||
/// Converts span to `Range<usize>`.
|
||||
pub fn range(self) -> Range<usize> {
|
||||
let start = self.index.value;
|
||||
let end = self.end().value;
|
||||
start .. end
|
||||
}
|
||||
|
||||
/// Expand the span by moving its left (start) index.
|
||||
pub fn extend_left(&mut self, size:Size) {
|
||||
self.index -= size;
|
||||
self.size += size;
|
||||
}
|
||||
|
||||
/// Expand the span by moving its right (end) index.
|
||||
pub fn extend_right(&mut self, size:Size) {
|
||||
self.size += size;
|
||||
}
|
||||
|
||||
/// Shrink the span by moving its left (start) index.
|
||||
pub fn shrink_left(&mut self, size:Size) {
|
||||
self.index += size;
|
||||
self.size -= size;
|
||||
}
|
||||
|
||||
/// Shrink the span by moving its right (end) index.
|
||||
pub fn shrink_right(&mut self, size:Size) {
|
||||
self.size -= size;
|
||||
}
|
||||
|
||||
/// Move the whole span left, maintaining its size.
|
||||
pub fn move_left(&mut self, size:Size) {
|
||||
self.index -= size;
|
||||
}
|
||||
|
||||
/// Move the whole span right, maintaining its size.
|
||||
pub fn move_right(&mut self, size:Size) {
|
||||
self.index += size;
|
||||
}
|
||||
|
||||
/// Move the start index of the span, adjusting the size.
|
||||
pub fn set_left(&mut self, new_left:Index) {
|
||||
let end = self.end();
|
||||
self.index = new_left;
|
||||
self.size = end - new_left;
|
||||
}
|
||||
|
||||
/// Move the end index of the span, adjusting the size.
|
||||
pub fn set_right(&mut self, new_right:Index) {
|
||||
self.size = new_right - self.index;
|
||||
}
|
||||
}
|
||||
|
||||
impls! { From + &From <Range<usize>> for Span { |range|
|
||||
Span::from_indices(Index::new(range.start), Index::new(range.end))
|
||||
}}
|
||||
|
||||
impls! { Into + &Into <Range<usize>> for Span { |this|
|
||||
this.range()
|
||||
}}
|
||||
|
||||
impl PartialEq<Range<usize>> for Span {
|
||||
fn eq(&self, other:&Range<usize>) -> bool {
|
||||
&self.range() == other
|
||||
}
|
||||
}
|
||||
|
||||
impl Display for Span {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f,"{}..{}",self.index.value,self.end().value)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::ops::Index<Span> for str {
|
||||
type Output = str;
|
||||
|
||||
fn index(&self, index:Span) -> &Self::Output {
|
||||
&self[index.range()]
|
||||
}
|
||||
}
|
||||
|
||||
impl std::ops::Index<Span> for String {
|
||||
type Output = str;
|
||||
|
||||
fn index(&self, index:Span) -> &Self::Output {
|
||||
&self.as_str()[index]
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Range<Index>> for Span {
|
||||
fn from(range:Range<Index>) -> Self {
|
||||
Span::from_indices(range.start,range.end)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// === Operators for Index and Size ===
|
||||
|
||||
impl Add<Size> for Index {
|
||||
type Output = Index;
|
||||
fn add(self, rhs:Size) -> Index {
|
||||
Index {value:self.value + rhs.value}
|
||||
}
|
||||
}
|
||||
|
||||
impl AddAssign<Size> for Index {
|
||||
fn add_assign(&mut self, rhs: Size) {
|
||||
*self = *self + rhs;
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub<Size> for Index {
|
||||
type Output = Index;
|
||||
fn sub(self, rhs:Size) -> Index {
|
||||
Index {value:self.value - rhs.value}
|
||||
}
|
||||
}
|
||||
|
||||
impl SubAssign<Size> for Index {
|
||||
fn sub_assign(&mut self, rhs: Size) {
|
||||
*self = *self - rhs;
|
||||
}
|
||||
}
|
||||
|
||||
impl Sub for Index {
|
||||
type Output = Size;
|
||||
fn sub(self, rhs:Index) -> Size {
|
||||
Size {value:self.value - rhs.value}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// === TextLocation ===
|
||||
|
||||
/// A position of character in a multiline text.
|
||||
#[derive(Copy,Clone,Debug,PartialEq,Eq,PartialOrd,Ord)]
|
||||
pub struct TextLocation {
|
||||
/// Line index.
|
||||
pub line: usize,
|
||||
/// Column is an index of a char in the given line.
|
||||
pub column: usize,
|
||||
}
|
||||
|
||||
/// Short pretty print representation in the form of `line:column`.
|
||||
impl Display for TextLocation {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f,"{}:{}",self.line,self.column)
|
||||
}
|
||||
}
|
||||
|
||||
impl TextLocation {
|
||||
/// Create location at the beginning of the given line.
|
||||
pub fn at_line_begin(line_index:usize) -> Self {
|
||||
TextLocation {
|
||||
line : line_index,
|
||||
column : 0,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create location at the beginning of the whole document.
|
||||
pub fn at_document_begin() -> Self {
|
||||
TextLocation {
|
||||
line : 0,
|
||||
column : 0,
|
||||
}
|
||||
}
|
||||
|
||||
/// Create location at the end of the whole document. It iterates over all the content.
|
||||
pub fn at_document_end(content:impl Str) -> Self {
|
||||
Self::after_chars(content.as_ref().chars())
|
||||
}
|
||||
|
||||
/// Convert from index of document with `content`. It iterates over all characters before
|
||||
/// `index`.
|
||||
pub fn from_index(content:impl Str, index:Index) -> Self {
|
||||
let before = content.as_ref().chars().take(index.value);
|
||||
Self::after_chars(before)
|
||||
}
|
||||
|
||||
/// Converts a range of indices into a range of TextLocation. It iterates over all characters
|
||||
/// before range's end.
|
||||
pub fn convert_range(content:impl Str, range:&Range<Index>) -> Range<Self> {
|
||||
let content = content.as_ref();
|
||||
Self::from_index(content,range.start)..Self::from_index(content,range.end)
|
||||
}
|
||||
|
||||
/// Converts a range in bytes into a range of TextLocation. It iterates over all characters
|
||||
/// before range's end.
|
||||
pub fn convert_byte_range(content:impl Str, range:&Range<ByteIndex>) -> Range<Self> {
|
||||
let start = Index::convert_byte_index(content.as_ref(), range.start);
|
||||
let end = Index::convert_byte_index(content.as_ref(), range.end);
|
||||
Self::convert_range(content,&(start..end))
|
||||
}
|
||||
|
||||
fn after_chars<IntoCharsIter>(chars:IntoCharsIter) -> Self
|
||||
where IntoCharsIter : IntoIterator<Item=char, IntoIter:Clone> {
|
||||
let iter = chars.into_iter();
|
||||
let len = iter.clone().count();
|
||||
let newlines = iter.enumerate().filter(|(_,c)| *c == '\n');
|
||||
let newlines_indices = newlines.map(|(i,_)| i);
|
||||
TextLocation {
|
||||
line : newlines_indices.clone().count(),
|
||||
column : len - newlines_indices.last().map_or(0, |i| i + 1),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ==============
|
||||
// === Change ===
|
||||
// ==============
|
||||
|
||||
/// A template for structure describing a text operation in one place.
|
||||
///
|
||||
/// This is a generalized template, because we use different representations for both the index
/// (e.g. `Index` or `TextLocation`) and the inserted content (it may be just `String`, but also
/// e.g. `Vec<char>` or `Vec<Vec<char>>` split by newlines).
|
||||
#[derive(Clone,Debug,Eq,Hash,PartialEq)]
|
||||
pub struct TextChangeTemplate<Index,Content> {
|
||||
/// Text fragment to be replaced. If we don't mean to remove any text, this should be an empty
/// range with its start set at the position where `lines` will be inserted
/// (see `TextChangeTemplate::insert` definition).
|
||||
pub replaced: Range<Index>,
|
||||
/// Text which replaces fragment described in `replaced` field.
|
||||
pub inserted: Content,
|
||||
}
|
||||
|
||||
/// The simplest change representation.
|
||||
pub type TextChange = TextChangeTemplate<Index,String>;
|
||||
|
||||
|
||||
// === Constructors ===
|
||||
|
||||
impl<Index:Copy,Content> TextChangeTemplate<Index,Content> {
|
||||
/// Creates operation which inserts text at given position.
|
||||
pub fn insert(at:Index, text:Content) -> Self {
|
||||
TextChangeTemplate {
|
||||
replaced : at..at,
|
||||
inserted: text,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Index,Content> TextChangeTemplate<Index,Content> {
|
||||
/// Creates operation which replaces text at given range with given string.
|
||||
pub fn replace(replaced:Range<Index>, text:Content) -> Self {
|
||||
let inserted = text;
|
||||
TextChangeTemplate {replaced,inserted}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Index:Sub+Clone,Content> TextChangeTemplate<Index,Content> {
|
||||
/// Calculate the size of the replaced text.
|
||||
pub fn replaced_size(&self) -> Index::Output {
|
||||
self.replaced.end.clone() - self.replaced.start.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl<Content> TextChangeTemplate<Index,Content> {
|
||||
/// Calculate the size of the replaced text.
|
||||
pub fn replaced_span(&self) -> Span {
|
||||
let index = self.replaced.start;
|
||||
let size = self.replaced_size();
|
||||
Span {index,size}
|
||||
}
|
||||
|
||||
/// Applies the text edit on given `String` value.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if the replaced span is out of the string value bounds.
|
||||
pub fn apply(&self, target:&mut String) where Content:AsRef<str> {
|
||||
//debug!(logger, "change: {change:?}, my code: \n```\n{code}\n```");
|
||||
let replaced_indices = self.replaced.start.value..self.replaced.end.value;
|
||||
//debug!(logger, "replacing range {replaced_indices:?} with {change.inserted}");
|
||||
target.replace_range(replaced_indices,self.inserted.as_ref());
|
||||
}
|
||||
|
||||
/// Applies the text edit on string and returns the result.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if the replaced span is out of the string value bounds.
|
||||
pub fn applied(&self, target:&str) -> String where Content:AsRef<str> {
|
||||
let mut target = target.to_string();
|
||||
self.apply(&mut target);
|
||||
target
|
||||
}
|
||||
}
|
||||
|
||||
impl<Index,Content:Default> TextChangeTemplate<Index,Content> {
|
||||
/// Creates operation which deletes text at given range.
|
||||
pub fn delete(range:Range<Index>) -> Self {
|
||||
TextChangeTemplate {
|
||||
replaced : range,
|
||||
inserted : default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =================
|
||||
// === Utilities ===
|
||||
// =================
|
||||
|
||||
/// Split text to lines handling both CR and CRLF line endings.
|
||||
pub fn split_to_lines(text:&str) -> impl Iterator<Item=String> + '_ {
|
||||
text.split('\n').map(cut_cr_at_end_of_line).map(|s| s.to_string())
|
||||
}
|
||||
|
||||
/// Returns slice without carriage return (also known as CR or `'\r'`) at line's end
|
||||
fn cut_cr_at_end_of_line(from:&str) -> &str {
|
||||
if from.ends_with('\r') {
|
||||
&from[..from.len()-1]
|
||||
} else {
|
||||
from
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ============
|
||||
// === Text ===
|
||||
// ============
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
||||
use super::Index;
|
||||
|
||||
#[test]
|
||||
fn converting_index_to_location() {
|
||||
let str = "first\nsecond\nthird";
|
||||
assert_eq!(TextLocation::from_index(str,Index::new(0)), TextLocation {line:0, column:0});
|
||||
assert_eq!(TextLocation::from_index(str,Index::new(5)), TextLocation {line:0, column:5});
|
||||
assert_eq!(TextLocation::from_index(str,Index::new(6)), TextLocation {line:1, column:0});
|
||||
assert_eq!(TextLocation::from_index(str,Index::new(9)), TextLocation {line:1, column:3});
|
||||
assert_eq!(TextLocation::from_index(str,Index::new(12)), TextLocation {line:1, column:6});
|
||||
assert_eq!(TextLocation::from_index(str,Index::new(13)), TextLocation {line:2, column:0});
|
||||
assert_eq!(TextLocation::from_index(str,Index::new(18)), TextLocation {line:2, column:5});
|
||||
|
||||
let str = "";
|
||||
assert_eq!(TextLocation {line:0, column:0}, TextLocation::from_index(str,Index::new(0)));
|
||||
|
||||
let str= "\n";
|
||||
assert_eq!(TextLocation {line:0, column:0}, TextLocation::from_index(str,Index::new(0)));
|
||||
assert_eq!(TextLocation {line:1, column:0}, TextLocation::from_index(str,Index::new(1)));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn text_location_at_end() {
|
||||
let str = "first\nsecond\nthird";
|
||||
assert_eq!(TextLocation::at_document_end(str) , TextLocation {line:2, column:5});
|
||||
assert_eq!(TextLocation::at_document_end("") , TextLocation {line:0, column:0});
|
||||
assert_eq!(TextLocation::at_document_end("\n"), TextLocation {line:1, column:0});
|
||||
}
|
||||
}
|
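The `TextChangeTemplate` machinery above is easiest to see through its `TextChange` alias: a replaced `Range<Index>` plus the inserted text. A hypothetical sketch of applying changes to a `String`, using only constructors defined in this module (import path assumed):

use enso_data::text::{Index, TextChange};

fn main() {
    let mut code = "hello world".to_string();
    // Replace the range [0,5) ("hello") with new text.
    let change = TextChange::replace(Index::new(0)..Index::new(5), "greetings".to_string());
    change.apply(&mut code);
    assert_eq!(code, "greetings world");
    // Insertion is just a replacement over an empty range.
    let comma = TextChange::insert(Index::new(9), ",".to_string());
    assert_eq!(comma.applied(&code), "greetings, world");
}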
@@ -1,19 +0,0 @@
[package]
name = "enso-generics"
version = "0.1.0"
authors = ["Enso Team <enso-dev@enso.org>"]
edition = "2018"

description = "A library for type-level and generic programming."
readme = "README.md"
homepage = "https://github.com/enso-org/enso/lib/rust/enso-generics"
repository = "https://github.com/enso-org/enso"
license-file = "../../../LICENSE"

keywords = ["type-level", "generic"]

[lib]
crate-type = ["cdylib", "rlib"]

[dependencies]
nalgebra = { version = "0.21.1" }
@@ -1,3 +0,0 @@
# Enso Generics

A library for generic and type-level programming.
@@ -1,208 +0,0 @@
|
||||
//! Generic representation of data types. Refer to the crate documentation to learn more.
|
||||
|
||||
// This crate defines many helper traits and uses this flag on purpose.
|
||||
#![allow(missing_docs)]
|
||||
|
||||
use super::hlist;
|
||||
pub use nalgebra::base::dimension::*;
|
||||
|
||||
|
||||
|
||||
// ==============
|
||||
// === Traits ===
|
||||
// ==============
|
||||
|
||||
/// Common traits.
|
||||
pub mod traits {
|
||||
pub use super::HasRepr as _TRAIT_HasRepr;
|
||||
pub use super::HasFieldsCount as _TRAIT_HasFieldsCount;
|
||||
|
||||
pub use super::HasItemAt as _TRAIT_HasItemAt;
|
||||
pub use super::HasItemAt0 as _TRAIT_HasItemAt0;
|
||||
pub use super::HasItemAt1 as _TRAIT_HasItemAt1;
|
||||
pub use super::HasItemAt2 as _TRAIT_HasItemAt2;
|
||||
pub use super::HasItemAt3 as _TRAIT_HasItemAt3;
|
||||
pub use super::HasItemAt4 as _TRAIT_HasItemAt4;
|
||||
pub use super::HasItemAt5 as _TRAIT_HasItemAt5;
|
||||
pub use super::HasItemAt6 as _TRAIT_HasItemAt6;
|
||||
pub use super::HasItemAt7 as _TRAIT_HasItemAt7;
|
||||
pub use super::HasItemAt8 as _TRAIT_HasItemAt8;
|
||||
pub use super::HasItemAt9 as _TRAIT_HasItemAt9;
|
||||
pub use super::HasItemAt10 as _TRAIT_HasItemAt10;
|
||||
pub use super::HasItemAt11 as _TRAIT_HasItemAt11;
|
||||
pub use super::HasItemAt12 as _TRAIT_HasItemAt12;
|
||||
pub use super::HasItemAt13 as _TRAIT_HasItemAt13;
|
||||
pub use super::HasItemAt14 as _TRAIT_HasItemAt14;
|
||||
pub use super::HasItemAt15 as _TRAIT_HasItemAt15;
|
||||
|
||||
pub use super::_GetItemAt as _TRAIT__GetItemAt;
|
||||
pub use super::GetItemAt as _TRAIT_GetItemAt;
|
||||
pub use super::GetItemAt0 as _TRAIT_GetItemAt0;
|
||||
pub use super::GetItemAt1 as _TRAIT_GetItemAt1;
|
||||
pub use super::GetItemAt2 as _TRAIT_GetItemAt2;
|
||||
pub use super::GetItemAt3 as _TRAIT_GetItemAt3;
|
||||
pub use super::GetItemAt4 as _TRAIT_GetItemAt4;
|
||||
pub use super::GetItemAt5 as _TRAIT_GetItemAt5;
|
||||
pub use super::GetItemAt6 as _TRAIT_GetItemAt6;
|
||||
pub use super::GetItemAt7 as _TRAIT_GetItemAt7;
|
||||
pub use super::GetItemAt8 as _TRAIT_GetItemAt8;
|
||||
pub use super::GetItemAt9 as _TRAIT_GetItemAt9;
|
||||
pub use super::GetItemAt10 as _TRAIT_GetItemAt10;
|
||||
pub use super::GetItemAt11 as _TRAIT_GetItemAt11;
|
||||
pub use super::GetItemAt12 as _TRAIT_GetItemAt12;
|
||||
pub use super::GetItemAt13 as _TRAIT_GetItemAt13;
|
||||
pub use super::GetItemAt14 as _TRAIT_GetItemAt14;
|
||||
pub use super::GetItemAt15 as _TRAIT_GetItemAt15;
|
||||
|
||||
pub use super::ItemAt as _TRAIT_ItemAt;
|
||||
pub use super::ItemAt0 as _TRAIT_ItemAt0;
|
||||
pub use super::ItemAt1 as _TRAIT_ItemAt1;
|
||||
pub use super::ItemAt2 as _TRAIT_ItemAt2;
|
||||
pub use super::ItemAt3 as _TRAIT_ItemAt3;
|
||||
pub use super::ItemAt4 as _TRAIT_ItemAt4;
|
||||
pub use super::ItemAt5 as _TRAIT_ItemAt5;
|
||||
pub use super::ItemAt6 as _TRAIT_ItemAt6;
|
||||
pub use super::ItemAt7 as _TRAIT_ItemAt7;
|
||||
pub use super::ItemAt8 as _TRAIT_ItemAt8;
|
||||
pub use super::ItemAt9 as _TRAIT_ItemAt9;
|
||||
pub use super::ItemAt10 as _TRAIT_ItemAt10;
|
||||
pub use super::ItemAt11 as _TRAIT_ItemAt11;
|
||||
pub use super::ItemAt12 as _TRAIT_ItemAt12;
|
||||
pub use super::ItemAt13 as _TRAIT_ItemAt13;
|
||||
pub use super::ItemAt14 as _TRAIT_ItemAt14;
|
||||
pub use super::ItemAt15 as _TRAIT_ItemAt15;
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ===============
|
||||
// === HasRepr ===
|
||||
// ===============
|
||||
|
||||
/// Association of a given type with its generic representation.
|
||||
pub trait HasRepr {
|
||||
type GenericRepr : hlist::HList;
|
||||
}
|
||||
|
||||
/// Type level accessor of a generic representation of the given type.
|
||||
pub type Repr<T> = <T as HasRepr>::GenericRepr;
|
||||
|
||||
/// Converts the type to its generic representation. Please note that this trait is implemented
|
||||
/// automatically for every type which implements `Into<Repr<Self>>`.
|
||||
pub trait IntoGeneric : HasRepr + Into<Repr<Self>> {
|
||||
fn into_generic(self) -> Repr<Self> {
|
||||
self.into()
|
||||
}
|
||||
}
|
||||
impl<T> IntoGeneric for T where T : HasRepr + Into<Repr<T>> {}
|
||||
|
||||
|
||||
|
||||
// ======================
|
||||
// === HasFieldsCount ===
|
||||
// ======================
|
||||
|
||||
/// Information about the field count of any structure implementing `Generics`. This trait is
/// implemented automatically.
|
||||
pub trait HasFieldsCount {
|
||||
const FIELDS_COUNT : usize;
|
||||
fn fields_count() -> usize {
|
||||
Self::FIELDS_COUNT
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> HasFieldsCount for T
|
||||
where T:HasRepr {
|
||||
const FIELDS_COUNT : usize = <Repr<T> as hlist::HasLength>::LEN;
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ================
|
||||
// === HasIndex ===
|
||||
// ================
|
||||
|
||||
/// Trait for heterogeneous containers like tuples which contain element at index `Ix`.
|
||||
pub trait HasItemAt<Ix> { type Item; }
|
||||
pub trait HasItemAt0 = HasItemAt<U0>;
|
||||
pub trait HasItemAt1 = HasItemAt<U1>;
|
||||
pub trait HasItemAt2 = HasItemAt<U2>;
|
||||
pub trait HasItemAt3 = HasItemAt<U3>;
|
||||
pub trait HasItemAt4 = HasItemAt<U4>;
|
||||
pub trait HasItemAt5 = HasItemAt<U5>;
|
||||
pub trait HasItemAt6 = HasItemAt<U6>;
|
||||
pub trait HasItemAt7 = HasItemAt<U7>;
|
||||
pub trait HasItemAt8 = HasItemAt<U8>;
|
||||
pub trait HasItemAt9 = HasItemAt<U9>;
|
||||
pub trait HasItemAt10 = HasItemAt<U10>;
|
||||
pub trait HasItemAt11 = HasItemAt<U11>;
|
||||
pub trait HasItemAt12 = HasItemAt<U12>;
|
||||
pub trait HasItemAt13 = HasItemAt<U13>;
|
||||
pub trait HasItemAt14 = HasItemAt<U14>;
|
||||
pub trait HasItemAt15 = HasItemAt<U15>;
|
||||
|
||||
/// Type of element at index `Ix`. Useful for heterogeneous containers like tuples.
|
||||
pub type ItemAt<Ix,T> = <T as HasItemAt<Ix>>::Item;
|
||||
pub type ItemAt0 <T> = ItemAt <U0 , T>;
|
||||
pub type ItemAt1 <T> = ItemAt <U1 , T>;
|
||||
pub type ItemAt2 <T> = ItemAt <U2 , T>;
|
||||
pub type ItemAt3 <T> = ItemAt <U3 , T>;
|
||||
pub type ItemAt4 <T> = ItemAt <U4 , T>;
|
||||
pub type ItemAt5 <T> = ItemAt <U5 , T>;
|
||||
pub type ItemAt6 <T> = ItemAt <U6 , T>;
|
||||
pub type ItemAt7 <T> = ItemAt <U7 , T>;
|
||||
pub type ItemAt8 <T> = ItemAt <U8 , T>;
|
||||
pub type ItemAt9 <T> = ItemAt <U9 , T>;
|
||||
pub type ItemAt10 <T> = ItemAt <U10 , T>;
|
||||
pub type ItemAt11 <T> = ItemAt <U11 , T>;
|
||||
pub type ItemAt12 <T> = ItemAt <U12 , T>;
|
||||
pub type ItemAt13 <T> = ItemAt <U13 , T>;
|
||||
pub type ItemAt14 <T> = ItemAt <U14 , T>;
|
||||
pub type ItemAt15 <T> = ItemAt <U15 , T>;
|
||||
|
||||
/// Accessor for element at index `Ix`.
|
||||
pub trait GetItemAt<Ix> : HasItemAt<Ix> + _GetItemAt {
|
||||
fn get_item_at(&self) -> &ItemAt<Ix,Self>;
|
||||
}
|
||||
|
||||
/// Smart wrapper for `GetItemAt`. Enables syntax `lst.item_at::<U0>()`.
|
||||
impl<T> _GetItemAt for T {}
|
||||
pub trait _GetItemAt {
|
||||
fn item_at<Ix>(&self) -> &ItemAt<Ix,Self> where Self:GetItemAt<Ix> {
|
||||
GetItemAt::<Ix>::get_item_at(self)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait GetItemAt0 : GetItemAt <U0> { fn _0(&self) -> &ItemAt0 <Self> {self.item_at::<U0> ()} }
|
||||
pub trait GetItemAt1 : GetItemAt <U1> { fn _1(&self) -> &ItemAt1 <Self> {self.item_at::<U1> ()} }
|
||||
pub trait GetItemAt2 : GetItemAt <U2> { fn _2(&self) -> &ItemAt2 <Self> {self.item_at::<U2> ()} }
|
||||
pub trait GetItemAt3 : GetItemAt <U3> { fn _3(&self) -> &ItemAt3 <Self> {self.item_at::<U3> ()} }
|
||||
pub trait GetItemAt4 : GetItemAt <U4> { fn _4(&self) -> &ItemAt4 <Self> {self.item_at::<U4> ()} }
|
||||
pub trait GetItemAt5 : GetItemAt <U5> { fn _5(&self) -> &ItemAt5 <Self> {self.item_at::<U5> ()} }
|
||||
pub trait GetItemAt6 : GetItemAt <U6> { fn _6(&self) -> &ItemAt6 <Self> {self.item_at::<U6> ()} }
|
||||
pub trait GetItemAt7 : GetItemAt <U7> { fn _7(&self) -> &ItemAt7 <Self> {self.item_at::<U7> ()} }
|
||||
pub trait GetItemAt8 : GetItemAt <U8> { fn _8(&self) -> &ItemAt8 <Self> {self.item_at::<U8> ()} }
|
||||
pub trait GetItemAt9 : GetItemAt <U9> { fn _9(&self) -> &ItemAt9 <Self> {self.item_at::<U9> ()} }
|
||||
pub trait GetItemAt10 : GetItemAt<U10> { fn _10(&self) -> &ItemAt10 <Self> {self.item_at::<U10>()} }
|
||||
pub trait GetItemAt11 : GetItemAt<U11> { fn _11(&self) -> &ItemAt11 <Self> {self.item_at::<U11>()} }
|
||||
pub trait GetItemAt12 : GetItemAt<U12> { fn _12(&self) -> &ItemAt12 <Self> {self.item_at::<U12>()} }
|
||||
pub trait GetItemAt13 : GetItemAt<U13> { fn _13(&self) -> &ItemAt13 <Self> {self.item_at::<U13>()} }
|
||||
pub trait GetItemAt14 : GetItemAt<U14> { fn _14(&self) -> &ItemAt14 <Self> {self.item_at::<U14>()} }
|
||||
pub trait GetItemAt15 : GetItemAt<U15> { fn _15(&self) -> &ItemAt15 <Self> {self.item_at::<U15>()} }
|
||||
|
||||
impl<T:GetItemAt<U0>> GetItemAt0 for T {}
|
||||
impl<T:GetItemAt<U1>> GetItemAt1 for T {}
|
||||
impl<T:GetItemAt<U2>> GetItemAt2 for T {}
|
||||
impl<T:GetItemAt<U3>> GetItemAt3 for T {}
|
||||
impl<T:GetItemAt<U4>> GetItemAt4 for T {}
|
||||
impl<T:GetItemAt<U5>> GetItemAt5 for T {}
|
||||
impl<T:GetItemAt<U6>> GetItemAt6 for T {}
|
||||
impl<T:GetItemAt<U7>> GetItemAt7 for T {}
|
||||
impl<T:GetItemAt<U8>> GetItemAt8 for T {}
|
||||
impl<T:GetItemAt<U9>> GetItemAt9 for T {}
|
||||
impl<T:GetItemAt<U10>> GetItemAt10 for T {}
|
||||
impl<T:GetItemAt<U11>> GetItemAt11 for T {}
|
||||
impl<T:GetItemAt<U12>> GetItemAt12 for T {}
|
||||
impl<T:GetItemAt<U13>> GetItemAt13 for T {}
|
||||
impl<T:GetItemAt<U14>> GetItemAt14 for T {}
|
||||
impl<T:GetItemAt<U15>> GetItemAt15 for T {}
|
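A hypothetical sketch tying `HasRepr` and `HasFieldsCount` together; the import paths and the `Point` type are assumptions made for illustration (the `hlist` module providing `Cons` and `Nil` is the one shown next):

use enso_generics::{HasRepr, HasFieldsCount};
use enso_generics::hlist::{Cons, Nil};

// An invented user type associated with its generic (HList) representation.
struct Point { x:f32, y:f32 }

impl HasRepr for Point {
    type GenericRepr = Cons<f32,Cons<f32,Nil>>;
}

fn main() {
    // `HasFieldsCount` comes for free: it reads the length of the HList repr.
    assert_eq!(Point::fields_count(), 2);
    let p = Point { x:1.0, y:2.0 };
    assert!(p.x < p.y);
}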
@@ -1,369 +0,0 @@
|
||||
//! HList provides many operations to create and manipulate heterogeneous lists (HLists) whose length
|
||||
//! and element types are known at compile-time. HLists can be used to implement records, variants,
|
||||
//! type-indexed products (TIP), type-indexed co-products (TIC), or keyword arguments.
|
||||
|
||||
|
||||
|
||||
// =============
|
||||
// === HList ===
|
||||
// =============
|
||||
|
||||
/// Type of every `HList`.
|
||||
pub trait HList = HasLength;
|
||||
|
||||
/// Empty `HList` value.
|
||||
#[derive(Debug,Clone,Copy)]
|
||||
pub struct Nil;
|
||||
|
||||
/// Non-empty `HList` with head and tail.
|
||||
#[derive(Debug,Clone,Copy)]
|
||||
#[allow(missing_docs)]
|
||||
pub struct Cons<Head,Tail>(pub Head, pub Tail);
|
||||
|
||||
|
||||
|
||||
// === Smart Constructors ===
|
||||
|
||||
/// Creates new `HList` from the provided elements, similar to `vec!`. In order to provide type for
|
||||
/// the list, use the `ty` macro. In order to pattern match on it, use the `pat` macro.
|
||||
///
|
||||
/// ```compile_fail
|
||||
/// let HList::pat![t1,t2] : HList::ty![&str,usize] = HList::new!["hello",7];
|
||||
/// ```
|
||||
#[macro_export]
|
||||
macro_rules! new {
|
||||
($(,)*) => { $crate::Nil };
|
||||
($t:expr $(,$($ts:expr),*)?) => {
|
||||
$crate::Cons($t,$crate::new!{$($($ts),*)?})
|
||||
}
|
||||
}
|
||||
|
||||
/// Pattern matches on a `HList`. See docs of `new` to learn more.
|
||||
#[macro_export]
|
||||
macro_rules! pat {
|
||||
($(,)*) => { $crate::Nil };
|
||||
($t:pat $(,$($ts:pat),*)?) => {
|
||||
$crate::Cons($t,$crate::pat!{$($($ts),*)?})
|
||||
}
|
||||
}
|
||||
|
||||
/// Smart `HList` type constructor. See docs of `new` to learn more.
|
||||
#[macro_export]
|
||||
macro_rules! ty {
|
||||
($(,)*) => { $crate::Nil };
|
||||
($t:ty $(,$($ts:ty),*)?) => {
|
||||
$crate::Cons<$t,$crate::ty!{$($($ts),*)?}>
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ==============
|
||||
// === Length ===
|
||||
// ==============
|
||||
|
||||
/// Compile-time known length value.
|
||||
#[allow(missing_docs)]
|
||||
pub trait HasLength {
|
||||
const LEN : usize;
|
||||
fn len() -> usize {
|
||||
Self::LEN
|
||||
}
|
||||
}
|
||||
|
||||
/// Compile-time known length value.
|
||||
pub const fn len<T:HasLength>() -> usize {
|
||||
<T as HasLength>::LEN
|
||||
}
|
||||
|
||||
impl HasLength for Nil { const LEN : usize = 0; }
|
||||
impl<H,T:HasLength> HasLength for Cons<H,T> { const LEN : usize = 1 + len::<T>(); }
|
||||
|
||||
|
||||
|
||||
// ============
|
||||
// === Head ===
|
||||
// ============
|
||||
|
||||
/// Head element accessor.
|
||||
#[allow(missing_docs)]
|
||||
pub trait KnownHead {
|
||||
type Head;
|
||||
}
|
||||
|
||||
/// Head element type accessor.
|
||||
pub type Head<T> = <T as KnownHead>::Head;
|
||||
|
||||
/// Head element accessor.
|
||||
#[allow(missing_docs)]
|
||||
pub trait GetHead : KnownHead {
|
||||
fn head(&self) -> &Self::Head;
|
||||
}
|
||||
|
||||
/// Mutable head element accessor.
|
||||
#[allow(missing_docs)]
|
||||
pub trait GetHeadMut : KnownHead {
|
||||
fn head_mut(&mut self) -> &mut Self::Head;
|
||||
}
|
||||
|
||||
/// Head element clone.
|
||||
#[allow(missing_docs)]
|
||||
pub trait GetHeadClone : KnownHead {
|
||||
fn head_clone(&self) -> Self::Head;
|
||||
}
|
||||
|
||||
impl<T> GetHeadClone for T
|
||||
where T:GetHead, Head<T>:Clone {
|
||||
default fn head_clone(&self) -> Self::Head {
|
||||
self.head().clone()
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// === Impls ===
|
||||
|
||||
impl<H,T> KnownHead for Cons<H,T> {
|
||||
type Head = H;
|
||||
}
|
||||
|
||||
impl<H,T> GetHead for Cons<H,T> {
|
||||
fn head(&self) -> &Self::Head {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl<H,T> GetHeadMut for Cons<H,T> {
|
||||
fn head_mut(&mut self) -> &mut Self::Head {
|
||||
&mut self.0
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ============
|
||||
// === Tail ===
|
||||
// ============
|
||||
|
||||
/// Tail element accessor.
|
||||
#[allow(missing_docs)]
|
||||
pub trait KnownTail {
|
||||
type Tail;
|
||||
}
|
||||
|
||||
/// Tail element type accessor.
|
||||
pub type Tail<T> = <T as KnownTail>::Tail;
|
||||
|
||||
/// Tail element accessor.
|
||||
#[allow(missing_docs)]
|
||||
pub trait GetTail : KnownTail {
|
||||
fn tail(&self) -> &Self::Tail;
|
||||
}
|
||||
|
||||
/// Mutable tail element accessor.
|
||||
#[allow(missing_docs)]
|
||||
pub trait GetTailMut : KnownTail {
|
||||
fn tail_mut(&mut self) -> &mut Self::Tail;
|
||||
}
|
||||
|
||||
/// Tail element clone.
|
||||
#[allow(missing_docs)]
|
||||
pub trait GetTailClone : KnownTail {
|
||||
fn tail_clone(&self) -> Self::Tail;
|
||||
}
|
||||
|
||||
impl<T> GetTailClone for T
|
||||
where T:GetTail, Tail<T>:Clone {
|
||||
default fn tail_clone(&self) -> Self::Tail {
|
||||
self.tail().clone()
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// === Impls ===
|
||||
|
||||
impl<H,T> KnownTail for Cons<H,T> {
|
||||
type Tail = T;
|
||||
}
|
||||
|
||||
impl<H,T> GetTail for Cons<H,T> {
|
||||
fn tail(&self) -> &Self::Tail {
|
||||
&self.1
|
||||
}
|
||||
}
|
||||
|
||||
impl<H,T> GetTailMut for Cons<H,T> {
|
||||
fn tail_mut(&mut self) -> &mut Self::Tail {
|
||||
&mut self.1
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ============
|
||||
// === Last ===
|
||||
// ============
|
||||
|
||||
/// Last element accessor.
|
||||
#[allow(missing_docs)]
|
||||
pub trait KnownLast {
|
||||
type Last;
|
||||
}
|
||||
|
||||
/// Last element type accessor.
|
||||
pub type Last<T> = <T as KnownLast>::Last;
|
||||
|
||||
/// Last element accessor.
|
||||
#[allow(missing_docs)]
|
||||
pub trait GetLast : KnownLast {
|
||||
fn last(&self) -> &Self::Last;
|
||||
}
|
||||
|
||||
/// Mutable last element accessor.
|
||||
#[allow(missing_docs)]
|
||||
pub trait GetLastMut : KnownLast {
|
||||
fn last_mut(&mut self) -> &mut Self::Last;
|
||||
}
|
||||
|
||||
/// Last element clone.
|
||||
#[allow(missing_docs)]
|
||||
pub trait GetLastClone : KnownLast {
|
||||
fn last_clone(&self) -> Self::Last;
|
||||
}
|
||||
|
||||
impl<T> GetLastClone for T
|
||||
where T:GetLast, Last<T>:Clone {
|
||||
default fn last_clone(&self) -> Self::Last {
|
||||
self.last().clone()
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// === Impls ===
|
||||
|
||||
impl<H> KnownLast for Cons<H,Nil> { type Last = H; }
|
||||
impl<H,T:KnownLast> KnownLast for Cons<H,T> { type Last = Last<T>; }
|
||||
|
||||
impl<H> GetLast for Cons<H,Nil> {
|
||||
fn last(&self) -> &Self::Last {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl<H> GetLastMut for Cons<H,Nil> {
|
||||
fn last_mut(&mut self) -> &mut Self::Last {
|
||||
&mut self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl<H,T:GetLast> GetLast for Cons<H,T> {
|
||||
fn last(&self) -> &Self::Last {
|
||||
self.tail().last()
|
||||
}
|
||||
}
|
||||
|
||||
impl<H,T:GetLastMut> GetLastMut for Cons<H,T> {
|
||||
fn last_mut(&mut self) -> &mut Self::Last {
|
||||
self.tail_mut().last_mut()
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
// ============
|
||||
// === Init ===
|
||||
// ============
|
||||
|
||||
/// Init elements accessor (all but last).
|
||||
#[allow(missing_docs)]
|
||||
pub trait KnownInit {
|
||||
type Init;
|
||||
}
|
||||
|
||||
/// Init elements type accessor.
|
||||
pub type Init<T> = <T as KnownInit>::Init;
|
||||
|
||||
/// Init element clone.
|
||||
#[allow(missing_docs)]
|
||||
pub trait GetInitClone : KnownInit {
|
||||
fn init_clone(&self) -> Self::Init;
|
||||
}
|
||||
|
||||
|
||||
// === Impls ===
|
||||
|
||||
impl<H> KnownInit for Cons<H,Nil> { type Init = Nil; }
|
||||
impl<H,T:KnownInit> KnownInit for Cons<H,T> { type Init = Cons<H,Init<T>>; }
|
||||
|
||||
impl<H> GetInitClone for Cons<H,Nil> {
|
||||
fn init_clone(&self) -> Self::Init {
|
||||
Nil
|
||||
}
|
||||
}
|
||||
|
||||
impl<H:Clone,T:GetInitClone> GetInitClone for Cons<H,T> {
|
||||
fn init_clone(&self) -> Self::Init {
|
||||
Cons(self.head().clone(),self.tail().init_clone())
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ================
|
||||
// === PushBack ===
|
||||
// ================
|
||||
|
||||
// TODO: Consider implementing PushBack for everything that converts to and from HList.
|
||||
|
||||
/// Add a new element to the back of the list.
|
||||
#[allow(missing_docs)]
|
||||
pub trait PushBack<T> : Sized {
|
||||
type Output : KnownLast<Last=T> + KnownInit<Init=Self>;
|
||||
fn push_back(self,t:T) -> Self::Output;
|
||||
}
|
||||
|
||||
impl<X> PushBack<X> for Nil {
|
||||
type Output = Cons<X,Nil>;
|
||||
#[inline(always)]
|
||||
fn push_back(self,x:X) -> Self::Output {
|
||||
Cons(x,Nil)
|
||||
}
|
||||
}
|
||||
|
||||
impl<X,H,T> PushBack<X> for Cons<H,T>
|
||||
where T:PushBack<X> {
|
||||
type Output = Cons<H,<T as PushBack<X>>::Output>;
|
||||
#[inline(always)]
|
||||
fn push_back(self,x:X) -> Self::Output {
|
||||
let Cons(head,tail) = self;
|
||||
Cons(head,tail.push_back(x))
|
||||
}
|
||||
}
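// Sketch: appending walks the list and rebuilds it, so the new element ends up innermost and
// becomes the last one. Assuming the `new!` macro from this crate:
#[cfg(test)]
mod push_back_example {
    use super::*;
    #[test]
    fn push_onto_hlist() {
        let list = crate::new![1_usize, "two"];
        let list = list.push_back("three");
        // The pushed element is now the last element of the list.
        assert_eq!(*list.last(), "three");
    }
}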
|
||||
|
||||
|
||||
|
||||
// ===============
|
||||
// === PopBack ===
|
||||
// ===============
|
||||
|
||||
// TODO: Consider implementing PopBack for everything that converts to and from HList.
|
||||
|
||||
/// Remove the last element of the list and return it and the new list.
|
||||
#[allow(missing_docs)]
|
||||
pub trait PopBack : KnownLast + KnownInit {
|
||||
fn pop_back(self) -> (Self::Last,Self::Init);
|
||||
}
|
||||
|
||||
impl<H> PopBack for Cons<H,Nil> {
|
||||
fn pop_back(self) -> (Self::Last,Self::Init) {
|
||||
(self.0,Nil)
|
||||
}
|
||||
}
|
||||
|
||||
impl<H,T> PopBack for Cons<H,T>
|
||||
where T:PopBack {
|
||||
fn pop_back(self) -> (Self::Last,Self::Init) {
|
||||
let (last,tail) = self.1.pop_back();
|
||||
(last,Cons(self.0,tail))
|
||||
}
|
||||
}
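// Sketch: popping from the back returns the last element together with the remaining prefix
// (the `Init` of the list). Assuming the `new!` macro from this crate:
#[cfg(test)]
mod pop_back_example {
    use super::*;
    #[test]
    fn pop_from_hlist() {
        let list = crate::new![1_usize, "two", 'c'];
        let (last, init) = list.pop_back();
        assert_eq!(last, 'c');
        // The init keeps the original order of the remaining elements.
        assert_eq!(*init.last(), "two");
    }
}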
|
@ -1,30 +0,0 @@
|
||||
//! Rust Generics implementation.
|
||||
//!
|
||||
//! Datatype-generic programming, also frequently just called generic programming or generics, is a
|
||||
//! form of abstraction that allows defining functions that can operate on a large class of
|
||||
//! data types. For a more in-depth introduction to generic programming in general, have a look at
|
||||
//! [Datatype-Generic Programming](http://www.cs.ox.ac.uk/jeremy.gibbons/publications/dgp.pdf), or
|
||||
//! the [Libraries for Generic Programming](http://dreixel.net/research/pdf/lgph.pdf) paper.
|
||||
|
||||
#![deny(unconditional_recursion)]
|
||||
|
||||
#![warn(missing_copy_implementations)]
|
||||
#![warn(missing_debug_implementations)]
|
||||
#![warn(missing_docs)]
|
||||
#![warn(trivial_casts)]
|
||||
#![warn(trivial_numeric_casts)]
|
||||
#![warn(unsafe_code)]
|
||||
#![warn(unused_import_braces)]
|
||||
#![warn(unused_qualifications)]
|
||||
|
||||
#![feature(const_fn)]
|
||||
#![feature(specialization)]
|
||||
#![feature(trait_alias)]
|
||||
|
||||
pub mod generic;
|
||||
pub mod hlist;
|
||||
pub mod tuple;
|
||||
|
||||
pub use generic::*;
|
||||
pub use hlist::*;
|
||||
pub use tuple::*;
|
@ -1,622 +0,0 @@
|
||||
//! This module contains implementations of generic operations on tuples.
|
||||
|
||||
use crate as hlist;
|
||||
use nalgebra::base::dimension::*;
|
||||
|
||||
|
||||
|
||||
// ====================
|
||||
// === HasTupleRepr ===
|
||||
// ====================
|
||||
|
||||
/// All types which have a tuple representation.
|
||||
#[allow(missing_docs)]
|
||||
pub trait HasTupleRepr {
|
||||
type TupleRepr;
|
||||
}
|
||||
|
||||
/// Tuple representation of a type.
|
||||
pub type TupleRepr<T> = <T as HasTupleRepr>::TupleRepr;
|
||||
|
||||
/// Conversion of the given type to its tuple representation.
|
||||
#[allow(missing_docs)]
|
||||
pub trait IntoTuple : HasTupleRepr + Into<TupleRepr<Self>> {
|
||||
fn into_tuple(self) -> TupleRepr<Self> {
|
||||
self.into()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> IntoTuple for T where T : HasTupleRepr + Into<TupleRepr<T>> {}
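// Sketch: converting an `HList` into its tuple representation. This relies on the `HasTupleRepr`
// and `Into` impls for `HList`s defined further below in this file, and on the `new!`/`ty!`
// macros from the `hlist` module.
#[cfg(test)]
mod into_tuple_example {
    use super::*;
    #[test]
    fn hlist_to_tuple() {
        let list: hlist::ty![i32, &str] = hlist::new![1, "two"];
        let tuple: (i32, &str) = list.into_tuple();
        assert_eq!(tuple, (1, "two"));
    }
}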
|
||||
|
||||
|
||||
|
||||
// ===================
|
||||
// === GenericRepr ===
|
||||
// ===================
|
||||
|
||||
macro_rules! gen_as_hlist_for_tuples {
|
||||
() => {};
|
||||
($t:ident $(,$($ts:ident),*)?) => {
|
||||
impl <$($($ts),*)?> $crate::HasRepr for ($($($ts,)*)?) {
|
||||
type GenericRepr = hlist::ty! { $($($ts),*)? };
|
||||
}
|
||||
gen_as_hlist_for_tuples! { $($($ts),*)? }
|
||||
}
|
||||
}
|
||||
|
||||
gen_as_hlist_for_tuples! {T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,T12}
|
||||
|
||||
|
||||
|
||||
|
||||
// =============================
|
||||
// === KnownLast / KnownInit ===
|
||||
// =============================
|
||||
|
||||
macro_rules! gen_known_last {
|
||||
() => {};
|
||||
($t:ident $(,$($ts:ident),*)?) => {
|
||||
impl<X $(,$($ts),*)?> $crate::KnownLast for ($($($ts,)*)? X,) { type Last = X; }
|
||||
gen_known_last! { $($($ts),*)? }
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! gen_known_init {
|
||||
() => {};
|
||||
($t:ident $(,$($ts:ident),*)?) => {
|
||||
impl<X $(,$($ts),*)?> $crate::KnownInit for ($($($ts,)*)? X,) { type Init = ($($($ts,)*)?); }
|
||||
gen_known_init! { $($($ts),*)? }
|
||||
}
|
||||
}
|
||||
|
||||
gen_known_last!{T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11}
|
||||
gen_known_init!{T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11}
|
||||
|
||||
|
||||
|
||||
|
||||
// ================
|
||||
// === PushBack ===
|
||||
// ================
|
||||
|
||||
impl<X> hlist::PushBack<X>
|
||||
for () {
|
||||
type Output = (X,);
|
||||
fn push_back(self,x:X) -> Self::Output {
|
||||
(x,)
|
||||
}
|
||||
}
|
||||
|
||||
impl<X,T0> hlist::PushBack<X>
|
||||
for (T0,) {
|
||||
type Output = (T0,X);
|
||||
fn push_back(self,x:X) -> Self::Output {
|
||||
(self.0,x)
|
||||
}
|
||||
}
|
||||
|
||||
impl<X,T0,T1> hlist::PushBack<X>
|
||||
for (T0,T1) {
|
||||
type Output = (T0,T1,X);
|
||||
fn push_back(self,x:X) -> Self::Output {
|
||||
(self.0,self.1,x)
|
||||
}
|
||||
}
|
||||
|
||||
impl<X,T0,T1,T2> hlist::PushBack<X>
|
||||
for (T0,T1,T2) {
|
||||
type Output = (T0,T1,T2,X);
|
||||
fn push_back(self,x:X) -> Self::Output {
|
||||
(self.0,self.1,self.2,x)
|
||||
}
|
||||
}
|
||||
|
||||
impl<X,T0,T1,T2,T3> hlist::PushBack<X>
|
||||
for (T0,T1,T2,T3) {
|
||||
type Output = (T0,T1,T2,T3,X);
|
||||
fn push_back(self,x:X) -> Self::Output {
|
||||
(self.0,self.1,self.2,self.3,x)
|
||||
}
|
||||
}
|
||||
|
||||
impl<X,T0,T1,T2,T3,T4> hlist::PushBack<X>
|
||||
for (T0,T1,T2,T3,T4) {
|
||||
type Output = (T0,T1,T2,T3,T4,X);
|
||||
fn push_back(self,x:X) -> Self::Output {
|
||||
(self.0,self.1,self.2,self.3,self.4,x)
|
||||
}
|
||||
}
|
||||
|
||||
impl<X,T0,T1,T2,T3,T4,T5> hlist::PushBack<X>
|
||||
for (T0,T1,T2,T3,T4,T5) {
|
||||
type Output = (T0,T1,T2,T3,T4,T5,X);
|
||||
fn push_back(self,x:X) -> Self::Output {
|
||||
(self.0,self.1,self.2,self.3,self.4,self.5,x)
|
||||
}
|
||||
}
|
||||
|
||||
impl<X,T0,T1,T2,T3,T4,T5,T6> hlist::PushBack<X>
|
||||
for (T0,T1,T2,T3,T4,T5,T6) {
|
||||
type Output = (T0,T1,T2,T3,T4,T5,T6,X);
|
||||
fn push_back(self,x:X) -> Self::Output {
|
||||
(self.0,self.1,self.2,self.3,self.4,self.5,self.6,x)
|
||||
}
|
||||
}
|
||||
|
||||
impl<X,T0,T1,T2,T3,T4,T5,T6,T7> hlist::PushBack<X>
|
||||
for (T0,T1,T2,T3,T4,T5,T6,T7) {
|
||||
type Output = (T0,T1,T2,T3,T4,T5,T6,T7,X);
|
||||
fn push_back(self,x:X) -> Self::Output {
|
||||
(self.0,self.1,self.2,self.3,self.4,self.5,self.6,self.7,x)
|
||||
}
|
||||
}
|
||||
|
||||
impl<X,T0,T1,T2,T3,T4,T5,T6,T7,T8> hlist::PushBack<X>
|
||||
for (T0,T1,T2,T3,T4,T5,T6,T7,T8) {
|
||||
type Output = (T0,T1,T2,T3,T4,T5,T6,T7,T8,X);
|
||||
fn push_back(self,x:X) -> Self::Output {
|
||||
(self.0,self.1,self.2,self.3,self.4,self.5,self.6,self.7,self.8,x)
|
||||
}
|
||||
}
|
||||
|
||||
impl<X,T0,T1,T2,T3,T4,T5,T6,T7,T8,T9> hlist::PushBack<X>
|
||||
for (T0,T1,T2,T3,T4,T5,T6,T7,T8,T9) {
|
||||
type Output = (T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,X);
|
||||
fn push_back(self,x:X) -> Self::Output {
|
||||
(self.0,self.1,self.2,self.3,self.4,self.5,self.6,self.7,self.8,self.9,x)
|
||||
}
|
||||
}
|
||||
|
||||
impl<X,T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10> hlist::PushBack<X>
|
||||
for (T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10) {
|
||||
type Output = (T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,X);
|
||||
fn push_back(self,x:X) -> Self::Output {
|
||||
(self.0,self.1,self.2,self.3,self.4,self.5,self.6,self.7,self.8,self.9,self.10,x)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
// ===============
|
||||
// === PopBack ===
|
||||
// ===============
|
||||
|
||||
impl<T0> hlist::PopBack
|
||||
for (T0,) {
|
||||
fn pop_back(self) -> (Self::Last,Self::Init) {
|
||||
(self.0,())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1> hlist::PopBack
|
||||
for (T0,T1) {
|
||||
fn pop_back(self) -> (Self::Last,Self::Init) {
|
||||
(self.1,(self.0,))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2> hlist::PopBack
|
||||
for (T0,T1,T2) {
|
||||
fn pop_back(self) -> (Self::Last,Self::Init) {
|
||||
(self.2,(self.0,self.1))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3> hlist::PopBack
|
||||
for (T0,T1,T2,T3) {
|
||||
fn pop_back(self) -> (Self::Last,Self::Init) {
|
||||
(self.3,(self.0,self.1,self.2))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3,T4> hlist::PopBack
|
||||
for (T0,T1,T2,T3,T4) {
|
||||
fn pop_back(self) -> (Self::Last,Self::Init) {
|
||||
(self.4,(self.0,self.1,self.2,self.3))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3,T4,T5> hlist::PopBack
|
||||
for (T0,T1,T2,T3,T4,T5) {
|
||||
fn pop_back(self) -> (Self::Last,Self::Init) {
|
||||
(self.5,(self.0,self.1,self.2,self.3,self.4))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3,T4,T5,T6> hlist::PopBack
|
||||
for (T0,T1,T2,T3,T4,T5,T6) {
|
||||
fn pop_back(self) -> (Self::Last,Self::Init) {
|
||||
(self.6,(self.0,self.1,self.2,self.3,self.4,self.5))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3,T4,T5,T6,T7> hlist::PopBack
|
||||
for (T0,T1,T2,T3,T4,T5,T6,T7) {
|
||||
fn pop_back(self) -> (Self::Last,Self::Init) {
|
||||
(self.7,(self.0,self.1,self.2,self.3,self.4,self.5,self.6))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3,T4,T5,T6,T7,T8> hlist::PopBack
|
||||
for (T0,T1,T2,T3,T4,T5,T6,T7,T8) {
|
||||
fn pop_back(self) -> (Self::Last,Self::Init) {
|
||||
(self.8,(self.0,self.1,self.2,self.3,self.4,self.5,self.6,self.7))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3,T4,T5,T6,T7,T8,T9> hlist::PopBack
|
||||
for (T0,T1,T2,T3,T4,T5,T6,T7,T8,T9) {
|
||||
fn pop_back(self) -> (Self::Last,Self::Init) {
|
||||
(self.9,(self.0,self.1,self.2,self.3,self.4,self.5,self.6,self.7,self.8))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10> hlist::PopBack
|
||||
for (T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10) {
|
||||
fn pop_back(self) -> (Self::Last,Self::Init) {
|
||||
(self.10,(self.0,self.1,self.2,self.3,self.4,self.5,self.6,self.7,self.8,self.9))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11> hlist::PopBack
|
||||
for (T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11) {
|
||||
fn pop_back(self) -> (Self::Last,Self::Init) {
|
||||
(self.11,(self.0,self.1,self.2,self.3,self.4,self.5,self.6,self.7,self.8,self.9,self.10))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =================================
|
||||
// === Conversion Tuple -> HList ===
|
||||
// =================================
|
||||
|
||||
impl From<()>
|
||||
for hlist::ty![] {
|
||||
#[inline(always)]
|
||||
fn from(_:()) -> Self {
|
||||
hlist::new![]
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0> From<(T0,)>
|
||||
for hlist::ty![T0] {
|
||||
#[inline(always)]
|
||||
fn from(t:(T0,)) -> Self {
|
||||
hlist::new![t.0]
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1> From<(T0,T1,)>
|
||||
for hlist::ty![T0,T1] {
|
||||
#[inline(always)]
|
||||
fn from(t:(T0,T1,)) -> Self {
|
||||
hlist::new![t.0,t.1]
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2> From<(T0,T1,T2,)>
|
||||
for hlist::ty![T0,T1,T2] {
|
||||
#[inline(always)]
|
||||
fn from(t:(T0,T1,T2,)) -> Self {
|
||||
hlist::new![t.0,t.1,t.2]
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3> From<(T0,T1,T2,T3,)>
|
||||
for hlist::ty![T0,T1,T2,T3] {
|
||||
#[inline(always)]
|
||||
fn from(t:(T0,T1,T2,T3,)) -> Self {
|
||||
hlist::new![t.0,t.1,t.2,t.3]
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3,T4> From<(T0,T1,T2,T3,T4,)>
|
||||
for hlist::ty![T0,T1,T2,T3,T4] {
|
||||
#[inline(always)]
|
||||
fn from(t:(T0,T1,T2,T3,T4,)) -> Self {
|
||||
hlist::new![t.0,t.1,t.2,t.3,t.4]
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3,T4,T5> From<(T0,T1,T2,T3,T4,T5,)>
|
||||
for hlist::ty![T0,T1,T2,T3,T4,T5] {
|
||||
#[inline(always)]
|
||||
fn from(t:(T0,T1,T2,T3,T4,T5,)) -> Self {
|
||||
hlist::new![t.0,t.1,t.2,t.3,t.4,t.5]
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3,T4,T5,T6> From<(T0,T1,T2,T3,T4,T5,T6,)>
|
||||
for hlist::ty![T0,T1,T2,T3,T4,T5,T6] {
|
||||
#[inline(always)]
|
||||
fn from(t:(T0,T1,T2,T3,T4,T5,T6,)) -> Self {
|
||||
hlist::new![t.0,t.1,t.2,t.3,t.4,t.5,t.6]
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3,T4,T5,T6,T7> From<(T0,T1,T2,T3,T4,T5,T6,T7,)>
|
||||
for hlist::ty![T0,T1,T2,T3,T4,T5,T6,T7] {
|
||||
#[inline(always)]
|
||||
fn from(t:(T0,T1,T2,T3,T4,T5,T6,T7,)) -> Self {
|
||||
hlist::new![t.0,t.1,t.2,t.3,t.4,t.5,t.6,t.7]
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3,T4,T5,T6,T7,T8> From<(T0,T1,T2,T3,T4,T5,T6,T7,T8,)>
|
||||
for hlist::ty![T0,T1,T2,T3,T4,T5,T6,T7,T8] {
|
||||
#[inline(always)]
|
||||
fn from(t:(T0,T1,T2,T3,T4,T5,T6,T7,T8,)) -> Self {
|
||||
hlist::new![t.0,t.1,t.2,t.3,t.4,t.5,t.6,t.7,t.8]
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3,T4,T5,T6,T7,T8,T9> From<(T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,)>
|
||||
for hlist::ty![T0,T1,T2,T3,T4,T5,T6,T7,T8,T9] {
|
||||
#[inline(always)]
|
||||
fn from(t:(T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,)) -> Self {
|
||||
hlist::new![t.0,t.1,t.2,t.3,t.4,t.5,t.6,t.7,t.8,t.9]
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10> From<(T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,)>
|
||||
for hlist::ty![T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10] {
|
||||
#[inline(always)]
|
||||
fn from(t:(T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,)) -> Self {
|
||||
hlist::new![t.0,t.1,t.2,t.3,t.4,t.5,t.6,t.7,t.8,t.9,t.10]
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11> From<(T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,)>
|
||||
for hlist::ty![T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11] {
|
||||
#[inline(always)]
|
||||
fn from(t:(T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11,)) -> Self {
|
||||
hlist::new![t.0,t.1,t.2,t.3,t.4,t.5,t.6,t.7,t.8,t.9,t.10,t.11]
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
// =================================
|
||||
// === Conversion HList -> Tuple ===
|
||||
// =================================
|
||||
|
||||
impl Into<()>
|
||||
for hlist::ty![] {
|
||||
#[inline(always)]
|
||||
fn into(self) {}
|
||||
}
|
||||
|
||||
impl<T0> Into<(T0,)>
|
||||
for hlist::ty![T0] {
|
||||
#[inline(always)]
|
||||
fn into(self) -> (T0,) {
|
||||
let hlist::pat![t0] = self;
|
||||
(t0,)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1> Into<(T0,T1)>
|
||||
for hlist::ty![T0,T1] {
|
||||
#[inline(always)]
|
||||
fn into(self) -> (T0,T1) {
|
||||
let hlist::pat![t0,t1] = self;
|
||||
(t0,t1)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2> Into<(T0,T1,T2)>
|
||||
for hlist::ty![T0,T1,T2] {
|
||||
#[inline(always)]
|
||||
fn into(self) -> (T0,T1,T2) {
|
||||
let hlist::pat![t0,t1,t2] = self;
|
||||
(t0,t1,t2)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3> Into<(T0,T1,T2,T3)>
|
||||
for hlist::ty![T0,T1,T2,T3] {
|
||||
#[inline(always)]
|
||||
fn into(self) -> (T0,T1,T2,T3) {
|
||||
let hlist::pat![t0,t1,t2,t3] = self;
|
||||
(t0,t1,t2,t3)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3,T4> Into<(T0,T1,T2,T3,T4)>
|
||||
for hlist::ty![T0,T1,T2,T3,T4] {
|
||||
#[inline(always)]
|
||||
fn into(self) -> (T0,T1,T2,T3,T4) {
|
||||
let hlist::pat![t0,t1,t2,t3,t4] = self;
|
||||
(t0,t1,t2,t3,t4)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3,T4,T5> Into<(T0,T1,T2,T3,T4,T5)>
|
||||
for hlist::ty![T0,T1,T2,T3,T4,T5] {
|
||||
#[inline(always)]
|
||||
fn into(self) -> (T0,T1,T2,T3,T4,T5) {
|
||||
let hlist::pat![t0,t1,t2,t3,t4,t5] = self;
|
||||
(t0,t1,t2,t3,t4,t5)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3,T4,T5,T6> Into<(T0,T1,T2,T3,T4,T5,T6)>
|
||||
for hlist::ty![T0,T1,T2,T3,T4,T5,T6] {
|
||||
#[inline(always)]
|
||||
fn into(self) -> (T0,T1,T2,T3,T4,T5,T6) {
|
||||
let hlist::pat![t0,t1,t2,t3,t4,t5,t6] = self;
|
||||
(t0,t1,t2,t3,t4,t5,t6)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3,T4,T5,T6,T7> Into<(T0,T1,T2,T3,T4,T5,T6,T7)>
|
||||
for hlist::ty![T0,T1,T2,T3,T4,T5,T6,T7] {
|
||||
#[inline(always)]
|
||||
fn into(self) -> (T0,T1,T2,T3,T4,T5,T6,T7) {
|
||||
let hlist::pat![t0,t1,t2,t3,t4,t5,t6,t7] = self;
|
||||
(t0,t1,t2,t3,t4,t5,t6,t7)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3,T4,T5,T6,T7,T8> Into<(T0,T1,T2,T3,T4,T5,T6,T7,T8)>
|
||||
for hlist::ty![T0,T1,T2,T3,T4,T5,T6,T7,T8] {
|
||||
#[inline(always)]
|
||||
fn into(self) -> (T0,T1,T2,T3,T4,T5,T6,T7,T8) {
|
||||
let hlist::pat![t0,t1,t2,t3,t4,t5,t6,t7,t8] = self;
|
||||
(t0,t1,t2,t3,t4,t5,t6,t7,t8)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3,T4,T5,T6,T7,T8,T9> Into<(T0,T1,T2,T3,T4,T5,T6,T7,T8,T9)>
|
||||
for hlist::ty![T0,T1,T2,T3,T4,T5,T6,T7,T8,T9] {
|
||||
#[inline(always)]
|
||||
fn into(self) -> (T0,T1,T2,T3,T4,T5,T6,T7,T8,T9) {
|
||||
let hlist::pat![t0,t1,t2,t3,t4,t5,t6,t7,t8,t9] = self;
|
||||
(t0,t1,t2,t3,t4,t5,t6,t7,t8,t9)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10> Into<(T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10)>
|
||||
for hlist::ty![T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10] {
|
||||
#[inline(always)]
|
||||
fn into(self) -> (T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10) {
|
||||
let hlist::pat![t0,t1,t2,t3,t4,t5,t6,t7,t8,t9,t10] = self;
|
||||
(t0,t1,t2,t3,t4,t5,t6,t7,t8,t9,t10)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11> Into<(T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11)>
|
||||
for hlist::ty![T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11] {
|
||||
#[inline(always)]
|
||||
fn into(self) -> (T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11) {
|
||||
let hlist::pat![t0,t1,t2,t3,t4,t5,t6,t7,t8,t9,t10,t11] = self;
|
||||
(t0,t1,t2,t3,t4,t5,t6,t7,t8,t9,t10,t11)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ==============================
|
||||
// === HasTupleRepr for HList ===
|
||||
// ==============================
|
||||
|
||||
impl HasTupleRepr
|
||||
for hlist::ty![] {
|
||||
type TupleRepr = ();
|
||||
}
|
||||
|
||||
impl<T1> HasTupleRepr
|
||||
for hlist::ty![T1] {
|
||||
type TupleRepr = (T1,);
|
||||
}
|
||||
|
||||
impl<T1,T2> HasTupleRepr
|
||||
for hlist::ty![T1,T2] {
|
||||
type TupleRepr = (T1,T2);
|
||||
}
|
||||
|
||||
impl<T1,T2,T3> HasTupleRepr
|
||||
for hlist::ty![T1,T2,T3] {
|
||||
type TupleRepr = (T1,T2,T3);
|
||||
}
|
||||
|
||||
impl<T1,T2,T3,T4> HasTupleRepr
|
||||
for hlist::ty![T1,T2,T3,T4] {
|
||||
type TupleRepr = (T1,T2,T3,T4);
|
||||
}
|
||||
|
||||
impl<T1,T2,T3,T4,T5> HasTupleRepr
|
||||
for hlist::ty![T1,T2,T3,T4,T5] {
|
||||
type TupleRepr = (T1,T2,T3,T4,T5);
|
||||
}
|
||||
|
||||
impl<T1,T2,T3,T4,T5,T6> HasTupleRepr
|
||||
for hlist::ty![T1,T2,T3,T4,T5,T6] {
|
||||
type TupleRepr = (T1,T2,T3,T4,T5,T6);
|
||||
}
|
||||
|
||||
impl<T1,T2,T3,T4,T5,T6,T7> HasTupleRepr
|
||||
for hlist::ty![T1,T2,T3,T4,T5,T6,T7] {
|
||||
type TupleRepr = (T1,T2,T3,T4,T5,T6,T7);
|
||||
}
|
||||
|
||||
impl<T1,T2,T3,T4,T5,T6,T7,T8> HasTupleRepr
|
||||
for hlist::ty![T1,T2,T3,T4,T5,T6,T7,T8] {
|
||||
type TupleRepr = (T1,T2,T3,T4,T5,T6,T7,T8);
|
||||
}
|
||||
|
||||
impl<T1,T2,T3,T4,T5,T6,T7,T8,T9> HasTupleRepr
|
||||
for hlist::ty![T1,T2,T3,T4,T5,T6,T7,T8,T9] {
|
||||
type TupleRepr = (T1,T2,T3,T4,T5,T6,T7,T8,T9);
|
||||
}
|
||||
|
||||
impl<T1,T2,T3,T4,T5,T6,T7,T8,T9,T10> HasTupleRepr
|
||||
for hlist::ty![T1,T2,T3,T4,T5,T6,T7,T8,T9,T10] {
|
||||
type TupleRepr = (T1,T2,T3,T4,T5,T6,T7,T8,T9,T10);
|
||||
}
|
||||
|
||||
impl<T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11> HasTupleRepr
|
||||
for hlist::ty![T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11] {
|
||||
type TupleRepr = (T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11);
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =================
|
||||
// === HasItemAt ===
|
||||
// =================
|
||||
|
||||
macro_rules! gen_has_item_at {
|
||||
($at:ident $p:tt) => {};
|
||||
($at:ident [$($p:ident),*] $t:ident $(,$($ts:ident),*)?) => {
|
||||
impl<$($p,)* X $(,$($ts),*)?> $crate::HasItemAt<$at> for ($($p,)*X,$($($ts,)*)?) {
|
||||
type Item = X;
|
||||
}
|
||||
gen_has_item_at! { $at [$($p),*] $($($ts),*)? }
|
||||
}
|
||||
}
|
||||
|
||||
gen_has_item_at!{U0 [] T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11}
|
||||
gen_has_item_at!{U1 [T0] T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11}
|
||||
gen_has_item_at!{U2 [T0,T1] T2,T3,T4,T5,T6,T7,T8,T9,T10,T11}
|
||||
gen_has_item_at!{U3 [T0,T1,T2] T3,T4,T5,T6,T7,T8,T9,T10,T11}
|
||||
gen_has_item_at!{U4 [T0,T1,T2,T3] T4,T5,T6,T7,T8,T9,T10,T11}
|
||||
gen_has_item_at!{U5 [T0,T1,T2,T3,T4] T5,T6,T7,T8,T9,T10,T11}
|
||||
gen_has_item_at!{U6 [T0,T1,T2,T3,T4,T5] T6,T7,T8,T9,T10,T11}
|
||||
gen_has_item_at!{U7 [T0,T1,T2,T3,T4,T5,T6] T7,T8,T9,T10,T11}
|
||||
gen_has_item_at!{U8 [T0,T1,T2,T3,T4,T5,T6,T7] T8,T9,T10,T11}
|
||||
gen_has_item_at!{U9 [T0,T1,T2,T3,T4,T5,T6,T7,T8] T9,T10,T11}
|
||||
gen_has_item_at!{U10 [T0,T1,T2,T3,T4,T5,T6,T7,T8,T9] T10,T11}
|
||||
gen_has_item_at!{U11 [T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10] T11}
|
||||
|
||||
|
||||
|
||||
// =================
|
||||
// === GetItemAt ===
|
||||
// =================
|
||||
|
||||
macro_rules! gen_get_item_at {
|
||||
($at:ident $num:tt $p:tt) => {};
|
||||
($at:ident $num:tt [$($p:ident),*] $t:ident $(,$($ts:ident),*)?) => {
|
||||
impl<$($p,)* X $(,$($ts),*)?> $crate::GetItemAt<$at> for ($($p,)*X,$($($ts,)*)?) {
|
||||
fn get_item_at(&self) -> &X { &self.$num }
|
||||
}
|
||||
gen_get_item_at! { $at $num [$($p),*] $($($ts),*)? }
|
||||
}
|
||||
}
|
||||
|
||||
gen_get_item_at!{U0 0 [] T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11}
|
||||
gen_get_item_at!{U1 1 [T0] T1,T2,T3,T4,T5,T6,T7,T8,T9,T10,T11}
|
||||
gen_get_item_at!{U2 2 [T0,T1] T2,T3,T4,T5,T6,T7,T8,T9,T10,T11}
|
||||
gen_get_item_at!{U3 3 [T0,T1,T2] T3,T4,T5,T6,T7,T8,T9,T10,T11}
|
||||
gen_get_item_at!{U4 4 [T0,T1,T2,T3] T4,T5,T6,T7,T8,T9,T10,T11}
|
||||
gen_get_item_at!{U5 5 [T0,T1,T2,T3,T4] T5,T6,T7,T8,T9,T10,T11}
|
||||
gen_get_item_at!{U6 6 [T0,T1,T2,T3,T4,T5] T6,T7,T8,T9,T10,T11}
|
||||
gen_get_item_at!{U7 7 [T0,T1,T2,T3,T4,T5,T6] T7,T8,T9,T10,T11}
|
||||
gen_get_item_at!{U8 8 [T0,T1,T2,T3,T4,T5,T6,T7] T8,T9,T10,T11}
|
||||
gen_get_item_at!{U9 9 [T0,T1,T2,T3,T4,T5,T6,T7,T8] T9,T10,T11}
|
||||
gen_get_item_at!{U10 10 [T0,T1,T2,T3,T4,T5,T6,T7,T8,T9] T10,T11}
|
||||
gen_get_item_at!{U11 11 [T0,T1,T2,T3,T4,T5,T6,T7,T8,T9,T10] T11}
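// Sketch of the intended usage of the generated impls above (illustrative only; `GetItemAt` and
// `HasItemAt` are assumed to be defined in the `generic` module of this crate, and `U1` is the
// type-level index imported from `nalgebra` at the top of this file):
//
//     let t = (1_u8, "two", 3.0_f32);
//     let item: &&str = GetItemAt::<U1>::get_item_at(&t);
//     assert_eq!(*item, "two");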
|
@ -1,26 +0,0 @@
|
||||
[package]
|
||||
name = "enso-logger"
|
||||
version = "0.1.0"
|
||||
authors = ["Enso Team <enso-dev@enso.org>"]
|
||||
edition = "2018"
|
||||
|
||||
description = "A generic logging library."
|
||||
readme = "README.md"
|
||||
homepage = "https://github.com/enso-org/enso/lib/rust/enso-logger"
|
||||
repository = "https://github.com/enso-org/enso"
|
||||
license-file = "../../../LICENSE"
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
|
||||
[features]
|
||||
default = []
|
||||
|
||||
[dependencies]
|
||||
enso-prelude = { version = "0.1.0" , path = "../enso-prelude" }
|
||||
enso-shapely = { version = "0.1.0" , path = "../enso-shapely/impl" }
|
||||
wasm-bindgen = { version = "=0.2.58", features = ["nightly"] }
|
||||
|
||||
[dependencies.web-sys]
|
||||
version = "0.3.4"
|
||||
features = ['console']
|
@ -1,3 +0,0 @@
|
||||
# Enso Logger
|
||||
|
||||
A generic logging library.
|
@ -1,35 +0,0 @@
|
||||
//! Contains the definition of a trivial logger that discards all messages except warnings and errors.
|
||||
|
||||
use enso_prelude::*;
|
||||
|
||||
use crate::Message;
|
||||
use crate::AnyLogger;
|
||||
use crate::enabled;
|
||||
|
||||
use enso_shapely::CloneRef;
|
||||
use std::fmt::Debug;
|
||||
|
||||
|
||||
|
||||
// ==============
|
||||
// === Logger ===
|
||||
// ==============
|
||||
|
||||
/// Trivial logger that discards all messages except warnings and errors.
|
||||
#[derive(Clone,CloneRef,Debug,Default)]
|
||||
pub struct Logger {
|
||||
enabled : enabled::Logger,
|
||||
}
|
||||
|
||||
|
||||
// === Impls ===
|
||||
|
||||
impls!{ From + &From <enabled::Logger> for Logger { |logger| Self::new(logger.path()) }}
|
||||
|
||||
impl AnyLogger for Logger {
|
||||
type Owned = Self;
|
||||
fn new (path:impl Into<ImString>) -> Self { Self {enabled : enabled::Logger::new(path) } }
|
||||
fn path (&self) -> &str { self.enabled.path() }
|
||||
fn warning (&self, msg:impl Message) { self.enabled.warning (msg) }
|
||||
fn error (&self, msg:impl Message) { self.enabled.error (msg) }
|
||||
}
|
@ -1,97 +0,0 @@
|
||||
//! Contains the implementation of the default logger.
|
||||
|
||||
use enso_prelude::*;
|
||||
|
||||
use crate::AnyLogger;
|
||||
use crate::Message;
|
||||
|
||||
use enso_shapely::CloneRef;
|
||||
use std::fmt::Debug;
|
||||
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
use web_sys::console;
|
||||
#[cfg(target_arch = "wasm32")]
|
||||
use wasm_bindgen::JsValue;
|
||||
|
||||
|
||||
|
||||
// ==============
|
||||
// === Logger ===
|
||||
// ==============
|
||||
|
||||
/// Default Logger implementation.
|
||||
#[derive(Clone,CloneRef,Debug,Default)]
|
||||
pub struct Logger {
|
||||
/// Path that is used as a unique identifier of this logger.
|
||||
path : ImString,
|
||||
#[cfg(not(target_arch="wasm32"))]
|
||||
indent : Rc<Cell<usize>>,
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch="wasm32"))]
|
||||
impl Logger {
|
||||
fn format(&self, msg:impl Message) -> String {
|
||||
let indent = " ".repeat(4*self.indent.get());
|
||||
msg.with(|s|iformat!("{indent}[{self.path}] {s}"))
|
||||
}
|
||||
|
||||
fn inc_indent(&self) {
|
||||
self.indent.update(|t|t+1);
|
||||
}
|
||||
|
||||
fn dec_indent(&self) {
|
||||
self.indent.update(|t|t.saturating_sub(1));
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_arch="wasm32")]
|
||||
impl Logger {
|
||||
fn format(&self, msg:impl Message) -> JsValue {
|
||||
msg.with(|s|iformat!("[{self.path}] {s}")).into()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(target_arch="wasm32"))]
|
||||
impl AnyLogger for Logger {
|
||||
type Owned = Self;
|
||||
fn new(path:impl Into<ImString>) -> Self {
|
||||
let path = path.into();
|
||||
let indent = default();
|
||||
Self {path,indent}
|
||||
}
|
||||
|
||||
fn path (&self) -> &str { &self.path }
|
||||
fn trace (&self, msg:impl Message) { println!("{}",self.format(msg)) }
|
||||
fn debug (&self, msg:impl Message) { println!("{}",self.format(msg)) }
|
||||
fn info (&self, msg:impl Message) { println!("{}",self.format(msg)) }
|
||||
fn warning (&self, msg:impl Message) { println!("[WARNING] {}",self.format(msg)) }
|
||||
fn error (&self, msg:impl Message) { println!("[ERROR] {}",self.format(msg)) }
|
||||
fn group_begin (&self, msg:impl Message) { println!("{}",self.format(msg)); self.inc_indent() }
|
||||
fn group_end (&self) { self.dec_indent() }
|
||||
}
|
||||
|
||||
|
||||
#[cfg(target_arch="wasm32")]
|
||||
impl AnyLogger for Logger {
|
||||
type Owned = Self;
|
||||
fn new(path:impl Into<ImString>) -> Self {
|
||||
let path = path.into();
|
||||
Self {path}
|
||||
}
|
||||
fn path (&self) -> &str { &self.path }
|
||||
fn trace (&self, msg:impl Message) { console::trace_1 (&self.format(msg)) }
|
||||
fn debug (&self, msg:impl Message) { console::debug_1 (&self.format(msg)) }
|
||||
fn info (&self, msg:impl Message) { console::info_1 (&self.format(msg)) }
|
||||
fn warning (&self, msg:impl Message) { console::warn_1 (&self.format(msg)) }
|
||||
fn error (&self, msg:impl Message) { console::error_1 (&self.format(msg)) }
|
||||
fn group_begin (&self, msg:impl Message) { console::group_1 (&self.format(msg)) }
|
||||
fn group_end (&self) { console::group_end() }
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ===================
|
||||
// === Conversions ===
|
||||
// ===================
|
||||
|
||||
impls!{ From + &From <crate::disabled::Logger> for Logger { |logger| Self::new(logger.path()) }}
|
@ -1,235 +0,0 @@
|
||||
//! This crate contains the implementation of the logging interface.
|
||||
|
||||
#![feature(cell_update)]
|
||||
|
||||
#![deny(unconditional_recursion)]
|
||||
#![warn(missing_copy_implementations)]
|
||||
#![warn(missing_debug_implementations)]
|
||||
#![warn(missing_docs)]
|
||||
#![warn(trivial_casts)]
|
||||
#![warn(trivial_numeric_casts)]
|
||||
#![warn(unsafe_code)]
|
||||
#![warn(unused_import_braces)]
|
||||
|
||||
pub mod disabled;
|
||||
pub mod enabled;
|
||||
|
||||
use enso_prelude::*;
|
||||
|
||||
|
||||
|
||||
// ==============
|
||||
// === Message ===
|
||||
// ==============
|
||||
|
||||
/// Message that can be logged.
|
||||
pub trait Message {
|
||||
    /// Turns the message into a `&str` and passes it to the given function.
|
||||
fn with<T,F:FnOnce(&str)->T>(&self, f:F) -> T;
|
||||
}
|
||||
|
||||
impl Message for &str {
|
||||
fn with<T,F:FnOnce(&str)->T>(&self, f:F) -> T {
|
||||
f(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<G:Fn()->S, S:AsRef<str>> Message for G {
|
||||
fn with<T,F:FnOnce(&str)->T>(&self, f:F) -> T {
|
||||
f(self().as_ref())
|
||||
}
|
||||
}
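// Sketch: both `&str` values and zero-argument closures can therefore be passed wherever
// `impl Message` is expected. The closure form builds the message lazily, which matters when the
// logger discards it. Using the `enabled::Logger` defined in this crate:
#[cfg(test)]
mod message_example {
    use super::*;
    #[test]
    fn str_and_closure_messages() {
        let logger = enabled::Logger::new("example");
        logger.info("a plain &str message");
        logger.info(|| format!("a lazily formatted message: {}", 42));
    }
}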
|
||||
|
||||
|
||||
|
||||
// =================
|
||||
// === AnyLogger ===
|
||||
// =================
|
||||
|
||||
/// Interface common to all loggers.
|
||||
pub trait AnyLogger {
|
||||
/// Owned type of the logger.
|
||||
type Owned;
|
||||
|
||||
/// Creates a new logger. Path should be a unique identifier for this logger.
|
||||
fn new(path:impl Into<ImString>) -> Self::Owned;
|
||||
|
||||
    /// Path that is used as a unique identifier of this logger.
|
||||
fn path(&self) -> &str;
|
||||
|
||||
/// Creates a new logger with this logger as a parent.
|
||||
fn sub(logger:impl AnyLogger, path:impl Into<ImString>) -> Self::Owned {
|
||||
let path = path.into();
|
||||
let super_path = logger.path();
|
||||
if super_path.is_empty() { Self::new(path) }
|
||||
else { Self::new(iformat!("{super_path}.{path}")) }
|
||||
}
|
||||
|
||||
    /// Creates a logger with the same path as the given `AnyLogger`.
|
||||
fn from_logger(logger:impl AnyLogger) -> Self::Owned {
|
||||
Self::new(logger.path())
|
||||
}
|
||||
|
||||
    /// Evaluates the function `f` and visually groups all logs that occur during its execution.
|
||||
fn group<T,F:FnOnce() -> T>(&self, msg:impl Message, f:F) -> T {
|
||||
self.group_begin(msg);
|
||||
let out = f();
|
||||
self.group_end();
|
||||
out
|
||||
}
|
||||
|
||||
/// Log with stacktrace and info level verbosity.
|
||||
fn trace(&self, _msg:impl Message) {}
|
||||
|
||||
    /// Log with debug level verbosity.
|
||||
fn debug(&self, _msg:impl Message) {}
|
||||
|
||||
/// Log with info level verbosity.
|
||||
fn info(&self, _msg:impl Message) {}
|
||||
|
||||
/// Log with warning level verbosity.
|
||||
fn warning(&self, _msg:impl Message) {}
|
||||
|
||||
/// Log with error level verbosity.
|
||||
fn error(&self, _msg:impl Message) {}
|
||||
|
||||
/// Visually groups all logs between group_begin and group_end.
|
||||
fn group_begin(&self, _msg:impl Message) {}
|
||||
|
||||
/// Visually groups all logs between group_begin and group_end.
|
||||
fn group_end(&self) {}
|
||||
}
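// Sketch: `sub` composes the child's path from the parent's path, so loggers form a dot-separated
// hierarchy. Using the `enabled::Logger` defined in this crate:
#[cfg(test)]
mod sub_logger_example {
    use super::*;
    #[test]
    fn sub_logger_path() {
        let parent = enabled::Logger::new("parent");
        let child  = enabled::Logger::sub(&parent, "child");
        assert_eq!(child.path(), "parent.child");
    }
}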
|
||||
|
||||
impl<T:AnyLogger> AnyLogger for &T {
|
||||
type Owned = T::Owned;
|
||||
fn new (path:impl Into<ImString>) -> Self::Owned { T::new(path) }
|
||||
fn path (&self) -> &str { T::path(self) }
|
||||
fn trace (&self, msg:impl Message) { T::trace (self,msg) }
|
||||
fn debug (&self, msg:impl Message) { T::debug (self,msg) }
|
||||
fn info (&self, msg:impl Message) { T::info (self,msg) }
|
||||
fn warning (&self, msg:impl Message) { T::warning (self,msg) }
|
||||
fn error (&self, msg:impl Message) { T::error (self,msg) }
|
||||
fn group_begin (&self, msg:impl Message) { T::group_begin (self,msg) }
|
||||
fn group_end (&self) { T::group_end (self) }
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ==============
|
||||
// === Macros ===
|
||||
// ==============
|
||||
|
||||
/// Shortcut for `|| format!(..)`.
|
||||
#[macro_export]
|
||||
macro_rules! fmt {
|
||||
($($arg:tt)*) => (||(format!($($arg)*)))
|
||||
}
|
||||
|
||||
/// Evaluates an expression and visually groups all logs that occur during its execution.
|
||||
#[macro_export]
|
||||
macro_rules! group {
|
||||
($logger:expr, $message:tt, {$($body:tt)*}) => {{
|
||||
let __logger = $logger.clone();
|
||||
__logger.group_begin(|| iformat!{$message});
|
||||
let out = {$($body)*};
|
||||
__logger.group_end();
|
||||
out
|
||||
}};
|
||||
}
|
||||
|
||||
/// Logs a message on the given level.
|
||||
#[macro_export]
|
||||
macro_rules! log_template {
|
||||
($method:ident $logger:expr, $message:tt $($rest:tt)*) => {
|
||||
$crate::log_template_impl! {$method $logger, iformat!($message) $($rest)*}
|
||||
};
|
||||
}
|
||||
|
||||
/// Logs a message on the given level.
|
||||
#[macro_export]
|
||||
macro_rules! log_template_impl {
|
||||
($method:ident $logger:expr, $expr:expr) => {{
|
||||
$logger.$method(|| $expr);
|
||||
}};
|
||||
($method:ident $logger:expr, $expr:expr, $body:tt) => {{
|
||||
let __logger = $logger.clone();
|
||||
__logger.group_begin(|| $expr);
|
||||
let out = $body;
|
||||
__logger.group_end();
|
||||
out
|
||||
}};
|
||||
}
|
||||
|
||||
/// Logs an internal error with a descriptive message.
|
||||
#[macro_export]
|
||||
macro_rules! with_internal_bug_message { ($f:ident $($args:tt)*) => { $crate::$f! {
|
||||
"This is a bug. Please report it and and provide us with as much information as \
|
||||
possible at https://github.com/luna/enso/issues. Thank you!"
|
||||
$($args)*
|
||||
}};}
|
||||
|
||||
/// Logs an internal error.
|
||||
#[macro_export]
|
||||
macro_rules! log_internal_bug_template {
|
||||
($($toks:tt)*) => {
|
||||
$crate::with_internal_bug_message! { log_internal_bug_template_impl $($toks)* }
|
||||
};
|
||||
}
|
||||
|
||||
/// Logs an internal error.
|
||||
#[macro_export]
|
||||
macro_rules! log_internal_bug_template_impl {
|
||||
($note:tt $method:ident $logger:expr, $message:tt $($rest:tt)*) => {
|
||||
$crate::log_template_impl! {$method $logger,
|
||||
format!("Internal Error. {}\n\n{}",iformat!($message),$note) $($rest)*
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/// Log with stacktrace and level:info.
|
||||
#[macro_export]
|
||||
macro_rules! trace {
|
||||
($($toks:tt)*) => {
|
||||
$crate::log_template! {trace $($toks)*}
|
||||
};
|
||||
}
|
||||
|
||||
/// Log with level:debug.
|
||||
#[macro_export]
|
||||
macro_rules! debug {
|
||||
($($toks:tt)*) => {
|
||||
$crate::log_template! {debug $($toks)*}
|
||||
};
|
||||
}
|
||||
|
||||
/// Log with level:info.
|
||||
#[macro_export]
|
||||
macro_rules! info {
|
||||
($($toks:tt)*) => {
|
||||
$crate::log_template! {info $($toks)*}
|
||||
};
|
||||
}
|
||||
|
||||
/// Log with level:warning.
|
||||
#[macro_export]
|
||||
macro_rules! warning {
|
||||
($($toks:tt)*) => {
|
||||
$crate::log_template! {warning $($toks)*}
|
||||
};
|
||||
}
|
||||
|
||||
/// Log with level:error.
|
||||
#[macro_export]
|
||||
macro_rules! error {
|
||||
($($toks:tt)*) => {
|
||||
$crate::log_template! {error $($toks)*}
|
||||
};
|
||||
}
|
||||
|
||||
/// Logs an internal warning.
|
||||
#[macro_export]
|
||||
macro_rules! internal_warning {
|
||||
($($toks:tt)*) => {
|
||||
$crate::log_internal_bug_template! {warning $($toks)*}
|
||||
};
|
||||
}
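// Sketch of how the macros above compose (illustrative only; `logger` is assumed to be any value
// implementing `AnyLogger`):
//
//     info!(logger, "all modules loaded");
//     group!(logger, "initializing", {
//         debug!(logger, "step one");
//         warning!(logger, "something looks off");
//     });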
|
@ -1,34 +0,0 @@
|
||||
[package]
|
||||
name = "enso-macro-utils"
|
||||
version = "0.1.0"
|
||||
authors = ["Enso Team <enso-dev@enso.org>"]
|
||||
edition = "2018"
|
||||
|
||||
description = "Utilities for writing macros."
|
||||
readme = "README.md"
|
||||
homepage = "https://github.com/enso-org/enso/lib/rust/enso-macro-utils"
|
||||
repository = "https://github.com/enso-org/enso"
|
||||
license-file = "../../../LICENSE"
|
||||
|
||||
keywords = ["macro", "utility"]
|
||||
categories = ["development-tools::procedural-macro-helpers"]
|
||||
|
||||
publish = true
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
|
||||
[dependencies]
|
||||
proc-macro2 = "1.0"
|
||||
quote = "1.0"
|
||||
|
||||
[dependencies.syn]
|
||||
version = "1.0"
|
||||
features = [
|
||||
'extra-traits',
|
||||
'full', # for syn::ItemStruct
|
||||
'visit'
|
||||
]
|
||||
|
||||
[dev-dependencies]
|
||||
wasm-bindgen-test = "0.2"
|
@ -1,3 +0,0 @@
|
||||
# Macro Utils
|
||||
|
||||
This crate provides some utilities useful for writing macros.
|
@ -1,358 +0,0 @@
|
||||
//! A number of helper functions meant to be used in the procedural enso-shapely-macros
|
||||
//! definitions.
|
||||
|
||||
#![warn(missing_docs)]
|
||||
#![feature(trait_alias)]
|
||||
|
||||
use proc_macro2::TokenStream;
|
||||
use proc_macro2::TokenTree;
|
||||
use quote::quote;
|
||||
use std::iter::FromIterator;
|
||||
use syn::visit::Visit;
|
||||
use syn::WhereClause;
|
||||
use syn::WherePredicate;
|
||||
use syn;
|
||||
|
||||
|
||||
|
||||
// =====================
|
||||
// === Trait Aliases ===
|
||||
// =====================
|
||||
|
||||
pub trait Str = Into<String> + AsRef<str>;
|
||||
|
||||
|
||||
|
||||
// ==========================
|
||||
// === Token Stream Utils ===
|
||||
// ==========================
|
||||
|
||||
/// Maps all the tokens in the stream using a given function.
|
||||
pub fn map_tokens<F:Fn(TokenTree) -> TokenTree>
|
||||
(input:TokenStream, f:F) -> TokenStream {
|
||||
let ret_iter = input.into_iter().map(f);
|
||||
TokenStream::from_iter(ret_iter)
|
||||
}
|
||||
|
||||
/// Rewrites stream replacing each token with a sequence of tokens returned by
|
||||
/// the given function. The groups (e.g. token tree within braces) are unpacked,
|
||||
/// rewritten and repacked into groups -- the function is applied recursively.
|
||||
pub fn rewrite_stream
|
||||
<F:Fn(TokenTree) -> TokenStream + Copy>
|
||||
(input:TokenStream, f:F) -> TokenStream {
|
||||
let mut ret = TokenStream::new();
|
||||
for token in input.into_iter() {
|
||||
match token {
|
||||
proc_macro2::TokenTree::Group(group) => {
|
||||
let delim = group.delimiter();
|
||||
let span = group.span();
|
||||
let rewritten = rewrite_stream(group.stream(), f);
|
||||
let mut new_group = proc_macro2::Group::new(delim,rewritten);
|
||||
new_group.set_span(span);
|
||||
let new_group = vec![TokenTree::from(new_group)];
|
||||
ret.extend(new_group.into_iter())
|
||||
}
|
||||
_ => ret.extend(f(token)),
|
||||
}
|
||||
}
|
||||
ret
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ===================
|
||||
// === Token Utils ===
|
||||
// ===================
|
||||
|
||||
/// Is the given token an identifier matching the given string?
|
||||
pub fn matching_ident(token:&TokenTree, name:&str) -> bool {
|
||||
match token {
|
||||
TokenTree::Ident(ident) => *ident == name,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ============
|
||||
// === Repr ===
|
||||
// ============
|
||||
|
||||
/// Obtains the text representation of the given `ToTokens`-compatible input.
|
||||
pub fn repr<T: quote::ToTokens>(t:&T) -> String {
|
||||
quote!(#t).to_string()
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ===================
|
||||
// === Field Utils ===
|
||||
// ===================
|
||||
|
||||
/// Collects all fields, named or not.
|
||||
pub fn fields_list(fields:&syn::Fields) -> Vec<&syn::Field> {
|
||||
match fields {
|
||||
syn::Fields::Named (ref f) => f.named .iter().collect(),
|
||||
syn::Fields::Unnamed(ref f) => f.unnamed.iter().collect(),
|
||||
syn::Fields::Unit => Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the token that refers to the field.
|
||||
///
|
||||
/// It is the field name for a named field and the field index for an unnamed field.
|
||||
pub fn field_ident_token(field:&syn::Field, index:syn::Index) -> TokenStream {
|
||||
match &field.ident {
|
||||
Some(ident) => quote!(#ident),
|
||||
None => quote!(#index),
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns names of the named fields.
|
||||
pub fn field_names(fields:&syn::FieldsNamed) -> Vec<&syn::Ident> {
|
||||
fields.named.iter().map(|field| {
|
||||
field.ident.as_ref().expect("Impossible: no name on a named field.")
|
||||
}).collect()
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ==================
|
||||
// === Path Utils ===
|
||||
// ==================
|
||||
|
||||
/// Checks if a given `Path` consists of a single identifier same as given string.
|
||||
pub fn path_matching_ident(path:&syn::Path, str:impl Str) -> bool {
|
||||
path.get_ident().map_or(false, |ident| ident == str.as_ref())
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ======================
|
||||
// === Index Sequence ===
|
||||
// ======================
|
||||
|
||||
/// For a given length, returns a sequence of literals like `[0,1,2…]`. These are unsuffixed
|
||||
/// usize literals, so they can be used, for example, to identify a tuple's unnamed fields.
|
||||
pub fn index_sequence(len:usize) -> Vec<syn::Index> {
|
||||
(0..len).map(syn::Index::from).collect()
|
||||
}
|
||||
|
||||
/// For a given length, returns a sequence of identifiers like `[field0,field1,…]`.
|
||||
pub fn identifier_sequence(len:usize) -> Vec<syn::Ident> {
|
||||
let format_field = |ix| quote::format_ident!("field{}",ix);
|
||||
(0..len).map(format_field).collect()
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =======================
|
||||
// === Type Path Utils ===
|
||||
// =======================
|
||||
|
||||
/// Obtain list of generic arguments on the path's segment.
|
||||
pub fn path_segment_generic_args
|
||||
(segment:&syn::PathSegment) -> Vec<&syn::GenericArgument> {
|
||||
match segment.arguments {
|
||||
syn::PathArguments::AngleBracketed(ref args) =>
|
||||
args.args.iter().collect(),
|
||||
_ =>
|
||||
Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Obtain list of generic arguments on the path's last segment.
|
||||
///
|
||||
/// Empty, if path contains no segments.
|
||||
pub fn ty_path_generic_args
|
||||
(ty_path:&syn::TypePath) -> Vec<&syn::GenericArgument> {
|
||||
ty_path.path.segments.last().map_or(Vec::new(), path_segment_generic_args)
|
||||
}
|
||||
|
||||
/// Obtain list of type arguments on the path's last segment.
|
||||
pub fn ty_path_type_args
|
||||
(ty_path:&syn::TypePath) -> Vec<&syn::Type> {
|
||||
ty_path_generic_args(ty_path).iter().filter_map( |generic_arg| {
|
||||
match generic_arg {
|
||||
syn::GenericArgument::Type(t) => Some(t),
|
||||
_ => None,
|
||||
}
|
||||
}).collect()
|
||||
}
|
||||
|
||||
/// Last type argument of the last segment on the type path.
|
||||
pub fn last_type_arg(ty_path:&syn::TypePath) -> Option<&syn::GenericArgument> {
|
||||
ty_path_generic_args(ty_path).last().copied()
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =====================
|
||||
// === Collect Types ===
|
||||
// =====================
|
||||
|
||||
/// Visitor that accumulates all visited `syn::TypePath`.
|
||||
#[derive(Default)]
|
||||
pub struct TypeGatherer<'ast> {
|
||||
/// Observed types accumulator.
|
||||
pub types: Vec<&'ast syn::TypePath>
|
||||
}
|
||||
|
||||
impl<'ast> Visit<'ast> for TypeGatherer<'ast> {
|
||||
fn visit_type_path(&mut self, node:&'ast syn::TypePath) {
|
||||
self.types.push(node);
|
||||
syn::visit::visit_type_path(self, node);
|
||||
}
|
||||
}
|
||||
|
||||
/// All `TypePath`s in the given `Type`'s subtree.
|
||||
pub fn gather_all_types(node:&syn::Type) -> Vec<&syn::TypePath> {
|
||||
let mut type_gather = TypeGatherer::default();
|
||||
type_gather.visit_type(node);
|
||||
type_gather.types
|
||||
}
|
||||
|
||||
/// All text representations of `TypePath`s in the given `Type`'s subtree.
|
||||
pub fn gather_all_type_reprs(node:&syn::Type) -> Vec<String> {
|
||||
gather_all_types(node).iter().map(|t| repr(t)).collect()
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =======================
|
||||
// === Type Dependency ===
|
||||
// =======================
|
||||
|
||||
/// Naive type equality test by comparing its representation with a string.
|
||||
pub fn type_matches_repr(ty:&syn::Type, target_repr:&str) -> bool {
|
||||
repr(ty) == target_repr
|
||||
}
|
||||
|
||||
/// Naive type equality test by comparing their text representations.
|
||||
pub fn type_matches(ty:&syn::Type, target_param:&syn::GenericParam) -> bool {
|
||||
type_matches_repr(ty, &repr(target_param))
|
||||
}
|
||||
|
||||
/// Checks whether the type depends on the given type parameter.
|
||||
pub fn type_depends_on(ty:&syn::Type, target_param:&syn::GenericParam) -> bool {
|
||||
let target_param = repr(target_param);
|
||||
let relevant_types = gather_all_types(ty);
|
||||
relevant_types.iter().any(|ty| repr(ty) == target_param)
|
||||
}
|
||||
|
||||
/// Checks whether the enum variant depends on the given type parameter.
|
||||
pub fn variant_depends_on
|
||||
(var:&syn::Variant, target_param:&syn::GenericParam) -> bool {
|
||||
var.fields.iter().any(|field| type_depends_on(&field.ty, target_param))
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ===================
|
||||
// === WhereClause ===
|
||||
// ===================
|
||||
|
||||
/// Creates a new where clause from provided sequence of where predicates.
|
||||
pub fn new_where_clause(predicates:impl IntoIterator<Item=WherePredicate>) -> WhereClause {
|
||||
let predicates = syn::punctuated::Punctuated::from_iter(predicates);
|
||||
WhereClause {where_token:Default::default(),predicates}
|
||||
}
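// Sketch: building a `WhereClause` from predicates parsed at runtime (the predicate text below is
// illustrative only).
#[cfg(test)]
mod where_clause_example {
    use super::*;
    #[test]
    fn build_where_clause() {
        let pred: WherePredicate = syn::parse_str("T: Clone + Default").unwrap();
        let clause = new_where_clause(vec![pred]);
        assert_eq!(clause.predicates.len(), 1);
    }
}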
|
||||
|
||||
|
||||
|
||||
// =============
|
||||
// === Tests ===
|
||||
// =============
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use proc_macro2::TokenStream;
|
||||
|
||||
fn parse<T:syn::parse::Parse>(code:&str) -> T {
|
||||
syn::parse_str(code).unwrap()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn repr_round_trips() {
|
||||
let program = "pub fn repr<T: quote::ToTokens>(t: &T) -> String {}";
|
||||
let tokens = parse::<TokenStream>(program);
|
||||
let quoted_program = repr(&tokens);
|
||||
        let tokens2 = parse::<TokenStream>(&quoted_program);
|
||||
// check only second round-trip, first is allowed to break whitespace
|
||||
assert_eq!(repr(&tokens), repr(&tokens2));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn fields_list_test() {
|
||||
let tuple_like = "struct Unnamed(i32, String, T);";
|
||||
let proper_struct = "struct Named{i: i32, s: String, t: T}";
|
||||
let expected_types = vec!["i32", "String", "T"];
|
||||
|
||||
fn assert_field_types(program:&str, expected_types:&[&str]) {
|
||||
let tokens = parse::<syn::ItemStruct>(program);
|
||||
let fields = fields_list(&tokens.fields);
|
||||
let types = fields.iter().map(|f| repr(&f.ty));
|
||||
assert_eq!(Vec::from_iter(types), expected_types);
|
||||
}
|
||||
|
||||
assert_field_types(tuple_like, &expected_types);
|
||||
assert_field_types(proper_struct, &expected_types);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn type_dependency() {
|
||||
let param:syn::GenericParam = parse("T");
|
||||
let depends = |code| {
|
||||
let ty:syn::Type = parse(code);
|
||||
type_depends_on(&ty, ¶m)
|
||||
};
|
||||
|
||||
// sample types that depend on `T`
|
||||
let dependents = vec!{
|
||||
"T",
|
||||
"Option<T>",
|
||||
"Pair<T, U>",
|
||||
"Pair<U, T>",
|
||||
"Pair<U, (T,)>",
|
||||
"&T",
|
||||
"&'t mut T",
|
||||
};
|
||||
// sample types that do not depend on `T`
|
||||
let independents = vec!{
|
||||
"Tt",
|
||||
"Option<Tt>",
|
||||
"Pair<Tt, U>",
|
||||
"Pair<U, Tt>",
|
||||
"Pair<U, Tt>",
|
||||
"i32",
|
||||
"&str",
|
||||
};
|
||||
for dependent in dependents {
|
||||
assert!(depends(dependent), "{} must depend on {}"
|
||||
, repr(&dependent), repr(¶m));
|
||||
}
|
||||
for independent in independents {
|
||||
assert!(!depends(independent), "{} must not depend on {}"
|
||||
, repr(&independent), repr(¶m));
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn collecting_type_path_args() {
|
||||
fn check(expected_type_args:Vec<&str>, ty_path:&str) {
|
||||
let ty_path = parse(ty_path);
|
||||
let args = super::ty_path_type_args(&ty_path);
|
||||
assert_eq!(expected_type_args.len(), args.len());
|
||||
let zipped = expected_type_args.iter().zip(args.iter());
|
||||
for (expected,got) in zipped {
|
||||
assert_eq!(expected, &repr(got));
|
||||
}
|
||||
}
|
||||
check(vec!["T"] , "std::Option<T>");
|
||||
check(vec!["U"] , "std::Option<U>");
|
||||
check(vec!["A", "B"], "Either<A,B>");
|
||||
assert_eq!(super::last_type_arg(&parse("i32")), None);
|
||||
assert_eq!(repr(&super::last_type_arg(&parse("Foo<C>"))), "C");
|
||||
}
|
||||
}
|
@ -1,19 +0,0 @@
|
||||
[package]
|
||||
name = "enso-optics"
|
||||
version = "0.1.0"
|
||||
authors = ["Enso Team <enso-dev@enso.org>"]
|
||||
edition = "2018"
|
||||
|
||||
description = "A library providing lenses and prisms."
|
||||
readme = "README.md"
|
||||
homepage = "https://github.com/enso-org/enso/lib/rust/enso-optics"
|
||||
repository = "https://github.com/enso-org/enso"
|
||||
license-file = "../../../LICENSE"
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
|
||||
[features]
|
||||
|
||||
[dependencies]
|
||||
enso-prelude = { version = "0.1.0", path = "../enso-prelude" }
|
@ -1,3 +0,0 @@
|
||||
# Enso Optics
|
||||
|
||||
A library providing lenses and prisms.
|
@ -1,540 +0,0 @@
|
||||
#![warn(unsafe_code)]
|
||||
#![warn(missing_copy_implementations)]
|
||||
#![warn(missing_debug_implementations)]
|
||||
#![allow(non_snake_case)]
|
||||
#![allow(non_camel_case_types)]
|
||||
#![allow(dead_code)]
|
||||
#![allow(unused_macros)]
|
||||
#![allow(clippy::option_map_unit_fn)]
|
||||
|
||||
use enso_prelude::*;
|
||||
|
||||
// ================
|
||||
// === TypeList ===
|
||||
// ================
|
||||
|
||||
trait HList {}
|
||||
impl HList for Nil {}
|
||||
impl<Head,Tail> HList for Cons<Head,Tail> where Head:?Sized, Tail:?Sized {}
|
||||
|
||||
struct Nil;
|
||||
struct Cons<Head,Tail>(PhantomData2<Head,Tail>) where Head:?Sized, Tail:?Sized;
|
||||
|
||||
// === Instances ===
|
||||
|
||||
impl<Head:?Sized, Tail:?Sized>
|
||||
Cons<Head,Tail> {
|
||||
pub fn new() -> Self { Self::default() }
|
||||
}
|
||||
|
||||
impl<Head:?Sized, Tail:?Sized>
|
||||
Default for Cons<Head,Tail> {
|
||||
fn default() -> Self { Self(default()) }
|
||||
}
|
||||
|
||||
// === Append ===
|
||||
|
||||
type Append<El, Els> = <Els as Appendable<El>>::Result;
|
||||
trait Appendable<T:?Sized> { type Result; }
|
||||
|
||||
impl<T:?Sized>
|
||||
Appendable<T> for Nil {
|
||||
type Result = Cons<T, Nil>;
|
||||
}
|
||||
|
||||
impl<T:?Sized, Head:?Sized, Tail:?Sized+Appendable<T>>
|
||||
Appendable<T> for Cons<Head, Tail> {
|
||||
type Result = Cons<Head, Append<T, Tail>>;
|
||||
}
|
||||
|
||||
// =============
|
||||
// === Field ===
|
||||
// =============
|
||||
|
||||
type Field<T, Field> = <T as HasField<Field>>::Result;
|
||||
trait HasField<Field> { type Result; }
|
||||
|
||||
// ==============
|
||||
// === Getter ===
|
||||
// ==============
|
||||
|
||||
trait Getter<T>: HasField<T> {
|
||||
fn get (& self) -> & Field<Self, T>;
|
||||
fn get_mut (&mut self) -> &mut Field<Self, T>;
|
||||
}
|
||||
|
||||
trait OptGetter<T>: HasField<T> {
|
||||
fn get (& self) -> Option <& Field<Self, T>>;
|
||||
fn get_mut (&mut self) -> Option <&mut Field<Self, T>>;
|
||||
}
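// Sketch of the intended shape of these impls (illustrative only; `Circle` and the field-marker
// type `circle_radius` below are hypothetical):
//
//     struct Circle { radius: f32 }
//     struct circle_radius;
//     impl HasField<circle_radius> for Circle { type Result = f32; }
//     impl Getter<circle_radius> for Circle {
//         fn get     (&    self) -> &    f32 { &    self.radius }
//         fn get_mut (&mut self) -> &mut f32 { &mut self.radius }
//     }
//     // A `Resolver` path such as `Cons<circle_radius, Nil>` can then reach the field through
//     // any nesting of such getters.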
|
||||
|
||||
// ================
|
||||
// === Resolver ===
|
||||
// ================
|
||||
|
||||
// === FieldResolver ===
|
||||
|
||||
type NestedField<T, Path> = <Path as FieldResolver<T>>::Result;
|
||||
trait FieldResolver<T: ?Sized> { type Result; }
|
||||
|
||||
impl<T>
|
||||
FieldResolver<T> for Nil {
|
||||
type Result = T;
|
||||
}
|
||||
|
||||
impl<Head, Tail, T>
|
||||
FieldResolver<T> for Cons<Head, Tail>
|
||||
where T: HasField<Head>, Tail: FieldResolver<Field<T, Head>> {
|
||||
type Result = NestedField<Field<T, Head>, Tail>;
|
||||
}
|
||||
|
||||
// === Resolver ===
|
||||
|
||||
trait Resolver<T>: FieldResolver<T> {
|
||||
fn resolve (t: & T) -> & NestedField<T, Self>;
|
||||
fn resolve_mut (t: &mut T) -> &mut NestedField<T, Self>;
|
||||
}
|
||||
|
||||
trait OptResolver<T>: FieldResolver<T> {
|
||||
fn resolve (t: & T) -> Option<& NestedField<T, Self>>;
|
||||
fn resolve_mut (t: &mut T) -> Option<&mut NestedField<T, Self>>;
|
||||
}
|
||||
|
||||
impl<T> Resolver<T> for Nil {
|
||||
fn resolve (t: & T) -> & NestedField<T, Self> { t }
|
||||
fn resolve_mut (t: &mut T) -> &mut NestedField<T, Self> { t }
|
||||
}
|
||||
|
||||
impl<T> OptResolver<T> for Nil {
|
||||
fn resolve (t: & T) -> Option<& NestedField<T, Self>> { Some(t) }
|
||||
fn resolve_mut (t: &mut T) -> Option<&mut NestedField<T, Self>> { Some(t) }
|
||||
}
|
||||
|
||||
impl<Head:'static,Tail,T> Resolver<T> for Cons<Head, Tail>
|
||||
where T: Getter<Head>, Tail: Resolver<Field<T, Head>> {
|
||||
fn resolve(t: &T) -> &NestedField<T, Self> {
|
||||
let head = Getter::<Head>::get(t);
|
||||
<Tail as Resolver<Field<T, Head>>>::resolve(head)
|
||||
}
|
||||
fn resolve_mut(t: &mut T) -> &mut NestedField<T, Self> {
|
||||
let head = Getter::<Head>::get_mut(t);
|
||||
<Tail as Resolver<Field<T, Head>>>::resolve_mut(head)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Head:'static,Tail,T> OptResolver<T> for Cons<Head, Tail>
|
||||
where T: OptGetter<Head>, Tail: OptResolver<Field<T, Head>> {
|
||||
fn resolve(t: &T) -> Option<&NestedField<T, Self>> {
|
||||
OptGetter::<Head>::get(t)
|
||||
.and_then(|t| <Tail as OptResolver<Field<T, Head>>>::resolve(t))
|
||||
}
|
||||
fn resolve_mut(t: &mut T) -> Option<&mut NestedField<T, Self>> {
|
||||
OptGetter::<Head>::get_mut(t)
|
||||
.and_then(|t| <Tail as OptResolver<Field<T, Head>>>::resolve_mut(t))
|
||||
}
|
||||
}
|
||||
|
||||
// ============
|
||||
// === Lens ===
|
||||
// ============
|
||||
|
||||
/// A lens from `Src` to `Tgt`, identified purely by the type-level `Path` of field selectors.
struct Lens    <Src,Tgt,Path>(PhantomData3<Src,Tgt,Path>);
/// Like `Lens`, but resolution may fail, e.g. when the path goes through an enum constructor.
struct OptLens <Src,Tgt,Path>(PhantomData3<Src,Tgt,Path>);
|
||||
|
||||
impl<Src,Tgt,Path> Copy for Lens<Src,Tgt,Path> {}
|
||||
impl<Src,Tgt,Path> Clone for Lens<Src,Tgt,Path> {
|
||||
fn clone(&self) -> Self { Lens::new() }
|
||||
}
|
||||
|
||||
impl<Src,Tgt,Path> Copy for OptLens<Src,Tgt,Path> {}
|
||||
impl<Src,Tgt,Path> Clone for OptLens<Src,Tgt,Path> {
|
||||
fn clone(&self) -> Self { OptLens::new() }
|
||||
}
|
||||
|
||||
impl<Src,Tgt,Path> OptLens<Src,Tgt,Path> {
|
||||
pub fn resolve(self, t: &Src) -> Option<&NestedField<Src, Path>>
|
||||
where Path: OptResolver<Src> {
|
||||
<Path as OptResolver<Src>>::resolve(t)
|
||||
}
|
||||
pub fn resolve_mut(self, t: &mut Src) -> Option<&mut NestedField<Src, Path>>
|
||||
where Path: OptResolver<Src> {
|
||||
<Path as OptResolver<Src>>::resolve_mut(t)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Src,Tgt,Path> Lens<Src,Tgt,Path> {
|
||||
pub fn resolve(self, t: &Src) -> &NestedField<Src, Path>
|
||||
where Path: Resolver<Src> {
|
||||
<Path as Resolver<Src>>::resolve(&t)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Src,Tgt,Path> Lens<Src,Tgt,Path> {
|
||||
pub fn new() -> Self { default() }
|
||||
}
|
||||
|
||||
impl<Src,Tgt,Path> Default for Lens<Src,Tgt,Path> {
|
||||
fn default() -> Self { Self(default()) }
|
||||
}
|
||||
|
||||
impl<Src,Tgt,Path> OptLens<Src,Tgt,Path> {
|
||||
pub fn new() -> Self { default() }
|
||||
}
|
||||
|
||||
impl<Src,Tgt,Path> Default for OptLens<Src,Tgt,Path> {
|
||||
fn default() -> Self { Self(default()) }
|
||||
}
|
||||
|
||||
struct BoundLens<'t,Src,Tgt,Path> {
|
||||
target: &'t Src,
|
||||
lens: Lens<Src,Tgt,Path>,
|
||||
}
|
||||
|
||||
struct BoundLensMut<'t,Src,Tgt,Path> {
|
||||
target: &'t mut Src,
|
||||
lens: Lens<Src,Tgt,Path>,
|
||||
}
|
||||
|
||||
struct BoundOptLens<'t,Src,Tgt,Path> {
|
||||
target: &'t Src,
|
||||
lens: OptLens<Src,Tgt,Path>,
|
||||
}
|
||||
|
||||
struct BoundOptLensMut<'t,Src,Tgt,Path> {
|
||||
target: &'t mut Src,
|
||||
lens: OptLens<Src,Tgt,Path>,
|
||||
}
|
||||
|
||||
impl<'t,Src,Tgt,Path> BoundLens<'t,Src,Tgt,Path> {
|
||||
pub fn new(target: &'t Src) -> Self {
|
||||
let lens = Lens::new();
|
||||
Self { target, lens }
|
||||
}
|
||||
|
||||
pub fn read(&self) -> &NestedField<Src,Path>
|
||||
where Path: Resolver<Src> {
|
||||
self.lens.resolve(self.target)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'t,Src,Tgt,Path> BoundOptLens<'t,Src,Tgt,Path> {
|
||||
pub fn new(target: &'t Src) -> Self {
|
||||
let lens = OptLens::new();
|
||||
Self { target, lens }
|
||||
}
|
||||
|
||||
pub fn read(&self) -> Option<&NestedField<Src, Path>>
|
||||
where Path: OptResolver<Src> {
|
||||
self.lens.resolve(self.target)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'t,Src:Clone,Tgt,Path> BoundOptLens<'t,Src,Tgt,Path> {
|
||||
pub fn write(&self, val: NestedField<Src, Path>)
|
||||
where for<'a> Path: OptResolver<Src>,
|
||||
OptLens<Src,Tgt,Path>: Copy {
|
||||
let mut a = (*self.target).clone();
|
||||
a.lens_mut().unsafe_repath::<Path>().set(val);
|
||||
}
|
||||
}
|
||||
|
||||
impl<'t,Src,Tgt,Path> BoundOptLensMut<'t,Src,Tgt,Path> {
|
||||
pub fn new(target: &'t mut Src) -> Self {
|
||||
let lens = OptLens::new();
|
||||
Self { target, lens }
|
||||
}
|
||||
|
||||
pub fn get(&mut self) -> Option<&mut NestedField<Src, Path>>
|
||||
where Path: OptResolver<Src> {
|
||||
self.lens.resolve_mut(self.target)
|
||||
}
|
||||
|
||||
pub fn set(&mut self, val: NestedField<Src, Path>)
|
||||
where Path: OptResolver<Src> {
|
||||
let r = self.get();
|
||||
r.map(|s| *s = val);
|
||||
}
|
||||
|
||||
pub fn unsafe_repath<Path2>(self) -> BoundOptLensMut<'t,Src,Tgt,Path2> {
|
||||
BoundOptLensMut {
|
||||
target: self.target,
|
||||
lens: OptLens::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
////////////////////////////////////////
|
||||
|
||||
|
||||
trait HasLens where Self:Sized {
|
||||
fn lens(&self) -> BoundOptLens<'_, Self, Self, Nil> {
|
||||
BoundOptLens::new(self)
|
||||
}
|
||||
fn lens_mut(&mut self) -> BoundOptLensMut<'_, Self, Self, Nil> {
|
||||
BoundOptLensMut::new(self)
|
||||
}
|
||||
}
|
||||
impl<T> HasLens for T {}
|
||||
|
||||
macro_rules! mk_lens_field_decl {
|
||||
($struct_name:ident<$($param:ident),*>{$field_name:ident : $field_type:ty}) => {
|
||||
paste::item! {
|
||||
// struct FIELD_bar;
|
||||
struct [<FIELD_ $field_name>];
|
||||
|
||||
// impl HasField<FIELD_bar> for Foo<> {
|
||||
// type Result = Bar;
|
||||
// }
|
||||
impl<$($param),*> HasField<[<FIELD_ $field_name>]> for $struct_name<$($param),*> {
|
||||
type Result = $field_type;
|
||||
}
|
||||
}}}
|
||||
|
||||
macro_rules! mk_lenses_for {
|
||||
($struct_name:ident<$($param:ident),*>{$field_name:ident : $field_type:ty}) => {
|
||||
paste::item! {
|
||||
|
||||
mk_lens_field_decl!($struct_name<$($param),*>{$field_name:$field_type});
|
||||
|
||||
// impl Getter<FIELD_bar> for Foo {
|
||||
// fn get(&self) -> &Field<Self, FIELD_bar> {
|
||||
// &self.bar
|
||||
// }
|
||||
// fn get_mut(&mut self) -> &mut Field<Self, FIELD_bar> {
|
||||
// &mut self.bar
|
||||
// }
|
||||
// }
|
||||
impl<$($param),*> Getter<[<FIELD_ $field_name>]> for $struct_name<$($param),*> {
|
||||
fn get(&self) -> &Field<Self, [<FIELD_ $field_name>]> {
|
||||
&self.$field_name
|
||||
}
|
||||
fn get_mut(&mut self) -> &mut Field<Self, [<FIELD_ $field_name>]> {
|
||||
&mut self.$field_name
|
||||
}
|
||||
}
|
||||
|
||||
// impl OptGetter<FIELD_bar> for Foo {
|
||||
// fn get(&self) -> Option<&Field<Self, FIELD_bar>> {
|
||||
// Some(&self.bar)
|
||||
// }
|
||||
// fn get_mut(&mut self) -> Option<&mut Field<Self, FIELD_bar>> {
|
||||
// Some(&mut self.bar)
|
||||
// }
|
||||
// }
|
||||
impl<$($param),*> OptGetter<[<FIELD_ $field_name>]> for $struct_name<$($param),*> {
|
||||
fn get(&self) -> Option<&Field<Self, [<FIELD_ $field_name>]>> {
|
||||
Some(&self.$field_name)
|
||||
}
|
||||
fn get_mut(&mut self)
|
||||
-> Option<&mut Field<Self, [<FIELD_ $field_name>]>> {
|
||||
Some(&mut self.$field_name)
|
||||
}
|
||||
}
|
||||
|
||||
// impl<'lens_lifetime, LENS_BEGIN, LENS_NEW_PATH>
|
||||
// BoundOptLensMut<'lens_lifetime, LENS_BEGIN, Foo, LENS_NEW_PATH>
|
||||
// where LENS_NEW_PATH: Appendable<FIELD_bar> {
|
||||
// fn bar(self)
|
||||
// -> BoundOptLensMut
|
||||
// < 'lens_lifetime
|
||||
// , LENS_BEGIN
|
||||
// , Bar
|
||||
// , Append<FIELD_bar
|
||||
// , LENS_NEW_PATH>
|
||||
// > {
|
||||
// BoundOptLensMut::new(self.target)
|
||||
// }
|
||||
// }
|
||||
impl<'lens_lifetime, LENS_BEGIN, LENS_NEW_PATH>
|
||||
BoundOptLensMut<'lens_lifetime, LENS_BEGIN, $struct_name, LENS_NEW_PATH>
|
||||
where LENS_NEW_PATH: Appendable<[<FIELD_ $field_name>]> {
|
||||
fn $field_name(self)
|
||||
-> BoundOptLensMut
|
||||
< 'lens_lifetime
|
||||
, LENS_BEGIN
|
||||
, Field<$struct_name
|
||||
,[<FIELD_ $field_name>]>
|
||||
, Append<[<FIELD_ $field_name>]
|
||||
, LENS_NEW_PATH>
|
||||
> {
|
||||
BoundOptLensMut::new(self.target)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'lens_lifetime, LENS_BEGIN, LENS_NEW_PATH>
|
||||
BoundOptLens<'lens_lifetime, LENS_BEGIN, $struct_name, LENS_NEW_PATH>
|
||||
where LENS_NEW_PATH: Appendable<[<FIELD_ $field_name>]> {
|
||||
fn $field_name(self)
|
||||
-> BoundOptLens
|
||||
< 'lens_lifetime
|
||||
, LENS_BEGIN
|
||||
, Field<$struct_name
|
||||
,[<FIELD_ $field_name>]>
|
||||
, Append<[<FIELD_ $field_name>]
|
||||
, LENS_NEW_PATH>
|
||||
> {
|
||||
BoundOptLens::new(self.target)
|
||||
}
|
||||
}
|
||||
}};
|
||||
|
||||
($struct_name:ident<$($param:ident),*> :: $cons_name:ident {$field_name:ident : $field_type:ty}) => {
|
||||
paste::item! {
|
||||
|
||||
mk_lens_field_decl!($struct_name<$($param),*>{$field_name:$field_type});
|
||||
|
||||
// impl OptGetter<FIELD_bar> for Foo {
|
||||
// fn get(&self) -> Option<&Field<Self, FIELD_bar>> {
|
||||
// Some(&self.bar)
|
||||
// }
|
||||
// fn get_mut(&mut self) -> Option<&mut Field<Self, FIELD_bar>> {
|
||||
// Some(&mut self.bar)
|
||||
// }
|
||||
// }
|
||||
impl<$($param),*> OptGetter<[<FIELD_ $field_name>]> for $struct_name<$($param),*> {
|
||||
fn get(&self) -> Option<&Field<Self, [<FIELD_ $field_name>]>> {
|
||||
match self {
|
||||
$cons_name(ref $field_name) => Some($field_name),
|
||||
_ => None
|
||||
}
|
||||
}
|
||||
fn get_mut(&mut self)
|
||||
-> Option<&mut Field<Self, [<FIELD_ $field_name>]>> {
|
||||
match self {
|
||||
$cons_name(ref mut $field_name) => Some($field_name),
|
||||
_ => None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// impl<'lens_lifetime, LENS_BEGIN, LENS_NEW_PATH>
|
||||
// BoundOptLensMut<'lens_lifetime, LENS_BEGIN, Foo, LENS_NEW_PATH>
|
||||
// where LENS_NEW_PATH: Appendable<FIELD_bar> {
|
||||
// fn bar(self)
|
||||
// -> BoundOptLensMut
|
||||
// < 'lens_lifetime
|
||||
// , LENS_BEGIN
|
||||
// , Field<FIELD_Bar, Foo<>>
|
||||
// , Append<FIELD_bar, LENS_NEW_PATH>
|
||||
// > {
|
||||
// BoundOptLensMut::new(self.target)
|
||||
// }
|
||||
// }
|
||||
impl<'lens_lifetime, LENS_BEGIN, LENS_NEW_PATH, $($param),*>
|
||||
BoundOptLensMut<'lens_lifetime, LENS_BEGIN, $struct_name<$($param),*>, LENS_NEW_PATH>
|
||||
where LENS_NEW_PATH: Appendable<[<FIELD_ $field_name>]> {
|
||||
fn $cons_name(self)
|
||||
-> BoundOptLensMut
|
||||
< 'lens_lifetime
|
||||
, LENS_BEGIN
|
||||
, Field<$struct_name<$($param),*>,[<FIELD_ $field_name>]>
|
||||
, Append<[<FIELD_ $field_name>], LENS_NEW_PATH>
|
||||
> {
|
||||
BoundOptLensMut::new(self.target)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'lens_lifetime, LENS_BEGIN, LENS_NEW_PATH, $($param),*>
|
||||
BoundOptLens<'lens_lifetime, LENS_BEGIN, $struct_name<$($param),*>, LENS_NEW_PATH>
|
||||
where LENS_NEW_PATH: Appendable<[<FIELD_ $field_name>]> {
|
||||
fn $cons_name(self)
|
||||
-> BoundOptLens
|
||||
< 'lens_lifetime
|
||||
, LENS_BEGIN
|
||||
, Field<$struct_name<$($param),*>,[<FIELD_ $field_name>]>
|
||||
, Append<[<FIELD_ $field_name>], LENS_NEW_PATH>
|
||||
> {
|
||||
BoundOptLens::new(self.target)
|
||||
}
|
||||
}
|
||||
}}}
|
||||
|
||||
macro_rules! lens {
|
||||
($base:ident . $($seg:ident).*) => {
|
||||
$base.lens().$($seg()).*
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! lens_mut {
|
||||
($base:ident . $($seg:ident).*) => {
|
||||
$base.lens_mut().$($seg()).*
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
mk_lenses_for!(Option<T>::Some{val: T});
|
||||
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
struct Foo {
|
||||
bar: Bar,
|
||||
}
|
||||
mk_lenses_for!(Foo<>{bar: Bar});
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
struct Bar {
|
||||
baz: Option<Baz>,
|
||||
}
|
||||
mk_lenses_for!(Bar<>{baz: Option<Baz>});
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
struct Baz {
|
||||
qux: Option<Qux>,
|
||||
}
|
||||
mk_lenses_for!(Baz<>{qux: Option<Qux>});
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
struct Qux {
|
||||
quxx: String,
|
||||
}
|
||||
mk_lenses_for!(Qux<>{quxx: String});
|
||||
|
||||
|
||||
#[test]
|
||||
fn deeply_nested() {
|
||||
let mut foo = Foo {
|
||||
bar: Bar {
|
||||
baz: Some(Baz {
|
||||
qux: Some(Qux {
|
||||
quxx: "nice".to_owned(),
|
||||
}),
|
||||
}),
|
||||
},
|
||||
};
|
||||
|
||||
lens_mut!(foo.bar.baz.Some.qux.Some.quxx).set("Hello".into());
|
||||
assert_eq!(foo, Foo {
|
||||
bar: Bar {
|
||||
baz: Some(Baz {
|
||||
qux: Some(Qux {
|
||||
quxx: "Hello".to_owned(),
|
||||
}),
|
||||
}),
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub fn main() {
|
||||
let mut val: Option<Option<i32>> = Some(Some(17));
|
||||
|
||||
val.lens_mut().Some().Some().set(10);
|
||||
let lens1 = val.lens().Some().Some();
|
||||
let mut val2 = *lens1.target;
|
||||
let lm = val2.lens_mut();
|
||||
let mut lm2 = BoundOptLensMut {
|
||||
target: lm.target,
|
||||
lens: lens1.lens,
|
||||
};
|
||||
lm2.set(9);
|
||||
println!("{:?}", val.lens_mut().Some().Some().get());
|
||||
println!("{:?}", val2.lens_mut().Some().Some().get());
|
||||
}
|
@ -1,83 +0,0 @@
|
||||
[package]
|
||||
name = "enso-prelude"
|
||||
version = "0.1.0"
|
||||
authors = ["Enso Team <enso-dev@enso.org>"]
|
||||
edition = "2018"
|
||||
|
||||
description = "An augmented standard library in the vein of Haskell's prelude."
|
||||
readme = "README.md"
|
||||
homepage = "https://github.com/enso-org/enso/lib/rust/enso-prelude"
|
||||
repository = "https://github.com/enso-org/enso"
|
||||
license-file = "../../../LICENSE"
|
||||
|
||||
keywords = ["prelude", "standard-library"]
|
||||
categories = ["algorithms"]
|
||||
|
||||
publish = true
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
|
||||
[dependencies]
|
||||
enso-shapely = { version = "0.1.0" , path = "../enso-shapely/impl" }
|
||||
|
||||
boolinator = "2.4.0"
|
||||
derivative = "1.0.3"
|
||||
derive_more = "0.99.2"
|
||||
enclose = "1.1.8"
|
||||
failure = "0.1.5"
|
||||
ifmt = "0.2.0"
|
||||
itertools = "0.8"
|
||||
lazy_static = "1.4"
|
||||
num = "0.2.0"
|
||||
paste = "0.1"
|
||||
shrinkwraprs = "0.3.0"
|
||||
smallvec = "1.0.0"
|
||||
weak-table = "0.2.3"
|
||||
wasm-bindgen = { version = "=0.2.58" , features = ["nightly"] }
|
||||
|
||||
# TODO: should be behind a flag, as the `nalgebra` package is pretty big and this crate would also
# be useful for projects which do not require `nalgebra`.
|
||||
nalgebra = "0.21.1"
|
||||
|
||||
[dependencies.web-sys]
|
||||
version = "0.3.4"
|
||||
features = [
|
||||
'CanvasRenderingContext2d',
|
||||
'CssStyleDeclaration',
|
||||
'Document',
|
||||
'Element',
|
||||
'EventTarget',
|
||||
'KeyboardEvent',
|
||||
'HtmlCanvasElement',
|
||||
'HtmlCollection',
|
||||
'HtmlDivElement',
|
||||
'HtmlElement',
|
||||
'HtmlImageElement',
|
||||
'Location',
|
||||
'Node',
|
||||
'Url',
|
||||
'WebGlBuffer',
|
||||
'WebGlFramebuffer',
|
||||
'WebGlProgram',
|
||||
'WebGlRenderingContext',
|
||||
'WebGlShader',
|
||||
'WebGlSync',
|
||||
'WebGlTexture',
|
||||
'WebGl2RenderingContext',
|
||||
'WebGlUniformLocation',
|
||||
'WebGlVertexArrayObject',
|
||||
'Window',
|
||||
'console',
|
||||
'EventTarget',
|
||||
'Event',
|
||||
'MouseEvent',
|
||||
'Performance',
|
||||
'WheelEvent',
|
||||
'DomRect',
|
||||
'AddEventListenerOptions'
|
||||
]
|
||||
|
||||
[dev-dependencies]
|
||||
wasm-bindgen-test = "0.2"
|
@ -1,5 +0,0 @@
|
||||
# Enso Prelude
|
||||
|
||||
The Enso prelude is a library in the style of the Haskell prelude, pulling in a
|
||||
number of useful foundational features for writing Rust code in the
|
||||
[Enso](https://github.com/enso-org/enso) project.
|
@ -1,117 +0,0 @@
|
||||
|
||||
use crate::*;
|
||||
pub use enso_shapely::CloneRef;
|
||||
|
||||
|
||||
|
||||
// ================
|
||||
// === CloneRef ===
|
||||
// ================
|
||||
|
||||
/// Clone for internally-mutable structures. This trait can be implemented only if mutating one
/// structure is reflected in all of its clones. Please note that this does not mean that all the
/// fields need to provide internal mutability as well. For example, a structure can remember its
/// creation time and store it as an `f32`. As long as it cannot be mutated, the structure can
/// implement `CloneRef`. In order to guide the auto-deriving mechanism, it is advised to wrap all
/// immutable fields in the `Immutable` newtype.
|
||||
pub trait CloneRef: Sized {
|
||||
fn clone_ref(&self) -> Self;
|
||||
}
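
A minimal sketch of a hand-written `CloneRef` impl for a type with shared internal mutability. The `Counter` type below is invented for illustration and is not part of this crate:

use std::cell::Cell;
use std::rc::Rc;

/// Hypothetical counter whose state is shared between all of its clones.
#[derive(Clone, Debug, Default)]
struct Counter {
    count: Rc<Cell<usize>>,
}

impl CloneRef for Counter {
    fn clone_ref(&self) -> Self {
        // Cloning the `Rc` shares the same cell, so mutations are visible in every clone.
        Self { count: self.count.clone() }
    }
}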
|
||||
|
||||
|
||||
// === Macros ===
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! impl_clone_ref_as_clone {
|
||||
([$($bounds:tt)*] $($toks:tt)*) => {
|
||||
impl <$($bounds)*> CloneRef for $($toks)* {
|
||||
fn clone_ref(&self) -> Self {
|
||||
self.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl <$($bounds)*> From<&$($toks)*> for $($toks)* {
|
||||
fn from(t:&$($toks)*) -> Self {
|
||||
t.clone_ref()
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
($($toks:tt)*) => {
|
||||
impl CloneRef for $($toks)* {
|
||||
fn clone_ref(&self) -> Self {
|
||||
self.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&$($toks)*> for $($toks)* {
|
||||
fn from(t:&$($toks)*) -> Self {
|
||||
t.clone_ref()
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! impl_clone_ref_as_clone_no_from {
|
||||
([$($bounds:tt)*] $($toks:tt)*) => {
|
||||
impl <$($bounds)*> CloneRef for $($toks)* {
|
||||
fn clone_ref(&self) -> Self {
|
||||
self.clone()
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
($($toks:tt)*) => {
|
||||
impl CloneRef for $($toks)* {
|
||||
fn clone_ref(&self) -> Self {
|
||||
self.clone()
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
// === Prim Impls ===
|
||||
|
||||
impl_clone_ref_as_clone_no_from!(());
|
||||
impl_clone_ref_as_clone_no_from!(f32);
|
||||
impl_clone_ref_as_clone_no_from!(f64);
|
||||
impl_clone_ref_as_clone_no_from!(i32);
|
||||
impl_clone_ref_as_clone_no_from!(i64);
|
||||
impl_clone_ref_as_clone_no_from!(usize);
|
||||
impl_clone_ref_as_clone_no_from!([T] PhantomData<T>);
|
||||
impl_clone_ref_as_clone_no_from!([T:?Sized] Rc<T>);
|
||||
impl_clone_ref_as_clone_no_from!([T:?Sized] Weak<T>);
|
||||
|
||||
impl_clone_ref_as_clone_no_from!(wasm_bindgen::JsValue);
|
||||
impl_clone_ref_as_clone_no_from!(web_sys::HtmlDivElement);
|
||||
impl_clone_ref_as_clone_no_from!(web_sys::HtmlElement);
|
||||
impl_clone_ref_as_clone_no_from!(web_sys::Performance);
|
||||
impl_clone_ref_as_clone_no_from!(web_sys::WebGl2RenderingContext);
|
||||
impl_clone_ref_as_clone_no_from!(web_sys::HtmlCanvasElement);
|
||||
impl_clone_ref_as_clone_no_from!(web_sys::EventTarget);
|
||||
|
||||
|
||||
// === Option ===
|
||||
|
||||
/// Trait for types that can be internally cloned using `CloneRef`, like `Option<&T>`.
|
||||
#[allow(missing_docs)]
|
||||
pub trait ClonedRef {
|
||||
type Output;
|
||||
fn cloned_ref(&self) -> Self::Output;
|
||||
}
|
||||
|
||||
impl<T:CloneRef> ClonedRef for Option<&T> {
|
||||
type Output = Option<T>;
|
||||
fn cloned_ref(&self) -> Self::Output {
|
||||
self.map(|t| t.clone_ref())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T:CloneRef> ClonedRef for Option<&mut T> {
|
||||
type Output = Option<T>;
|
||||
fn cloned_ref(&self) -> Self::Output {
|
||||
self.as_ref().map(|t| t.clone_ref())
|
||||
}
|
||||
}
|
@ -1,6 +0,0 @@
|
||||
//! This module exports collections which are popular enough to be available everywhere.
|
||||
|
||||
pub use smallvec::SmallVec;
|
||||
pub use std::collections::BTreeMap;
|
||||
pub use std::collections::HashMap;
|
||||
pub use std::collections::HashSet;
|
@ -1,11 +0,0 @@
|
||||
//! Generic data types and utilities.
|
||||
|
||||
pub mod at_least_one_of_two;
|
||||
pub mod monoid;
|
||||
pub mod non_empty_vec;
|
||||
pub mod semigroup;
|
||||
|
||||
pub use at_least_one_of_two::*;
|
||||
pub use monoid::*;
|
||||
pub use non_empty_vec::*;
|
||||
pub use semigroup::*;
|
@ -1,55 +0,0 @@
|
||||
//! Definition of `AtLeastOneOfTwo`.
|
||||
|
||||
|
||||
|
||||
// =======================
|
||||
// === AtLeastOneOfTwo ===
|
||||
// =======================
|
||||
|
||||
/// A struct similar to `Option` and `Either`. It can contain the first value, or the second value,
|
||||
/// or both of them at the same time.
|
||||
#[derive(Debug,Clone,Copy)]
|
||||
#[allow(missing_docs)]
|
||||
pub enum AtLeastOneOfTwo<T1,T2> {
|
||||
First(T1),
|
||||
Second(T2),
|
||||
Both(T1,T2)
|
||||
}
|
||||
|
||||
impl<T:PartialEq> AtLeastOneOfTwo<T,T> {
|
||||
/// Checks whether the values are equal.
|
||||
pub fn same(&self) -> bool {
|
||||
match self {
|
||||
Self::Both(t1,t2) => t1 == t2,
|
||||
_ => false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T1,T2> AtLeastOneOfTwo<T1,T2> {
|
||||
/// Extracts the first value if it exists.
|
||||
pub fn first(&self) -> Option<&T1> {
|
||||
match self {
|
||||
Self::Both(t1,_) => Some(t1),
|
||||
Self::First(t1) => Some(t1),
|
||||
_ => None
|
||||
}
|
||||
}
|
||||
|
||||
/// Extracts the second value if it exists.
|
||||
pub fn second(&self) -> Option<&T2> {
|
||||
match self {
|
||||
Self::Both(_,t2) => Some(t2),
|
||||
Self::Second(t2) => Some(t2),
|
||||
_ => None
|
||||
}
|
||||
}
|
||||
|
||||
/// Extracts both values if both of them exist.
|
||||
pub fn both(&self) -> Option<(&T1,&T2)> {
|
||||
match self {
|
||||
Self::Both(t1,t2) => Some((t1,t2)),
|
||||
_ => None
|
||||
}
|
||||
}
|
||||
}
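
An illustrative usage sketch; the concrete values and the `example` wrapper function are invented for this example:

fn example() {
    let both: AtLeastOneOfTwo<i32, &str> = AtLeastOneOfTwo::Both(1, "one");
    assert_eq!(both.first(), Some(&1));
    assert_eq!(both.second(), Some(&"one"));
    assert_eq!(both.both(), Some((&1, &"one")));

    let only_first: AtLeastOneOfTwo<i32, &str> = AtLeastOneOfTwo::First(1);
    assert_eq!(only_first.second(), None);
}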
|
@ -1,107 +0,0 @@
|
||||
//! A class for monoids (types with an associative binary operation that has an identity) with
|
||||
//! various general-purpose instances.
|
||||
|
||||
use super::semigroup::Semigroup;
|
||||
|
||||
|
||||
|
||||
// ===============
|
||||
// === Monoid ===
|
||||
// ===============
|
||||
|
||||
/// Mutable Monoid definition.
|
||||
pub trait Monoid : Default + Semigroup {
|
||||
/// Repeat a value `n` times. Because this works on a `Monoid`, requesting zero repetitions simply
/// yields the identity (`Default`) value.
|
||||
fn times_mut(&mut self, n:usize) {
|
||||
if n == 0 {
|
||||
*self = Default::default()
|
||||
} else {
|
||||
let val = self.clone();
|
||||
for _ in 0..n-1 {
|
||||
self.concat_mut(&val)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Non-mutable version of `times_mut`: returns the value repeated `n` times.
fn times(&self, n:usize) -> Self {
|
||||
std::iter::repeat(self).take(n).fold(Default::default(),|l,r| l.concat_ref(r))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// === Default Impls ===
|
||||
|
||||
impl<T> Monoid for T where T : Default + Semigroup {}
|
||||
|
||||
|
||||
|
||||
// =============
|
||||
// === Tests ===
|
||||
// =============
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn option() {
|
||||
let vec_nop : Vec<usize> = vec![];
|
||||
let vec_1_2 : Vec<usize> = vec![1,2];
|
||||
let vec_1_2_times_3 : Vec<usize> = vec![1,2,1,2,1,2];
|
||||
assert_eq!(vec_1_2.times(0) , vec_nop);
|
||||
assert_eq!(vec_1_2.times(1) , vec_1_2);
|
||||
assert_eq!(vec_1_2.times(3) , vec_1_2_times_3);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// TODO: Think about what to do with this. It would not be needed if tuples implemented `Iter`.
// Alternatively, we could implement our own tuple type.
|
||||
|
||||
//trait Foldable {
|
||||
// type Item : Monoid;
|
||||
// fn fold(self) -> Self::Item;
|
||||
//}
|
||||
//
|
||||
//
|
||||
//
|
||||
//macro_rules! replace {
|
||||
// ($a:tt,$b:tt) => {$b};
|
||||
//}
|
||||
//
|
||||
//
|
||||
//macro_rules! define_foldable_for_tuple {
|
||||
// (0$(,$num:tt)*) => {
|
||||
// impl<T:Monoid> Foldable for (T,$(replace!{$num,T}),*) {
|
||||
// type Item = T;
|
||||
// fn fold(self) -> Self::Item {
|
||||
// self.0$(.concat(self.$num))*
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// impl<T:Monoid> Foldable for &(T,$(replace!{$num,T}),*) {
|
||||
// type Item = T;
|
||||
// fn fold(self) -> Self::Item {
|
||||
// self.0.clone()$(.concat(&self.$num))*
|
||||
// }
|
||||
// }
|
||||
// };
|
||||
//}
|
||||
//
|
||||
//define_foldable_for_tuple![0];
|
||||
//define_foldable_for_tuple![0,1];
|
||||
//define_foldable_for_tuple![0,1,2];
|
||||
//define_foldable_for_tuple![0,1,2,3];
|
||||
//define_foldable_for_tuple![0,1,2,3,4];
|
||||
//define_foldable_for_tuple![0,1,2,3,4,5];
|
||||
//define_foldable_for_tuple![0,1,2,3,4,5,6];
|
||||
//define_foldable_for_tuple![0,1,2,3,4,5,6,7];
|
||||
//define_foldable_for_tuple![0,1,2,3,4,5,6,7,8];
|
||||
//define_foldable_for_tuple![0,1,2,3,4,5,6,7,8,9];
|
||||
//define_foldable_for_tuple![0,1,2,3,4,5,6,7,8,9,10];
|
||||
//define_foldable_for_tuple![0,1,2,3,4,5,6,7,8,9,10,11];
|
||||
//define_foldable_for_tuple![0,1,2,3,4,5,6,7,8,9,10,11,12];
|
||||
//define_foldable_for_tuple![0,1,2,3,4,5,6,7,8,9,10,11,12,13];
|
||||
//define_foldable_for_tuple![0,1,2,3,4,5,6,7,8,9,10,11,12,13,14];
|
||||
//define_foldable_for_tuple![0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15];
|
@ -1,303 +0,0 @@
|
||||
//! This file contains an implementation of Vec that can't be empty.
|
||||
|
||||
use crate::*;
|
||||
|
||||
use std::vec::Drain;
|
||||
use std::vec::Splice;
|
||||
use std::ops::Bound;
|
||||
|
||||
|
||||
// ===================
|
||||
// === NonEmptyVec ===
|
||||
// ===================
|
||||
|
||||
/// A version of [`std::vec::Vec`] that can't be empty.
|
||||
#[allow(missing_docs)]
|
||||
#[derive(Clone,Debug,PartialEq)]
|
||||
pub struct NonEmptyVec<T> {
|
||||
elems: Vec<T>
|
||||
}
|
||||
|
||||
impl<T> Deref for NonEmptyVec<T> {
|
||||
type Target = Vec<T>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.elems
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> NonEmptyVec<T> {
|
||||
/// Construct a new non-empty vector.
|
||||
///
|
||||
/// The vector will not allocate more than the space required to contain `first` and `rest`.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// #![allow(unused_mut)]
|
||||
/// use enso_prelude::NonEmptyVec;
|
||||
/// let mut vec: NonEmptyVec<usize> = NonEmptyVec::new(0,vec![]);
|
||||
/// ```
|
||||
pub fn new(first:T, rest:Vec<T>) -> NonEmptyVec<T> {
|
||||
let mut elems = vec![first];
|
||||
elems.extend(rest);
|
||||
NonEmptyVec{elems}
|
||||
}
|
||||
|
||||
/// Construct a `NonEmptyVec` containing a single element.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use enso_prelude::NonEmptyVec;
|
||||
/// let vec = NonEmptyVec::singleton(0);
|
||||
/// assert_eq!(vec.get(0),Some(&0));
|
||||
/// assert_eq!(vec.len(),1);
|
||||
/// ```
|
||||
pub fn singleton(first:T) -> NonEmptyVec<T> {
|
||||
NonEmptyVec::new(first,vec![])
|
||||
}
|
||||
|
||||
/// Construct a new, `NonEmptyVec<T>` containing the provided element and with the provided
|
||||
/// `capacity`.
|
||||
///
|
||||
/// The vector will be able to hold exactly `capacity` elements without reallocating. Note that
/// `capacity` must be greater than zero, as the vector always contains at least the `first` element.
|
||||
///
|
||||
/// It is important to note that although the returned vector has the *capacity* specified, the
|
||||
/// vector will have a length of 1.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if `capacity` is not > 0.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use enso_prelude::NonEmptyVec;
|
||||
/// let mut vec = NonEmptyVec::with_capacity(0, 10);
|
||||
///
|
||||
/// // The vector contains one item, even though it has capacity for more
|
||||
/// assert_eq!(vec.len(), 1);
|
||||
///
|
||||
/// // These are all done without reallocating...
|
||||
/// for i in 1..10 {
|
||||
/// vec.push(i);
|
||||
/// }
|
||||
///
|
||||
/// // ...but this may make the vector reallocate
|
||||
/// vec.push(11);
|
||||
/// ```
|
||||
pub fn with_capacity(first:T, capacity:usize) -> NonEmptyVec<T> {
|
||||
if capacity == 0 {
|
||||
panic!("Capacity must be greater than zero for a NonEmptyVec.");
|
||||
}
|
||||
let mut elems = Vec::with_capacity(capacity);
|
||||
elems.push(first);
|
||||
NonEmptyVec{elems}
|
||||
}
|
||||
|
||||
/// Reserve capacity for at least `additional` more elements to be inserted in the given
|
||||
/// `Vec<T>`.
|
||||
///
|
||||
/// The collection may reserve more space to avoid frequent reallocations. After calling
|
||||
/// `reserve`, capacity will be greater than or equal to `self.len() + additional`. Does nothing
|
||||
/// if capacity is already sufficient.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if the new capacity overflows `usize`.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use enso_prelude::NonEmptyVec;
|
||||
/// let mut vec = NonEmptyVec::new(0,vec![]);
|
||||
/// vec.reserve(10);
|
||||
/// assert!(vec.capacity() >= 11);
|
||||
/// ```
|
||||
pub fn reserve(&mut self, additional:usize) {
|
||||
self.elems.reserve(additional);
|
||||
}
|
||||
|
||||
/// Shrinks the capacity of the `NonEmptyVec` as much as possible.
|
||||
///
|
||||
/// It will drop down as close as possible to the length, but the allocator may still inform the
|
||||
/// vector that there is space for a few more elements.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use enso_prelude::NonEmptyVec;
|
||||
/// let mut vec = NonEmptyVec::with_capacity(0, 10);
|
||||
/// assert_eq!(vec.capacity(),10);
|
||||
/// vec.shrink_to_fit();
|
||||
/// assert!(vec.capacity() < 10);
|
||||
/// ```
|
||||
pub fn shrink_to_fit(&mut self) {
|
||||
self.elems.shrink_to_fit();
|
||||
}
|
||||
|
||||
/// Append an element to the back of a collection.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if the number of elements in the vector overflows a `usize`.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use enso_prelude::NonEmptyVec;
|
||||
/// let mut vec = NonEmptyVec::new(0,vec![1,2]);
|
||||
/// vec.push(3);
|
||||
/// assert_eq!(vec.len(),4);
|
||||
/// ```
|
||||
pub fn push(&mut self, value:T) {
|
||||
self.elems.push(value)
|
||||
}
|
||||
|
||||
/// Remove an element from the back of the collection, returning it.
|
||||
///
|
||||
/// Will not pop any item if there is only one item left in the vector.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use enso_prelude::NonEmptyVec;
|
||||
/// let mut vec = NonEmptyVec::new(0,vec![1]);
|
||||
/// assert!(vec.pop().is_some());
|
||||
/// assert!(vec.pop().is_none());
|
||||
/// assert_eq!(vec.len(),1);
|
||||
/// ```
|
||||
pub fn pop(&mut self) -> Option<T> {
|
||||
(self.len() > 1).and_option_from(||self.elems.pop())
|
||||
}
|
||||
|
||||
/// Obtain a mutable reference to the element in the vector at the specified `index`.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use enso_prelude::NonEmptyVec;
|
||||
/// let mut vec = NonEmptyVec::new(0,vec![1,2]);
|
||||
/// let reference = vec.get_mut(0);
|
||||
/// assert!(reference.is_some());
|
||||
/// assert_eq!(*reference.unwrap(),0);
|
||||
/// ```
|
||||
pub fn get_mut(&mut self, index:usize) -> Option<&mut T> {
|
||||
self.elems.get_mut(index)
|
||||
}
|
||||
|
||||
/// Obtain an immutable reference to the head of the `NonEmptyVec`.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use enso_prelude::NonEmptyVec;
|
||||
/// let vec = NonEmptyVec::new(0,vec![1,2]);
|
||||
/// assert_eq!(*vec.first(), 0);
|
||||
/// ```
|
||||
pub fn first(&self) -> &T {
|
||||
&self.elems.first().expect("The NonEmptyVec always has an item in it.")
|
||||
}
|
||||
|
||||
/// Obtain a mutable reference to the head of the `NonEmptyVec`.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use enso_prelude::NonEmptyVec;
|
||||
/// let mut vec = NonEmptyVec::new(0,vec![1,2]);
|
||||
/// assert_eq!(*vec.first_mut(), 0);
|
||||
/// ```
|
||||
pub fn first_mut(&mut self) -> &mut T {
|
||||
self.elems.first_mut().expect("The NonEmptyVec always has an item in it.")
|
||||
}
|
||||
|
||||
/// Obtain an immutable reference to the last element in the `NonEmptyVec`.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use enso_prelude::NonEmptyVec;
|
||||
/// let vec = NonEmptyVec::new(0,vec![1,2]);
|
||||
/// assert_eq!(*vec.last(),2)
|
||||
/// ```
|
||||
pub fn last(&self) -> &T {
|
||||
self.get(self.len() - 1).expect("There is always one element in a NonEmptyVec.")
|
||||
}
|
||||
|
||||
/// Obtain a mutable reference to the last element in the `NonEmptyVec`.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use enso_prelude::NonEmptyVec;
|
||||
/// let mut vec = NonEmptyVec::new(0,vec![1,2]);
|
||||
/// assert_eq!(*vec.last_mut(),2)
|
||||
/// ```
|
||||
pub fn last_mut(&mut self) -> &mut T {
|
||||
self.get_mut(self.len() - 1).expect("There is always one element in a NonEmptyVec.")
|
||||
}
|
||||
|
||||
/// Create a draining iterator that removes the specified range in the vector and yields the
|
||||
/// removed items.
|
||||
///
|
||||
/// It will never remove the root element of the vector.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if the starting point is greater than the end point or if the end point is greater
|
||||
/// than the length of the vector.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use enso_prelude::NonEmptyVec;
|
||||
/// let mut vec = NonEmptyVec::new(0,vec![1,2,3,4,5]);
|
||||
/// let drained:Vec<i32> = vec.drain(1..=5).collect();
|
||||
/// assert_eq!(drained,[1,2,3,4,5])
|
||||
/// ```
|
||||
pub fn drain<R>(&mut self, range:R) -> Drain<T> where R:RangeBounds<usize> {
|
||||
if range.contains(&0) {
|
||||
match range.end_bound() {
|
||||
Bound::Included(n) => self.elems.drain(1..=*n),
|
||||
Bound::Excluded(n) => self.elems.drain(1..*n),
|
||||
Bound::Unbounded => self.elems.drain(1..)
|
||||
}
|
||||
} else {
|
||||
self.elems.drain(range)
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a splicing iterator that replaces the specified range in the vector with the given
/// `replace_with` iterator and yields the removed items.
|
||||
///
|
||||
/// `replace_with` does not need to be the same length as range. The element range is removed
|
||||
/// even if the iterator is not consumed until the end.
|
||||
///
|
||||
/// It is unspecified how many elements are removed from the vector if the Splice value is leaked.
|
||||
///
|
||||
/// The input iterator replace_with is only consumed when the Splice value is dropped.
|
||||
///
|
||||
/// # Panics
|
||||
///
|
||||
/// Panics if the starting point is greater than the end point or if the end point is greater
|
||||
/// than the length of the vector.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use enso_prelude::NonEmptyVec;
|
||||
/// let mut vec = NonEmptyVec::new(0,vec![1,2,3,4,5]);
|
||||
/// let replacements = [10,20,30,40];
|
||||
/// let yielded:Vec<_> = vec.splice(..2,replacements.iter().cloned()).collect();
|
||||
/// assert_eq!(vec.as_slice(),&[10,20,30,40,2,3,4,5]);
|
||||
/// assert_eq!(yielded,&[0,1])
|
||||
/// ```
|
||||
pub fn splice<R,I>(&mut self, range:R, replace_with:I) -> Splice<<I as IntoIterator>::IntoIter>
|
||||
where I: IntoIterator<Item = T>,
|
||||
R: RangeBounds<usize> {
|
||||
self.elems.splice(range,replace_with)
|
||||
}
|
||||
}
|
@ -1,143 +0,0 @@
|
||||
//! In mathematics, a semigroup is an algebraic structure consisting of a set together with an
|
||||
//! associative binary operation. A semigroup generalizes a monoid in that there might not exist an
|
||||
//! identity element. It also (originally) generalized a group (a monoid with all inverses) to a
|
||||
//! type where every element did not have to have an inverse, thus the name semigroup.
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::hash::BuildHasher;
|
||||
use std::hash::Hash;
|
||||
use std::iter::Extend;
|
||||
|
||||
|
||||
|
||||
// =================
|
||||
// === Semigroup ===
|
||||
// =================
|
||||
|
||||
/// Mutable Semigroup definition. Impls should satisfy the associativity law:
|
||||
/// `x.concat(y.concat(z)) = x.concat(y).concat(z)`, in symbolic form:
|
||||
/// `x <> (y <> z) = (x <> y) <> z`
|
||||
pub trait PartialSemigroup<T> : Clone {
|
||||
/// An associative operation.
|
||||
fn concat_mut(&mut self, other:T);
|
||||
|
||||
/// An associative operation.
|
||||
fn concat_ref(&self, other:T) -> Self where Self:Clone {
|
||||
self.clone().concat(other)
|
||||
}
|
||||
|
||||
/// An associative operation.
|
||||
fn concat(mut self, other:T) -> Self {
|
||||
self.concat_mut(other);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Semigroup for T where T : PartialSemigroup<T> + for<'t> PartialSemigroup<&'t T> {}

/// Semigroup definition. It is automatically implemented for every type which can be concatenated
/// both by value and by reference.
pub trait Semigroup : PartialSemigroup<Self> + for<'t> PartialSemigroup<&'t Self> {
/// Repeat the value so that it appears `n` times in total, mutating it in place.
fn partial_times_mut(&mut self, n:usize) {
|
||||
let val = self.clone();
|
||||
for _ in 0..n-1 {
|
||||
self.concat_mut(&val)
|
||||
}
|
||||
}
|
||||
|
||||
/// Like `partial_times_mut`, but consuming and returning the value.
fn partial_times(mut self, n:usize) -> Self {
|
||||
self.partial_times_mut(n);
|
||||
self
|
||||
}
|
||||
}
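
A hedged sketch of how a user-defined type might opt into these traits. The `Tags` wrapper is invented for illustration:

#[derive(Clone, Debug, Default, PartialEq)]
struct Tags(Vec<String>);

impl PartialSemigroup<&Tags> for Tags {
    fn concat_mut(&mut self, other: &Tags) {
        self.0.extend(other.0.iter().cloned())
    }
}

impl PartialSemigroup<Tags> for Tags {
    fn concat_mut(&mut self, other: Tags) {
        self.0.extend(other.0)
    }
}

// With both impls in place, the blanket `Semigroup` impl (and thus `Monoid`, since `Tags` is also
// `Default`) applies automatically.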
|
||||
|
||||
|
||||
|
||||
// ====================
|
||||
// === Stdlib Impls ===
|
||||
// ====================
|
||||
|
||||
// === Option ===
|
||||
|
||||
impl<T:Semigroup> PartialSemigroup<&Option<T>> for Option<T> {
|
||||
fn concat_mut(&mut self, other:&Self) {
|
||||
if let Some(r) = other {
|
||||
match self {
|
||||
None => *self = Some(r.clone()),
|
||||
Some(l) => l.concat_mut(r)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T:Semigroup> PartialSemigroup<Option<T>> for Option<T> {
|
||||
fn concat_mut(&mut self, other:Self) {
|
||||
if let Some(r) = other {
|
||||
match self {
|
||||
None => *self = Some(r),
|
||||
Some(l) => l.concat_mut(r)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// === HashMap ===
|
||||
|
||||
impl<K,V,S> PartialSemigroup<&HashMap<K,V,S>> for HashMap<K,V,S>
|
||||
where K : Eq + Hash + Clone,
|
||||
V : Semigroup,
|
||||
S : Clone + BuildHasher {
|
||||
fn concat_mut(&mut self, other:&Self) {
|
||||
for (key,new_val) in other {
|
||||
let key = key.clone();
|
||||
self.entry(key)
|
||||
.and_modify(|val| val.concat_mut(new_val))
|
||||
.or_insert_with(|| new_val.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<K,V,S> PartialSemigroup<HashMap<K,V,S>> for HashMap<K,V,S>
|
||||
where K : Eq + Hash + Clone,
|
||||
V : Semigroup,
|
||||
S : Clone + BuildHasher {
|
||||
fn concat_mut(&mut self, other:Self) {
|
||||
for (key,new_val) in other {
|
||||
self.entry(key)
|
||||
.and_modify(|val| val.concat_mut(&new_val))
|
||||
.or_insert(new_val);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// === Vec ===
|
||||
|
||||
impl<T:Clone> PartialSemigroup<&Vec<T>> for Vec<T> {
|
||||
fn concat_mut(&mut self, other:&Self) {
|
||||
self.extend(other.iter().cloned())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T:Clone> PartialSemigroup<Vec<T>> for Vec<T> {
|
||||
fn concat_mut(&mut self, other:Self) {
|
||||
self.extend(other.into_iter())
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =============
|
||||
// === Tests ===
|
||||
// =============
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn option() {
|
||||
assert_eq!(None::<Vec<usize>>.concat(&None) , None);
|
||||
assert_eq!(Some(vec![1]).concat(&None) , Some(vec![1]));
|
||||
assert_eq!(None.concat(&Some(vec![1])) , Some(vec![1]));
|
||||
assert_eq!(Some(vec![1]).concat(&Some(vec![2])) , Some(vec![1,2]));
|
||||
}
|
||||
}
|
@ -1,353 +0,0 @@
|
||||
//! This module re-exports a lot of useful functionality. It is not meant to be used
//! by libraries, but it is definitely useful for bigger projects. It also
//! defines several aliases and utilities which may find their place in new
//! libraries in the future.
|
||||
|
||||
#![feature(specialization)]
|
||||
#![feature(test)]
|
||||
#![feature(trait_alias)]
|
||||
#![warn(missing_copy_implementations)]
|
||||
#![warn(missing_debug_implementations)]
|
||||
#![warn(unsafe_code)]
|
||||
|
||||
mod clone;
|
||||
mod collections;
|
||||
mod data;
|
||||
mod macros;
|
||||
mod option;
|
||||
mod phantom;
|
||||
mod reference;
|
||||
mod std_reexports;
|
||||
mod string;
|
||||
mod tp;
|
||||
mod vec;
|
||||
mod wrapper;
|
||||
|
||||
pub use clone::*;
|
||||
pub use collections::*;
|
||||
pub use data::*;
|
||||
pub use macros::*;
|
||||
pub use option::*;
|
||||
pub use phantom::*;
|
||||
pub use reference::*;
|
||||
pub use std_reexports::*;
|
||||
pub use string::*;
|
||||
pub use tp::*;
|
||||
pub use vec::*;
|
||||
pub use wrapper::*;
|
||||
|
||||
pub use boolinator::Boolinator;
|
||||
pub use derivative::Derivative;
|
||||
pub use derive_more::*;
|
||||
pub use enclose::enclose;
|
||||
pub use failure::Fail;
|
||||
pub use ifmt::*;
|
||||
pub use itertools::Itertools;
|
||||
pub use lazy_static::lazy_static;
|
||||
pub use num::Num;
|
||||
pub use paste;
|
||||
pub use shrinkwraprs::Shrinkwrap;
|
||||
pub use weak_table::traits::WeakElement;
|
||||
pub use weak_table::traits::WeakKey;
|
||||
pub use weak_table::WeakKeyHashMap;
|
||||
pub use weak_table::WeakValueHashMap;
|
||||
pub use weak_table;
|
||||
|
||||
use std::cell::UnsafeCell;
|
||||
|
||||
|
||||
|
||||
// =================
|
||||
// === Immutable ===
|
||||
// =================
|
||||
|
||||
/// A zero-overhead newtype which provides immutable access to its content. Of course this does not
/// apply to internal mutability of the wrapped data. A good use case of this structure is when you
/// want to pass ownership of a structure and allow access to all its public fields, but do not
/// allow their modification.
|
||||
#[derive(Clone,Copy,Default)]
|
||||
pub struct Immutable<T> {
|
||||
data : T
|
||||
}
|
||||
|
||||
/// Constructor of the `Immutable` struct.
|
||||
#[allow(non_snake_case)]
|
||||
pub fn Immutable<T>(data:T) -> Immutable<T> {
|
||||
Immutable {data}
|
||||
}
|
||||
|
||||
impl<T:Debug> Debug for Immutable<T> {
|
||||
fn fmt(&self, f:&mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
self.data.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T:Display> Display for Immutable<T> {
|
||||
fn fmt(&self, f:&mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
self.data.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T:Clone> CloneRef for Immutable<T> {
|
||||
fn clone_ref(&self) -> Self {
|
||||
Self {data:self.data.clone()}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> AsRef<T> for Immutable<T> {
|
||||
fn as_ref(&self) -> &T {
|
||||
&self.data
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> std::borrow::Borrow<T> for Immutable<T> {
|
||||
fn borrow(&self) -> &T {
|
||||
&self.data
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Deref for Immutable<T> {
|
||||
type Target = T;
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.data
|
||||
}
|
||||
}
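
An illustrative sketch of the intended usage; the `Config` type and its fields are hypothetical:

struct Config {
    pub name    : Immutable<String>,
    pub retries : Immutable<usize>,
}

fn example() {
    let cfg = Config { name: Immutable("prod".into()), retries: Immutable(3) };
    assert_eq!(*cfg.retries, 3); // Fields can be read through `Deref`...
    // *cfg.retries = 4;         // ...but this line would not compile, as the content is immutable.
}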
|
||||
|
||||
|
||||
|
||||
// ==============
|
||||
// === ToImpl ===
|
||||
// ==============
|
||||
|
||||
/// Provides the method `to`, which is just like `into` but additionally allows for turbofish syntax.
|
||||
pub trait ToImpl: Sized {
|
||||
fn to<P>(self) -> P where Self:Into<P> {
|
||||
self.into()
|
||||
}
|
||||
}
|
||||
impl<T> ToImpl for T {}
|
||||
|
||||
// TODO
|
||||
// This impl should be hidden behind a flag. Not everybody using the prelude wants to import nalgebra.
|
||||
impl <T,R,C,S> TypeDisplay for nalgebra::Matrix<T,R,C,S>
|
||||
where T:nalgebra::Scalar, R:nalgebra::DimName, C:nalgebra::DimName {
|
||||
fn type_display() -> String {
|
||||
let cols = <C as nalgebra::DimName>::dim();
|
||||
let rows = <R as nalgebra::DimName>::dim();
|
||||
let item = type_name::<T>();
|
||||
match cols {
|
||||
1 => format!("Vector{}<{}>" , rows, item),
|
||||
_ => format!("Matrix{}x{}<{}>" , rows, cols, item)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! clone_boxed {
|
||||
( $name:ident ) => { paste::item! {
|
||||
#[allow(missing_docs)]
|
||||
pub trait [<CloneBoxedFor $name>] {
|
||||
fn clone_boxed(&self) -> Box<dyn $name>;
|
||||
}
|
||||
|
||||
impl<T:Clone+$name+'static> [<CloneBoxedFor $name>] for T {
|
||||
fn clone_boxed(&self) -> Box<dyn $name> {
|
||||
Box::new(self.clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for Box<dyn $name> {
|
||||
fn clone(&self) -> Self {
|
||||
self.clone_boxed()
|
||||
}
|
||||
}
|
||||
}}
|
||||
}
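
A small usage sketch; the `Shape` trait and `Square` type are invented for this example:

trait Shape {
    fn area(&self) -> f32;
}
clone_boxed!(Shape);

#[derive(Clone)]
struct Square { side: f32 }

impl Shape for Square {
    fn area(&self) -> f32 { self.side * self.side }
}

fn example() {
    let boxed: Box<dyn Shape> = Box::new(Square { side: 2.0 });
    let copy = boxed.clone(); // Possible because the macro provided `Clone` for `Box<dyn Shape>`.
    assert_eq!(copy.area(), 4.0);
}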
|
||||
|
||||
/// Alias for `for<'t> &'t Self : Into<T>`.
|
||||
pub trait RefInto<T> = where for<'t> &'t Self : Into<T>;
|
||||
|
||||
|
||||
|
||||
// =================
|
||||
// === CloneCell ===
|
||||
// =================
|
||||
|
||||
/// A cell which hands out clones of its content instead of references.
#[derive(Debug)]
pub struct CloneCell<T> {
|
||||
data : UnsafeCell<T>
|
||||
}
|
||||
|
||||
impl<T> CloneCell<T> {
|
||||
pub fn new(elem:T) -> CloneCell<T> {
|
||||
CloneCell { data:UnsafeCell::new(elem) }
|
||||
}
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
pub fn get(&self) -> T where T:Clone {
|
||||
unsafe {(*self.data.get()).clone()}
|
||||
}
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
pub fn set(&self, elem:T) {
|
||||
unsafe { *self.data.get() = elem; }
|
||||
}
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
pub fn take(&self) -> T where T:Default {
|
||||
let ptr:&mut T = unsafe { &mut *self.data.get() };
|
||||
std::mem::take(ptr)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T:Clone> Clone for CloneCell<T> {
|
||||
fn clone(&self) -> Self {
|
||||
Self::new(self.get())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T:Default> Default for CloneCell<T> {
|
||||
fn default() -> Self {
|
||||
Self::new(default())
|
||||
}
|
||||
}
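
A brief usage sketch of `CloneCell`:

fn example() {
    let cell = CloneCell::new(vec![1, 2, 3]);
    let snapshot = cell.get();    // Clones the stored value out of the cell.
    cell.set(vec![4, 5]);         // Replaces the stored value without requiring `&mut`.
    assert_eq!(snapshot, vec![1, 2, 3]);
    assert_eq!(cell.get(), vec![4, 5]);
}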
|
||||
|
||||
|
||||
|
||||
// ====================
// === CloneRefCell ===
// ====================
|
||||
|
||||
/// Just like `CloneCell`, but uses `CloneRef` instead of `Clone` to duplicate its content.
#[derive(Debug)]
pub struct CloneRefCell<T> {
|
||||
data : UnsafeCell<T>
|
||||
}
|
||||
|
||||
impl<T> CloneRefCell<T> {
|
||||
pub fn new(elem:T) -> CloneRefCell<T> {
|
||||
CloneRefCell { data:UnsafeCell::new(elem) }
|
||||
}
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
pub fn get(&self) -> T where T:CloneRef {
|
||||
unsafe {(*self.data.get()).clone_ref()}
|
||||
}
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
pub fn set(&self, elem:T) {
|
||||
unsafe { *self.data.get() = elem; }
|
||||
}
|
||||
|
||||
#[allow(unsafe_code)]
|
||||
pub fn take(&self) -> T where T:Default {
|
||||
let ptr:&mut T = unsafe { &mut *self.data.get() };
|
||||
std::mem::take(ptr)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T:CloneRef> Clone for CloneRefCell<T> {
|
||||
fn clone(&self) -> Self {
|
||||
Self::new(self.get())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T:CloneRef> CloneRef for CloneRefCell<T> {
|
||||
fn clone_ref(&self) -> Self {
|
||||
Self::new(self.get())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T:Default> Default for CloneRefCell<T> {
|
||||
fn default() -> Self {
|
||||
Self::new(default())
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ================================
|
||||
// === RefCell<Option<T>> Utils ===
|
||||
// ================================
|
||||
|
||||
pub trait RefcellOptionOps<T> {
|
||||
fn clear(&self);
|
||||
fn set(&self, val:T);
|
||||
fn set_if_none(&self, val:T);
|
||||
}
|
||||
|
||||
impl<T> RefcellOptionOps<T> for RefCell<Option<T>> {
|
||||
fn clear(&self) {
|
||||
*self.borrow_mut() = None;
|
||||
}
|
||||
|
||||
fn set(&self, val:T) {
|
||||
*self.borrow_mut() = Some(val);
|
||||
}
|
||||
|
||||
fn set_if_none(&self, val:T) {
|
||||
let mut ptr = self.borrow_mut();
|
||||
if ptr.is_some() { panic!("The value was already set.") }
|
||||
*ptr = Some(val)
|
||||
}
|
||||
}
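
A short usage sketch:

fn example() {
    let slot: RefCell<Option<u32>> = RefCell::new(None);
    slot.set_if_none(1);          // Ok: the slot was empty.
    slot.set(2);                  // Overwrites the content unconditionally.
    slot.clear();                 // Back to `None`.
    assert!(slot.borrow().is_none());
}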
|
||||
|
||||
|
||||
|
||||
// ================================
|
||||
// === Strong / Weak References ===
|
||||
// ================================
|
||||
|
||||
/// Abstraction for a strong reference like `Rc` or newtypes over it.
|
||||
pub trait StrongRef : CloneRef {
|
||||
/// Downgraded reference type.
|
||||
type WeakRef : WeakRef<StrongRef=Self>;
|
||||
/// Creates a new weak reference of this allocation.
|
||||
fn downgrade(&self) -> Self::WeakRef;
|
||||
}
|
||||
|
||||
/// Abstraction for a weak reference like `Weak` or newtypes over it.
|
||||
pub trait WeakRef : CloneRef {
|
||||
/// Upgraded reference type.
|
||||
type StrongRef : StrongRef<WeakRef=Self>;
|
||||
/// Attempts to upgrade the weak reference to a strong one, delaying dropping of the inner value
|
||||
/// if successful.
|
||||
fn upgrade(&self) -> Option<Self::StrongRef>;
|
||||
}
|
||||
|
||||
impl<T:?Sized> StrongRef for Rc<T> {
|
||||
type WeakRef = Weak<T>;
|
||||
fn downgrade(&self) -> Self::WeakRef {
|
||||
Rc::downgrade(&self)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T:?Sized> WeakRef for Weak<T> {
|
||||
type StrongRef = Rc<T>;
|
||||
fn upgrade(&self) -> Option<Self::StrongRef> {
|
||||
Weak::upgrade(self)
|
||||
}
|
||||
}
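
A hedged sketch of newtypes over `Rc` and `Weak` implementing these traits; the `Model` and `WeakModel` types are invented for illustration:

#[derive(Clone)]
struct Model { data: Rc<String> }

#[derive(Clone)]
struct WeakModel { data: Weak<String> }

impl CloneRef for Model {
    fn clone_ref(&self) -> Self { self.clone() }
}

impl CloneRef for WeakModel {
    fn clone_ref(&self) -> Self { self.clone() }
}

impl StrongRef for Model {
    type WeakRef = WeakModel;
    fn downgrade(&self) -> WeakModel {
        WeakModel { data: Rc::downgrade(&self.data) }
    }
}

impl WeakRef for WeakModel {
    type StrongRef = Model;
    fn upgrade(&self) -> Option<Model> {
        self.data.upgrade().map(|data| Model { data })
    }
}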
|
||||
|
||||
|
||||
|
||||
// ==================
|
||||
// === Result Ops ===
|
||||
// ==================
|
||||
|
||||
/// Allows extracting the element from `Result<T,T>` for any `T`.
|
||||
#[allow(missing_docs)]
|
||||
pub trait ResultGet {
|
||||
type Item;
|
||||
/// Allows extracting the element from `Result<T,T>` for any `T`.
|
||||
fn unwrap_both(self) -> Self::Item;
|
||||
}
|
||||
|
||||
impl<T> ResultGet for Result<T,T> {
|
||||
type Item = T;
|
||||
fn unwrap_both(self) -> T {
|
||||
match self {
|
||||
Ok (t) => t,
|
||||
Err (t) => t,
|
||||
}
|
||||
}
|
||||
}
|
@ -1,194 +0,0 @@
|
||||
//! This macro defines set of common macros which are useful across different projects.
|
||||
|
||||
|
||||
/// Allows for nicer definition of impls, similar to what Haskell or Scala does. Reduces the needed
|
||||
/// boilerplate. For example, the following usage:
|
||||
///
|
||||
/// ```compile_fail
|
||||
/// struct A { name:String };
|
||||
/// impls! { From<A> for String { |t| t.name.clone() } }
|
||||
/// ```
|
||||
///
|
||||
/// compiles to:
|
||||
/// ```
|
||||
/// struct A { name:String };
|
||||
/// impl From<A> for String {
|
||||
/// fn from(t:A) -> Self {
|
||||
/// t.name.clone()
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// This macro is meant to support many standard traits (like From) and should grow in the future.
|
||||
/// Currently supported ones are:
|
||||
/// * From<…>
|
||||
/// * From + &From<…>
|
||||
/// * Into + &Into<…>
|
||||
/// * PhantomFrom<…>
|
||||
#[macro_export]
|
||||
macro_rules! impls {
|
||||
($([$($impl_params:tt)*])? From<$ty:ty> for $target:ty $(where [$($bounds:tt)*])? {
|
||||
|$arg:tt| $($result:tt)*
|
||||
} ) => {
|
||||
#[allow(clippy::redundant_closure_call)]
|
||||
impl <$($($impl_params)*)?> From <$ty> for $target $(where $($bounds)*)? {
|
||||
fn from (arg:$ty) -> Self {
|
||||
(|$arg:$ty| $($result)*)(arg)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
($([$($impl_params:tt)*])? From + &From <$ty:ty> for $target:ty $(where [$($bounds:tt)*])? {
|
||||
|$arg:tt| $($result:tt)*
|
||||
} ) => {
|
||||
#[allow(clippy::redundant_closure_call)]
|
||||
#[allow(clippy::identity_conversion)]
|
||||
impl <$($($impl_params)*)?> From <$ty> for $target $(where $($bounds)*)? {
|
||||
fn from (arg:$ty) -> Self {
|
||||
(|$arg:$ty| $($result)*)(arg)
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::redundant_closure_call)]
|
||||
#[allow(clippy::identity_conversion)]
|
||||
impl <$($($impl_params)*)?> From <&$ty> for $target $(where $($bounds)*)? {
|
||||
fn from (arg:&$ty) -> Self {
|
||||
(|$arg:&$ty| $($result)*)(arg)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
($([$($impl_params:tt)*])? Into + &Into <$ty:ty> for $target:ty $(where [$($bounds:tt)*])? {
|
||||
|$arg:tt| $($result:tt)*
|
||||
} ) => {
|
||||
#[allow(clippy::redundant_closure_call)]
|
||||
#[allow(clippy::identity_conversion)]
|
||||
impl <$($($impl_params)*)?> Into <$ty> for $target $(where $($bounds)*)? {
|
||||
fn into(self) -> $ty {
|
||||
(|$arg:Self| $($result)*)(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::redundant_closure_call)]
|
||||
#[allow(clippy::identity_conversion)]
|
||||
impl <$($($impl_params)*)?> Into <$ty> for &$target $(where $($bounds)*)? {
|
||||
fn into(self) -> $ty {
|
||||
(|$arg:Self| $($result)*)(self)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
($([$($impl_params:tt)*])? PhantomFrom<$ty:ty> for $target:ty {
|
||||
$($result:tt)*
|
||||
} ) => {
|
||||
impl <$($($impl_params)*)?> From <PhantomData<$ty>> for $target {
|
||||
fn from (_:PhantomData<$ty>) -> Self {
|
||||
$($result)*
|
||||
}
|
||||
}
|
||||
};
|
||||
}
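
For the `From + &From` variant, a hypothetical usage could look like this (the `Name` type is invented here):

struct Name { text: String }

impls! { From + &From <Name> for String { |t| t.text.clone() } }

fn example() {
    let name = Name { text: "enso".into() };
    let s1: String = (&name).into(); // Uses the generated `From<&Name>` impl.
    let s2: String = name.into();    // Uses the generated `From<Name>` impl.
    assert_eq!(s1, s2);
}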
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! alias {
|
||||
($( $(#$meta:tt)* $name:ident = {$($tok:tt)*} )*) => {$(
|
||||
$(#$meta)*
|
||||
pub trait $name: $($tok)* {}
|
||||
impl<T:$($tok)*> $name for T {}
|
||||
)*};
|
||||
|
||||
(no_docs $( $(#$meta:tt)* $name:ident = {$($tok:tt)*} )*) => {$(
|
||||
$(#$meta)*
|
||||
#[allow(missing_docs)]
|
||||
pub trait $name: $($tok)* {}
|
||||
impl<T:$($tok)*> $name for T {}
|
||||
)*};
|
||||
}
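
An illustrative use of the alias macro; the `CloneDebug` alias is invented for this example:

alias! {
    /// Types which are both `Clone` and `Debug`.
    CloneDebug = { Clone + std::fmt::Debug }
}

fn describe<T: CloneDebug>(t: &T) -> String {
    format!("{:?}", t.clone())
}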
|
||||
|
||||
|
||||
|
||||
// ==============
|
||||
// === Lambda ===
|
||||
// ==============
|
||||
|
||||
/// Clones all arguments from the first argument list by using `CloneRef` and defines lambda with
|
||||
/// arguments from the second argument list (if present). For example, the following usage
|
||||
///
|
||||
/// ```compile_fail
|
||||
/// f! { (a,b)(c) a + b + c }
|
||||
/// ```
|
||||
///
|
||||
/// is equivalent to:
|
||||
///
|
||||
/// ```compile_fail
|
||||
/// {
|
||||
/// let a = a.clone_ref();
|
||||
/// let b = b.clone_ref();
|
||||
/// move |c| { a + b + c }
|
||||
/// }
|
||||
/// ```
|
||||
#[macro_export]
|
||||
macro_rules! f {
|
||||
([$($name:ident),*] ($($args:tt)*) $($expr:tt)*) => {
|
||||
{
|
||||
$(let $name = $name.clone_ref();)*
|
||||
move |$($args)*| { $($expr)* }
|
||||
}
|
||||
};
|
||||
|
||||
([$($name:ident),*] $($expr:tt)*) => {
|
||||
{
|
||||
$(let $name = $name.clone_ref();)*
|
||||
move || { $($expr)* }
|
||||
}
|
||||
};
|
||||
|
||||
(($($args:tt)*) $name:ident . $($toks:tt)*) => {
|
||||
f! { [$name] ($($args)*) $name . $($toks)* }
|
||||
};
|
||||
|
||||
(($($args:tt)*) { $name:ident . $($toks:tt)* }) => {
|
||||
f! { [$name] ($($args)*) { $name . $($toks)* } }
|
||||
};
|
||||
|
||||
($name:ident . $($toks:tt)*) => {
|
||||
f! { [$name] $name . $($toks)* }
|
||||
};
|
||||
}
|
||||
|
||||
/// Variant of the `f` macro producing a lambda which drops its first argument.
|
||||
#[macro_export]
|
||||
macro_rules! f_ {
|
||||
([$($name:ident),*] $($expr:tt)*) => {
|
||||
f! { [$($name),*] (_) $($expr)* }
|
||||
};
|
||||
|
||||
($name:ident . $($toks:tt)*) => {
|
||||
f_! { [$name] $name . $($toks)* }
|
||||
};
|
||||
|
||||
( { $name:ident . $($toks:tt)* } ) => {
|
||||
f_! { [$name] { $name . $($toks)* } }
|
||||
};
|
||||
}
|
||||
|
||||
/// A macro for use in situations where the code is unreachable.
|
||||
///
|
||||
/// This macro will panic in debug builds, but in release builds it expands to
|
||||
/// the unsafe [`std::hint::unreachable_unchecked()`] function, which allows the
|
||||
/// compiler to optimise more.
|
||||
#[macro_export]
|
||||
macro_rules! unreachable_panic {
|
||||
() => (
|
||||
unreachable_panic!("This code was marked as unreachable.")
|
||||
);
|
||||
($msg:tt) => (
|
||||
if cfg!(debug_assertions) {
|
||||
panic!($msg)
|
||||
} else {
|
||||
use std::hint::unreachable_unchecked;
|
||||
#[allow(unsafe_code)]
|
||||
unsafe { unreachable_unchecked() }
|
||||
}
|
||||
)
|
||||
}
|
@ -1,25 +0,0 @@
|
||||
//! This module defines utilities for working with the [`std::option::Option`] type.
|
||||
|
||||
/// Adds mapping methods to the `Option` type.
|
||||
pub trait OptionOps {
|
||||
type Item;
|
||||
fn map_ref <U,F> (&self , f:F) -> Option<U> where F : FnOnce(&Self::Item) -> U;
|
||||
fn for_each <U,F> (self , f:F) where F : FnOnce(Self::Item) -> U;
|
||||
fn for_each_ref <U,F> (&self , f:F) where F : FnOnce(&Self::Item) -> U;
|
||||
}
|
||||
|
||||
impl<T> OptionOps for Option<T> {
|
||||
type Item = T;
|
||||
|
||||
fn map_ref<U,F>(&self, f:F) -> Option<U> where F : FnOnce(&Self::Item) -> U {
|
||||
self.as_ref().map(f)
|
||||
}
|
||||
|
||||
fn for_each<U,F>(self, f:F) where F : FnOnce(Self::Item) -> U {
|
||||
if let Some(x) = self { f(x); }
|
||||
}
|
||||
|
||||
fn for_each_ref<U,F>(&self, f:F) where F : FnOnce(&Self::Item) -> U {
|
||||
if let Some(x) = self { f(x); }
|
||||
}
|
||||
}
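
A short usage sketch:

fn example() {
    let name: Option<String> = Some("enso".to_string());
    // `map_ref` maps over a reference without consuming the option.
    assert_eq!(name.map_ref(|s| s.len()), Some(4));
    // `for_each_ref` runs a side effect only when a value is present.
    name.for_each_ref(|s| println!("{}", s));
}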
|
@ -1,94 +0,0 @@
|
||||
//! This module defines utilities for working with PhantomData.
|
||||
|
||||
use super::std_reexports::*;
|
||||
use derivative::Derivative;
|
||||
use shrinkwraprs::Shrinkwrap;
|
||||
|
||||
|
||||
|
||||
// ===================
|
||||
// === PhantomData ===
|
||||
// ===================
|
||||
|
||||
/// The following `PhantomData` type aliases allow each argument to be non-`Sized`.
/// Unfortunately, this is not equivalent to `PhantomData<(T1,T2,...)>`, as a tuple requires each
/// of its arguments to implement `Sized`.
|
||||
pub type PhantomData2<T1,T2> = PhantomData<(PhantomData <T1>, PhantomData<T2>)>;
|
||||
pub type PhantomData3<T1,T2,T3> = PhantomData2<PhantomData2<T1,T2>, PhantomData<T3>>;
|
||||
pub type PhantomData4<T1,T2,T3,T4> = PhantomData2<PhantomData3<T1,T2,T3>, PhantomData<T4>>;
|
||||
pub type PhantomData5<T1,T2,T3,T4,T5> = PhantomData2<PhantomData4<T1,T2,T3,T4>, PhantomData<T5>>;
|
||||
pub type PhantomData6<T1,T2,T3,T4,T5,T6> = PhantomData2<PhantomData5<T1,T2,T3,T4,T5>, PhantomData<T6>>;
|
||||
pub type PhantomData7<T1,T2,T3,T4,T5,T6,T7> = PhantomData2<PhantomData6<T1,T2,T3,T4,T5,T6>, PhantomData<T7>>;
|
||||
pub type PhantomData8<T1,T2,T3,T4,T5,T6,T7,T8> = PhantomData2<PhantomData7<T1,T2,T3,T4,T5,T6,T7>, PhantomData<T8>>;
|
||||
pub type PhantomData9<T1,T2,T3,T4,T5,T6,T7,T8,T9> = PhantomData2<PhantomData8<T1,T2,T3,T4,T5,T6,T7,T8>, PhantomData<T9>>;
|
||||
|
||||
|
||||
|
||||
// ===================
|
||||
// === WithPhantom ===
|
||||
// ===================
|
||||
|
||||
/// A wrapper adding a phantom type to a structure.
|
||||
#[derive(Derivative)]
|
||||
#[derive(Shrinkwrap)]
|
||||
#[shrinkwrap(mutable)]
|
||||
#[derivative(Clone (bound="T:Clone"))]
|
||||
#[derivative(Default (bound="T:Default"))]
|
||||
#[derivative(Debug (bound="T:Debug"))]
|
||||
pub struct WithPhantom<T, P=()> {
|
||||
#[shrinkwrap(main_field)]
|
||||
pub without_phantom: T,
|
||||
phantom: PhantomData<P>
|
||||
}
|
||||
|
||||
impl<T, P> WithPhantom<T, P> {
|
||||
/// Constructor.
pub fn new(without_phantom: T) -> Self {
|
||||
let phantom = PhantomData;
|
||||
Self { without_phantom, phantom }
|
||||
}
|
||||
}
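// An illustrative sketch (not present in the original file): `WithPhantom` can tag a plain value
// with a compile-time-only marker. The `Meters` marker type below is hypothetical.
struct Meters;

fn with_phantom_demo() {
    let distance: WithPhantom<f64,Meters> = WithPhantom::new(10.0);
    // Thanks to the `Shrinkwrap` derive, the wrapped value is reachable through deref.
    assert_eq!(*distance, 10.0);
}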
|
||||
|
||||
|
||||
|
||||
// ==========================
|
||||
// === PhantomConversions ===
|
||||
// ==========================
|
||||
|
||||
/// A utility for easy driving of type-level computations from value level. Often we've got some
|
||||
/// type level relations, like a few singleton types, and for each such type we've got an associated
|
||||
/// value. For example, we can define types `Int` and `Float` and associate with them
|
||||
/// `WebGlContext::Int` and `WebGlContext::Float` constants encoded as `GlEnum`. In order to convert
|
||||
/// `Int` or `Float` to the `GlEnum` we do not need an instance of these types, only the information
|
||||
/// about which type was used. So we can define:
|
||||
///
|
||||
/// ```compile_fail
|
||||
/// impl From<PhantomData<Int>> for GlEnum {
|
||||
///     fn from(_:PhantomData<Int>) -> Self {
|
||||
/// GlEnum(WebGlContext::Int)
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// And use it like:
|
||||
///
|
||||
/// ```compile_fail
|
||||
/// let val = GlEnum::from(PhantomData::<Int>)
|
||||
/// ```
|
||||
///
|
||||
/// Using this utility we can always write the following code instead:
|
||||
///
|
||||
/// ```compile_fail
|
||||
/// let val = GlEnum::phantom_from::<Int>()
|
||||
/// ```
|
||||
pub trait PhantomConversions: Sized {
|
||||
fn phantom_into<P>() -> P where Self:PhantomInto<P> {
|
||||
PhantomData::<Self>.into()
|
||||
}
|
||||
fn phantom_from<P:PhantomInto<Self>>() -> Self {
|
||||
PhantomData::<P>.into()
|
||||
}
|
||||
}
|
||||
impl<T> PhantomConversions for T {}
|
||||
|
||||
/// Like `Into` but for phantom types.
|
||||
pub trait PhantomInto<T> = where PhantomData<Self>: Into<T>;
|
@ -1,78 +0,0 @@
|
||||
//! This module defines helpers and utilities for working with references.
|
||||
|
||||
// ============
|
||||
// === With ===
|
||||
// ============
|
||||
|
||||
/// Surprisingly useful function. Consider the following code:
|
||||
///
|
||||
/// ```compile_fail
|
||||
/// fn init(self) -> Self {
|
||||
/// let mut data = self.borrow_mut();
|
||||
/// ...
|
||||
/// self
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// It may not compile, complaining that the last line moves `self` out while it is still
|
||||
/// borrowed, because `data` keeps the borrow alive until it is dropped at the end of the scope.
|
||||
///
|
||||
/// We can use this function to narrow down the lifetimes. The following code
|
||||
/// compiles just fine:
|
||||
///
|
||||
/// ```compile_fail
|
||||
/// fn init(self) -> Self {
|
||||
/// with(self.borrow_mut(), |mut data| {
|
||||
/// ...
|
||||
/// });
|
||||
/// self
|
||||
/// }
|
||||
/// ```
|
||||
pub fn with<T, F: FnOnce(T) -> Out, Out>(t: T, f: F) -> Out { f(t) }
|
||||
|
||||
|
||||
|
||||
// =============
|
||||
// === ToRef ===
|
||||
// =============
|
||||
|
||||
/// Similar to `AsRef` but more specific and automatically implemented for every type. Allows for
|
||||
/// conversion `&T` to `&T` (identity) and `T` to `&T` for any type `T`. In contrast, `AsRef`
|
||||
/// requires explicit impls, so, for example, you cannot do `let t:&() = ().as_ref()`.
|
||||
pub trait ToRef<T> where T:?Sized { fn to_ref(&self) -> &T; }
|
||||
impl<T> ToRef<T> for T where T:?Sized { fn to_ref(&self) -> &T { self } }
|
||||
impl<T> ToRef<T> for &T where T:?Sized { fn to_ref(&self) -> &T { self } }
|
||||
|
||||
// pub trait ToRef = ?Sized + HasRefValue + ToRef__<RefValue<Self>>;
|
||||
|
||||
pub trait HasRefValue where { type RefValue:?Sized; }
|
||||
impl <T> HasRefValue for T where T:?Sized { default type RefValue=T; }
|
||||
impl <T> HasRefValue for &T where T:?Sized { type RefValue=T; }
|
||||
|
||||
pub type RefValue<T> = <T as HasRefValue>::RefValue;
|
||||
|
||||
|
||||
|
||||
// =============
|
||||
// === Owned ===
|
||||
// =============
|
||||
|
||||
/// The owned version of a type. It would be super cool if Rust allowed us to automatically
|
||||
/// implement it for every type: `Owned<&T> = T` and `Owned<T> = T` if `T` is not a reference.
|
||||
/// Unfortunately, we need to implement it by hand for every type now.
|
||||
pub trait AsOwned {
|
||||
type Owned;
|
||||
}
|
||||
|
||||
/// Owned type family.
|
||||
pub type Owned<T> = <T as AsOwned>::Owned;
|
||||
|
||||
/// Converts type to its owned version.
|
||||
pub trait IntoOwned = AsOwned + Into<Owned<Self>>;
|
||||
|
||||
|
||||
// === Default Impls ===
|
||||
|
||||
impl<T> AsOwned for &T {
|
||||
type Owned = T;
|
||||
}
|
@ -1,70 +0,0 @@
|
||||
//! This module reexports several commonly used types defined in the standard library.
|
||||
|
||||
// === Data ===
|
||||
|
||||
pub use std::any::Any;
|
||||
pub use std::borrow::Cow;
|
||||
pub use std::hash::Hash;
|
||||
pub use std::marker::PhantomData;
|
||||
pub use std::ops::Range;
|
||||
pub use std::ops::RangeBounds;
|
||||
pub use std::ops::RangeFrom;
|
||||
pub use std::ops::RangeFull;
|
||||
pub use std::ops::RangeInclusive;
|
||||
pub use std::ops::RangeTo;
|
||||
pub use std::ops::RangeToInclusive;
|
||||
|
||||
// === Format ===
|
||||
|
||||
pub use core::any::type_name;
|
||||
pub use core::fmt::Debug;
|
||||
pub use std::fmt::Display;
|
||||
pub use std::fmt;
|
||||
pub use std::iter::FromIterator;
|
||||
pub use std::iter;
|
||||
|
||||
|
||||
// === Data Operations ===
|
||||
|
||||
pub use std::ops::Deref;
|
||||
pub use std::ops::DerefMut;
|
||||
pub use std::ops::Index;
|
||||
pub use std::ops::IndexMut;
|
||||
|
||||
|
||||
// === Conversion ===
|
||||
|
||||
pub use std::convert::identity;
|
||||
pub use std::convert::TryFrom;
|
||||
pub use std::convert::TryInto;
|
||||
|
||||
|
||||
// === References ===
|
||||
|
||||
pub use std::cell::Cell;
|
||||
pub use std::cell::Ref;
|
||||
pub use std::cell::RefCell;
|
||||
pub use std::cell::RefMut;
|
||||
pub use std::rc::Rc;
|
||||
pub use std::rc::Weak;
|
||||
pub use std::slice::SliceIndex;
|
||||
pub use std::slice;
|
||||
|
||||
|
||||
// === Operators ===
|
||||
|
||||
pub use std::ops::Add;
|
||||
pub use std::ops::Div;
|
||||
pub use std::ops::Mul;
|
||||
pub use std::ops::Neg;
|
||||
pub use std::ops::Sub;
|
||||
|
||||
|
||||
// === Utils ===
|
||||
|
||||
pub use std::mem;
|
||||
|
||||
/// Alias for `Default::default()`.
|
||||
pub fn default<T:Default>() -> T {
|
||||
Default::default()
|
||||
}
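// A tiny usage sketch added for illustration (not part of the original file): `default()` lets
// type inference pick the target type, which reads nicely in local bindings and initialisers.
fn default_demo() {
    let counter : u32         = default();
    let names   : Vec<String> = default();
    assert_eq!(counter, 0);
    assert!(names.is_empty());
}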
|
@ -1,247 +0,0 @@
|
||||
//! This module defines several useful string variants, including copy-on-write and immutable
|
||||
//! implementations.
|
||||
|
||||
use std::borrow::Cow;
|
||||
|
||||
use crate::impls;
|
||||
use crate::clone::*;
|
||||
use std::ops::Deref;
|
||||
use std::rc::Rc;
|
||||
use derive_more::*;
|
||||
|
||||
|
||||
|
||||
// ===========
|
||||
// === Str ===
|
||||
// ===========
|
||||
|
||||
/// Abstraction for any kind of string as an argument. Functions defined as
|
||||
/// `fn test<S:Str>(s: S) { ... }` can be called with `String`, `&String`, and `&str` without
|
||||
/// requiring the caller to know the implementation details. Moreover, the definition can decide if it
|
||||
/// needs allocation or not. Calling `s.as_ref()` will never allocate, while `s.into()` will
|
||||
/// allocate only when necessary.
|
||||
pub trait Str = Into<String> + AsRef<str>;
|
||||
|
||||
|
||||
|
||||
// =================
|
||||
// === CowString ===
|
||||
// =================
|
||||
|
||||
// === Definition ===
|
||||
|
||||
/// A copy-on-write String implementation. It is a newtype wrapper for `Cow<'static,str>` and
|
||||
/// provides many useful impls for efficient workflow. Use it whenever you want to store a string
|
||||
/// but you are not sure if the string will be allocated or not. This way you can store a static
|
||||
/// slice for as long as possible and switch to an allocated `String` only on demand.
|
||||
#[derive(Clone,Debug,Default,Display)]
|
||||
pub struct CowString(Cow<'static,str>);
|
||||
|
||||
|
||||
// === Conversions From CowString ===
|
||||
|
||||
impls!{ From <&CowString> for String { |t| t.clone().into() } }
|
||||
impls!{ From <CowString> for String { |t| t.0.into() } }
|
||||
|
||||
|
||||
// === Conversions To CowString ===
|
||||
|
||||
impls!{ From <Cow<'static,str>> for CowString { |t| Self(t) } }
|
||||
impls!{ From <&Cow<'static,str>> for CowString { |t| Self(t.clone()) } }
|
||||
impls!{ From <&'static str> for CowString { |t| Self(t.into()) } }
|
||||
impls!{ From <String> for CowString { |t| Self(t.into()) } }
|
||||
impls!{ From <&String> for CowString { |t| t.to_string().into() } }
|
||||
impls!{ From <&CowString> for CowString { |t| t.clone() } }
|
||||
|
||||
|
||||
// === Instances ===
|
||||
|
||||
impl Deref for CowString {
|
||||
type Target = str;
|
||||
fn deref(&self) -> &str {
|
||||
self.0.deref()
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<str> for CowString {
|
||||
fn as_ref(&self) -> &str {
|
||||
self.deref()
|
||||
}
|
||||
}
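// A usage sketch added for illustration (not part of the original file). It shows that
// `CowString` borrows static slices and only carries an allocation when built from an owned
// `String`.
fn cow_string_demo() {
    // Built from a static slice: no allocation is performed here.
    let label : CowString = "static label".into();
    // Built from an owned `String`: the existing allocation is reused.
    let name  : CowString = String::from("dynamic name").into();
    assert_eq!(label.len(), 12);
    assert_eq!(&*name, "dynamic name");
}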
|
||||
|
||||
|
||||
|
||||
// ================
|
||||
// === ImString ===
|
||||
// ================
|
||||
|
||||
/// An immutable string with a cheap `clone` implementation.
|
||||
#[derive(Clone,CloneRef,Debug,Default,Eq,Hash,PartialEq)]
|
||||
pub struct ImString {
|
||||
content : Rc<String>
|
||||
}
|
||||
|
||||
impl ImString {
|
||||
/// Constructor.
|
||||
pub fn new(content:impl Into<String>) -> Self {
|
||||
let content = Rc::new(content.into());
|
||||
Self {content}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ImString {
|
||||
fn fmt(&self, f:&mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f,"{}",self.content)
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for ImString {
|
||||
type Target = str;
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.content
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<ImString> for ImString {
|
||||
fn as_ref(&self) -> &ImString {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<String> for ImString {
|
||||
fn as_ref(&self) -> &String {
|
||||
self.content.as_ref()
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<str> for ImString {
|
||||
fn as_ref(&self) -> &str {
|
||||
self.content.as_ref()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<String> for ImString {
|
||||
fn from(t:String) -> Self {
|
||||
Self::new(t)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&String> for ImString {
|
||||
fn from(t:&String) -> Self {
|
||||
Self::new(t)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&&String> for ImString {
|
||||
fn from(t:&&String) -> Self {
|
||||
Self::new(*t)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&str> for ImString {
|
||||
fn from(t:&str) -> Self {
|
||||
Self::new(t)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&&str> for ImString {
|
||||
fn from(t:&&str) -> Self {
|
||||
Self::new(*t)
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq<String> for ImString {
|
||||
fn eq(&self, other:&String) -> bool {
|
||||
self.content.as_ref().eq(other)
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq<ImString> for String {
|
||||
fn eq(&self, other:&ImString) -> bool {
|
||||
self.eq(other.content.as_ref())
|
||||
}
|
||||
}
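// A usage sketch added for illustration (not part of the original file). Cloning an `ImString`
// only copies the `Rc` pointer, so the underlying `String` is shared between the clones.
fn im_string_demo() {
    let original : ImString = "hello".into();
    let copy     = original.clone();
    assert_eq!(copy, String::from("hello"));
    assert_eq!(original.len(), 5);
}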
|
||||
|
||||
|
||||
// === Macros ===
|
||||
|
||||
/// Defines a newtype for `ImString`.
|
||||
#[macro_export]
|
||||
macro_rules! im_string_newtype {
|
||||
($($(#$meta:tt)* $name:ident),* $(,)?) => {$(
|
||||
$(#$meta)*
|
||||
#[derive(Clone,CloneRef,Debug,Default,Eq,Hash,PartialEq)]
|
||||
pub struct $name {
|
||||
content : ImString
|
||||
}
|
||||
|
||||
impl $name {
|
||||
/// Constructor.
|
||||
pub fn new(content:impl Into<ImString>) -> Self {
|
||||
let content = content.into();
|
||||
Self {content}
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for $name {
|
||||
type Target = str;
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.content
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<$name> for $name {
|
||||
fn as_ref(&self) -> &$name {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<ImString> for $name {
|
||||
fn as_ref(&self) -> &ImString {
|
||||
self.content.as_ref()
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<String> for $name {
|
||||
fn as_ref(&self) -> &String {
|
||||
self.content.as_ref()
|
||||
}
|
||||
}
|
||||
|
||||
impl AsRef<str> for $name {
|
||||
fn as_ref(&self) -> &str {
|
||||
self.content.as_ref()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<String> for $name {
|
||||
fn from(t:String) -> Self {
|
||||
Self::new(t)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&String> for $name {
|
||||
fn from(t:&String) -> Self {
|
||||
Self::new(t)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&&String> for $name {
|
||||
fn from(t:&&String) -> Self {
|
||||
Self::new(t)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&str> for $name {
|
||||
fn from(t:&str) -> Self {
|
||||
Self::new(t)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&&str> for $name {
|
||||
fn from(t:&&str) -> Self {
|
||||
Self::new(t)
|
||||
}
|
||||
}
|
||||
)*};
|
||||
}
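// A hypothetical usage sketch (not part of the original file) of the macro defined above; the
// `ModuleName` and `FunctionName` newtypes are made up for illustration only.
im_string_newtype! {
    /// Name of a module.
    ModuleName,
    /// Name of a function.
    FunctionName,
}

fn im_string_newtype_demo() {
    let module : ModuleName = "Standard.Base".into();
    assert_eq!(&*module, "Standard.Base");
}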
|
@ -1,80 +0,0 @@
|
||||
//! Type related utilities.
|
||||
|
||||
use super::std_reexports::*;
|
||||
|
||||
|
||||
// ================
|
||||
// === Anything ===
|
||||
// ================
|
||||
|
||||
/// Placeholder type used to represent any value type. It is useful to define type-level relations
|
||||
/// such as defining a unit with any quantity, be it distance or mass.
|
||||
#[derive(Clone,Copy,Debug,PartialEq)]
|
||||
pub struct Anything {}
|
||||
|
||||
|
||||
// ===================
|
||||
// === TypeDisplay ===
|
||||
// ===================
|
||||
|
||||
/// Like the `Display` trait but for types. However, unlike `Display` it defaults to
|
||||
/// `core::any::type_name` if not provided with an explicit implementation.
|
||||
pub trait TypeDisplay {
|
||||
fn type_display() -> String;
|
||||
}
|
||||
|
||||
impl<T> TypeDisplay for T {
|
||||
default fn type_display() -> String {
|
||||
type_name::<Self>().to_string()
|
||||
}
|
||||
}
|
||||
|
||||
/// Formats the type for the user-facing output.
|
||||
pub fn type_display<T:TypeDisplay>() -> String {
|
||||
<T as TypeDisplay>::type_display()
|
||||
}
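// A sketch added for illustration (not in the original file): thanks to specialization, a type
// can override the default `type_name`-based behaviour. The `Pixels` type is hypothetical.
#[derive(Clone,Copy,Debug)]
pub struct Pixels {}

impl TypeDisplay for Pixels {
    fn type_display() -> String {
        "Pixels".to_string()
    }
}

fn type_display_demo() {
    assert_eq!(type_display::<Pixels>(), "Pixels");
}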
|
||||
|
||||
|
||||
// =============
|
||||
// === Value ===
|
||||
// =============
|
||||
|
||||
/// Defines a relation between types and values, like between `True` and `true`.
|
||||
pub trait KnownTypeValue {
|
||||
|
||||
/// The value-level counterpart of this type-value.
|
||||
type Value;
|
||||
|
||||
/// The value of this type-value.
|
||||
fn value() -> Self::Value;
|
||||
}
|
||||
|
||||
/// The type of the value associated with the given type-value.
pub type TypeValue<T> = <T as KnownTypeValue>::Value;
|
||||
|
||||
|
||||
|
||||
// =======================
|
||||
// === Type-level Bool ===
|
||||
// =======================
|
||||
|
||||
/// Type level `true` value.
|
||||
#[derive(Clone,Copy,Debug)]
|
||||
pub struct True {}
|
||||
|
||||
/// Type level `false` value.
|
||||
#[derive(Clone,Copy,Debug)]
|
||||
pub struct False {}
|
||||
|
||||
impl KnownTypeValue for True {
|
||||
type Value = bool;
|
||||
fn value() -> Self::Value {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
impl KnownTypeValue for False {
|
||||
type Value = bool;
|
||||
fn value() -> Self::Value {
|
||||
false
|
||||
}
|
||||
}
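// A minimal sketch added for illustration (not part of the original file). A function generic
// over a type-level flag can read the corresponding value-level boolean at runtime.
fn flag_value<T:KnownTypeValue<Value=bool>>() -> bool {
    T::value()
}

fn type_level_bool_demo() {
    assert_eq!(flag_value::<True>(),  true);
    assert_eq!(flag_value::<False>(), false);
}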
|
@ -1,71 +0,0 @@
|
||||
//! This module defines utilities for working with the [`std::vec::Vec`] type.
|
||||
|
||||
use std::hint::unreachable_unchecked;
|
||||
|
||||
|
||||
|
||||
// ==============
|
||||
// === VecOps ===
|
||||
// ==============
|
||||
|
||||
/// Extension methods for the [`std::vec::Vec`] type.
pub trait VecOps {
|
||||
type Item;
|
||||
|
||||
/// Pushes the provided `item` onto the [`std::vec::Vec`], and then returns an immutable
|
||||
/// reference to the item.
|
||||
fn push_and_get(&mut self, item:Self::Item) -> &Self::Item;
|
||||
|
||||
/// Pushes the provided `item` onto the [`std::vec::Vec`], and then returns a mutable reference
|
||||
/// to the item.
|
||||
fn push_and_get_mut(&mut self, item:Self::Item) -> &mut Self::Item;
|
||||
}
|
||||
|
||||
impl <T> VecOps for Vec<T> {
|
||||
type Item = T;
|
||||
|
||||
fn push_and_get(&mut self, item:Self::Item) -> &Self::Item {
|
||||
self.push(item);
|
||||
let item_ix = self.len() - 1;
|
||||
#[allow(unsafe_code)]
|
||||
unsafe { self.get(item_ix).unwrap_or_else(||unreachable_unchecked()) }
|
||||
}
|
||||
|
||||
fn push_and_get_mut(&mut self, item:Self::Item) -> &mut Self::Item {
|
||||
self.push(item);
|
||||
let item_ix = self.len() - 1;
|
||||
#[allow(unsafe_code)]
|
||||
unsafe { self.get_mut(item_ix).unwrap_or_else(||unreachable_unchecked()) }
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =============
|
||||
// === Tests ===
|
||||
// =============
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
struct Test {
|
||||
pub item: usize
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_push_and_get() {
|
||||
let mut vec = Vec::new();
|
||||
let item = Test {item:10};
|
||||
let item_in_vec = vec.push_and_get(item);
|
||||
assert_eq!(item_in_vec.item, 10)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_push_and_get_mut() {
|
||||
let mut vec = Vec::new();
|
||||
let item = Test {item:10};
|
||||
let item_in_vec = vec.push_and_get_mut(item);
|
||||
item_in_vec.item = 20;
|
||||
assert_eq!(item_in_vec.item, 20);
|
||||
}
|
||||
}
|
@ -1,133 +0,0 @@
|
||||
//! This module defines the Wrap / Unwrap utilities. `Unwrap` is like `Deref` but does not implement
|
||||
//! `impl<'a, T> Unwrap for &'a T` in order to make it less error prone. `Wrap` is like `pure` in
|
||||
//! applicative functors – it lifts a value into the wrapping type.
|
||||
|
||||
use crate::std_reexports::*;
|
||||
|
||||
// ===============
|
||||
// === Wrapper ===
|
||||
// ===============
|
||||
|
||||
/// Trait for any type which wraps another type. See the docs of `Wrapper` to learn more.
|
||||
pub trait HasContent {
|
||||
type Content : ?Sized;
|
||||
}
|
||||
|
||||
/// Accessor for the wrapped value.
|
||||
pub type Content<T> = <T as HasContent>::Content;
|
||||
|
||||
/// Trait which enables `Sized` super-bound on the `Content` type.
|
||||
pub trait HasSizedContent = HasContent where Content<Self> : Sized;
|
||||
|
||||
/// Trait for objects which wrap values. Please note that this implements safe wrappers, so the
|
||||
/// object-value relation must be bijective.
|
||||
pub trait Wrapper = Wrap + ContentRef;
|
||||
|
||||
/// Wrapping utility for values.
|
||||
pub trait Wrap : HasSizedContent {
|
||||
/// Wraps the value and returns the wrapped type.
|
||||
fn wrap(t:Self::Content) -> Self;
|
||||
}
|
||||
|
||||
/// Unwrapping utility for wrapped types.
|
||||
///
|
||||
/// Please note that this trait is very similar to the Deref trait. However, there is a very
|
||||
/// important difference. Unlike `Deref`, there is no `impl<'a, T> Unwrap for &'a T` defined. The
|
||||
/// existence of such an impl is very error prone when writing complex impls. The `Deref` docs warn
|
||||
/// about it explicitly: "[...] Because of this, Deref should only be implemented for smart pointers
|
||||
/// to avoid confusion.". As an example, consider the following code, which contains an infinite loop:
|
||||
///
|
||||
/// ```compile_fail
|
||||
/// pub trait HasId {
|
||||
/// fn id(&self) -> usize;
|
||||
/// }
|
||||
///
|
||||
/// // Notice the lack of bound `<T as Deref>::Target : HasId`
|
||||
/// impl<T:Deref> HasId for T {
|
||||
/// fn id(&self) -> usize {
|
||||
/// self.deref().id()
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// And the correct version:
|
||||
///
|
||||
/// ```compile_fail
|
||||
/// pub trait HasId {
|
||||
/// fn id(&self) -> usize;
|
||||
/// }
|
||||
///
|
||||
/// // Notice the additional bound `<T as Deref>::Target : HasId`
|
||||
/// impl<T:Deref> HasId for T where <T as Deref>::Target : HasId {
|
||||
/// fn id(&self) -> usize {
|
||||
/// self.deref().id()
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// Both versions compile fine, but the former loops forever.
|
||||
pub trait ContentRef : HasContent {
|
||||
/// Unwraps this type to get the inner value.
|
||||
fn content(&self) -> &Self::Content;
|
||||
}
|
||||
|
||||
/// Runs a function on the reference to the content.
|
||||
pub trait WithContent : HasSizedContent {
|
||||
/// Runs a function on the reference to the content.
|
||||
fn with_content<F,T>(&self,f:F) -> T where F : FnOnce(&Content<Self>) -> T;
|
||||
}
|
||||
|
||||
/// Unwraps the content by consuming this value.
|
||||
pub trait Unwrap : HasSizedContent {
|
||||
/// Unwraps the content by consuming this value.
|
||||
fn unwrap(self) -> Self::Content;
|
||||
}
|
||||
|
||||
|
||||
// === Utils ===
|
||||
|
||||
/// Wraps the value and returns the wrapped type.
|
||||
pub fn wrap<T:Wrap>(t:T::Content) -> T {
|
||||
T::wrap(t)
|
||||
}
|
||||
|
||||
/// Provides reference to the content of this value.
|
||||
pub fn content<T:ContentRef>(t:&T) -> &T::Content {
|
||||
T::content(t)
|
||||
}
|
||||
|
||||
/// Unwraps the content by consuming this value.
|
||||
pub fn unwrap<T:Unwrap>(t:T) -> T::Content {
|
||||
T::unwrap(t)
|
||||
}
|
||||
|
||||
|
||||
// === Default Impls ===
|
||||
|
||||
impl<T:ContentRef + HasSizedContent> WithContent for T {
|
||||
fn with_content<F,S>(&self,f:F) -> S
|
||||
where F : FnOnce(&Content<Self>) -> S {
|
||||
f(self.content())
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: This should be implemented with the marker trait overlapping rules magic.
|
||||
// impl<T:Deref> Unwrap for T
|
||||
// where <T as Deref>::Target: Unwrap {
|
||||
// default fn unwrap(&self) -> &Self::Content {
|
||||
// self.deref().unwrap()
|
||||
// }
|
||||
// }
|
||||
|
||||
|
||||
// === Impls ===
|
||||
|
||||
impl<T:?Sized> HasContent for Rc<T> { type Content = T; }
|
||||
impl<T> Wrap for Rc<T> { fn wrap(t:T) -> Self { Rc::new(t) } }
|
||||
impl<T:?Sized> ContentRef for Rc<T> { fn content(&self) -> &Self::Content { self.deref() }}
|
||||
|
||||
impl HasContent for String { type Content = char; }
|
||||
impl Wrap for String { fn wrap(t:char) -> Self { t.to_string() } }
|
||||
|
||||
impl<T> HasContent for Vec<T> { type Content = T; }
|
||||
impl<T> Wrap for Vec<T> { fn wrap(t:T) -> Self { vec![t] } }
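// A brief sketch added for illustration (not part of the original module), exercising the free
// functions defined above with the `Rc` and `Vec` impls; `Rc` comes from the prelude re-exports.
fn wrapper_demo() {
    let shared : Rc<i32>  = wrap(42);
    let items  : Vec<i32> = wrap(42);
    assert_eq!(*content(&shared), 42);
    assert_eq!(items, vec![42]);
}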
|
@ -1,32 +0,0 @@
|
||||
[package]
|
||||
name = "enso-shapely"
|
||||
version = "0.1.0"
|
||||
authors = ["Enso Team <enso-dev@enso.org>"]
|
||||
edition = "2018"
|
||||
|
||||
description = "Automated typeclass derivation."
|
||||
readme = "README.md"
|
||||
homepage = "https://github.com/enso-org/enso/lib/rust/shapely/impl"
|
||||
repository = "https://github.com/enso-org/enso"
|
||||
license-file = "../../../../LICENSE"
|
||||
|
||||
keywords = ["typeclass", "deriving"]
|
||||
categories = ["algorithms"]
|
||||
|
||||
publish = true
|
||||
|
||||
[lib]
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
|
||||
[features]
|
||||
default = []
|
||||
|
||||
[dependencies]
|
||||
enso-shapely-macros = { version = "0.1.0" , path = "../macros" }
|
||||
paste = { version = "0.1" }
|
||||
derivative = { version = "1.0.3" }
|
||||
shrinkwraprs = { version = "0.3.0" }
|
||||
|
||||
[dev-dependencies]
|
||||
enso-prelude = { path = "../../enso-prelude" }
|
||||
wasm-bindgen-test = "0.2"
|
@ -1,3 +0,0 @@
|
||||
# Shapely
|
||||
|
||||
This crate provides automatic derivation for useful type classes.
|
@ -1,45 +0,0 @@
|
||||
/// Computes a cartesian product of the provided input.
|
||||
///
|
||||
/// For the following expression:
|
||||
/// ```compile_fail
|
||||
/// cartesian!(f [g] [a b c] [x y z]);
|
||||
/// ```
|
||||
///
|
||||
/// It expands to:
|
||||
/// ```compile_fail
|
||||
/// f! { [g] [ [a x] [a y] [a z] [b x] [b y] [b z] [c x] [c y] [c z] ] }
|
||||
/// ```
|
||||
///
|
||||
/// If you provide underscore as second argument, it is skipped in the ouput macro:
|
||||
///
|
||||
/// ```compile_fail
|
||||
/// cartesian!(f _ [a b c] [x y z]);
|
||||
/// ```
|
||||
///
|
||||
/// Expands to:
|
||||
/// ```compile_fail
|
||||
/// f! { [ [a x] [a y] [a z] [b x] [b y] [b z] [c x] [c y] [c z] ] }
|
||||
/// ```
|
||||
#[macro_export]
|
||||
macro_rules! cartesian {
|
||||
($f:tt [$($a:tt)*] [$($b:tt)*]) => {
|
||||
$crate::_cartesian_impl!{ $f [] [$($a)*] [$($b)*] [$($b)*] }
|
||||
};
|
||||
}
|
||||
|
||||
/// Internal helper for `cartesian` macro.
|
||||
#[macro_export]
|
||||
macro_rules! _cartesian_impl {
|
||||
([[$f:path]] $out:tt [] $b:tt $init_b:tt) => {
|
||||
$f!{ $out }
|
||||
};
|
||||
([[$f:path] $args:tt] $out:tt [] $b:tt $init_b:tt) => {
|
||||
$f!{ $args $out }
|
||||
};
|
||||
($f:tt $out:tt [$a:tt $($at:tt)*] [] $init_b:tt) => {
|
||||
$crate::_cartesian_impl!{ $f $out [$($at)*] $init_b $init_b }
|
||||
};
|
||||
($f:tt [$($out:tt)*] [$a:tt $($at:tt)*] [$b:tt $($bt:tt)*] $init_b:tt) => {
|
||||
$crate::_cartesian_impl!{ $f [$($out)* [$a $b]] [$a $($at)*] [$($bt)*] $init_b }
|
||||
};
|
||||
}
|
@ -1,90 +0,0 @@
|
||||
//! Helper code meant to be used by the code generated through usage of macros
|
||||
//! from the `enso-shapely-macros` crate.
|
||||
|
||||
pub use enso_shapely_macros::*;
|
||||
|
||||
use derivative::Derivative;
|
||||
use std::ops::Generator;
|
||||
use std::ops::GeneratorState;
|
||||
use std::pin::Pin;
|
||||
use std::marker::PhantomData;
|
||||
|
||||
|
||||
|
||||
// ==========================
|
||||
// === GeneratingIterator ===
|
||||
// ==========================
|
||||
|
||||
/// Iterates over values yielded from the wrapped `Generator`.
|
||||
#[derive(Debug)]
|
||||
pub struct GeneratingIterator<G: Generator>(pub G);
|
||||
|
||||
impl<G> Iterator for GeneratingIterator<G>
|
||||
where G: Generator<Return = ()> + Unpin {
|
||||
type Item = G::Yield;
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
match Pin::new(&mut self.0).resume() {
|
||||
GeneratorState::Yielded(element) => Some(element),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =====================
|
||||
// === EmptyIterator ===
|
||||
// =====================
|
||||
|
||||
/// An `Iterator` type that yields no values of the given type `T`.
|
||||
#[derive(Derivative)]
|
||||
#[derivative(Debug,Default(bound=""))]
|
||||
pub struct EmptyIterator<T>(PhantomData<T>);
|
||||
|
||||
impl<T> EmptyIterator<T> {
|
||||
/// Create a new empty iterator.
|
||||
pub fn new() -> Self {
|
||||
Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Iterator for EmptyIterator<T> {
|
||||
type Item = T;
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =============
|
||||
// === Tests ===
|
||||
// =============
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn empty_iterator_works_for_any_type() {
|
||||
for elem in EmptyIterator::new() {
|
||||
elem: i32;
|
||||
}
|
||||
for elem in EmptyIterator::new() {
|
||||
elem: String;
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generating_iterator_works() {
|
||||
let generator = || {
|
||||
yield 0;
|
||||
yield 1;
|
||||
yield 2;
|
||||
};
|
||||
let expected_numbers = vec!(0, 1, 2);
|
||||
let generator_iter = GeneratingIterator(generator);
|
||||
let collected_result: Vec<_> = generator_iter.collect();
|
||||
assert_eq!(collected_result, expected_numbers);
|
||||
}
|
||||
}
|
@ -1,405 +0,0 @@
|
||||
// README README README README README README README README README README README
|
||||
// README README README README README README README README README README README
|
||||
// README README README README README README README README README README README
|
||||
|
||||
// This library is in a very early stage. It will be refactored and improved
|
||||
// soon. It should not be reviewed now.
|
||||
|
||||
#![warn(unsafe_code)]
|
||||
#![warn(missing_copy_implementations)]
|
||||
#![warn(missing_debug_implementations)]
|
||||
#![feature(generators, generator_trait)]
|
||||
#![feature(specialization)]
|
||||
#![feature(type_ascription)]
|
||||
#![feature(overlapping_marker_traits)]
|
||||
|
||||
pub mod generator;
|
||||
pub mod shared;
|
||||
pub mod singleton;
|
||||
pub mod cartesian;
|
||||
|
||||
pub use enso_shapely_macros::*;
|
||||
|
||||
pub use generator::EmptyIterator;
|
||||
pub use generator::GeneratingIterator;
|
||||
|
||||
use shrinkwraprs::Shrinkwrap;
|
||||
|
||||
|
||||
/// Replaces the first argument with the second one. It is useful when creating macros which match
|
||||
/// a pattern and you want to generate as many repetitions of a token as there were matches. For
|
||||
/// example, when matching `$($name:ident)*`, you may want to generate as many empty tuples as
|
||||
/// the number of names matched. You can do it by using `$(replace!{$name,()})*`.
|
||||
#[macro_export]
|
||||
macro_rules! replace {
|
||||
($a:tt,$b:tt) => {$b}
|
||||
}
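// A hypothetical sketch (not part of the original file) of the pattern described above: every
// matched identifier is replaced with `()`, and the length of the resulting array counts the
// matches.
macro_rules! count_idents {
    ($($name:ident)*) => {
        [$( replace!($name, ()) ),*].len()
    };
}

fn replace_demo() {
    assert_eq!(count_idents!(a b c), 3);
}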
|
||||
|
||||
/// Generates a newtype wrapper for the provided types. It also generates a lot of impls,
|
||||
/// including Copy, Clone, Debug, Default, Display, From, Into, Deref, and DerefMut.
|
||||
///
|
||||
/// For the following input:
|
||||
/// ```compile_fail
|
||||
/// newtype_copy! {
|
||||
/// AttributeIndex(usize)
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// The following code is generated:
|
||||
/// ```compile_fail
|
||||
/// #[derive(Copy, Clone, Debug, Default, Display, From, Into)]
|
||||
/// pub struct AttributeIndex(usize);
|
||||
/// impl Deref for AttributeIndex {
|
||||
/// type Target = usize;
|
||||
/// fn deref(&self) -> &Self::Target {
|
||||
/// &self.0
|
||||
/// }
|
||||
/// }
|
||||
/// impl DerefMut for AttributeIndex {
|
||||
/// fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
/// &mut self.0
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
#[macro_export]
|
||||
macro_rules! newtype_copy {
|
||||
($( $(#$meta:tt)* $name:ident($type:ty); )*) => {$(
|
||||
$(#$meta)*
|
||||
#[derive(Copy,Clone,CloneRef,Debug,Default,Display,From,Into)]
|
||||
pub struct $name($type);
|
||||
|
||||
impl Deref for $name {
|
||||
type Target = $type;
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl DerefMut for $name {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
&mut self.0
|
||||
}
|
||||
}
|
||||
)*}
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! derive_clone_plus {
|
||||
($name:ident) => {
|
||||
impl<T:Clone+Into<$name>> From<&T> for $name {
|
||||
fn from(t: &T) -> Self {
|
||||
t.clone().into()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
////////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
use std::ops::Deref;
|
||||
|
||||
macro_rules! extension_struct {
|
||||
($name:ident { $($field:ident : $field_type:ty),* }) => { paste::item! {
|
||||
|
||||
////// With_NAME_ //////
|
||||
|
||||
#[derive(Shrinkwrap)]
|
||||
#[shrinkwrap(mutable)]
|
||||
struct [<With $name>]<T>($($field_type),*, #[shrinkwrap(main_field)] T);
|
||||
|
||||
////// Has_NAME_ //////
|
||||
|
||||
pub trait [<Has $name>] {
|
||||
$(fn $field(&self) -> &$field_type;)*
|
||||
}
|
||||
|
||||
impl<T: [<Has $name Indirect>]>
|
||||
[<Has $name>] for T {
|
||||
$(fn $field(&self) -> &$field_type {
|
||||
[<Has $name Spec1>]::$field(self)
|
||||
})*
|
||||
}
|
||||
|
||||
////// Has_NAME_Indirect //////
|
||||
|
||||
pub trait [<Has $name Indirect>] {}
|
||||
|
||||
impl<T>
|
||||
[<Has $name Indirect>] for [<With $name>]<T> {}
|
||||
|
||||
impl<T>
|
||||
[<Has $name Indirect>] for T
|
||||
where T: Deref, <Self as Deref>::Target : [<Has $name>] {}
|
||||
|
||||
////// Has_NAME_Spec1 //////
|
||||
|
||||
trait [<Has $name Spec1>] {
|
||||
$(fn $field(&self) -> &$field_type;)*
|
||||
}
|
||||
|
||||
impl<T>
|
||||
[<Has $name Spec1>] for [<With $name>]<T> {
|
||||
$(fn $field(&self) -> &$field_type {
|
||||
&self.0
|
||||
})*
|
||||
}
|
||||
|
||||
impl<T: [<Has $name Indirect>]>
|
||||
[<Has $name Spec1>] for T {
|
||||
$(default fn $field(&self) -> &$field_type {
|
||||
[<Has $name Spec2>]::$field(self)
|
||||
})*
|
||||
}
|
||||
|
||||
////// Has_NAME_Spec2 //////
|
||||
|
||||
trait [<Has $name Spec2>] {
|
||||
$(fn $field(&self) -> &$field_type;)*
|
||||
}
|
||||
|
||||
impl<T: [<Has $name Indirect>]>
|
||||
[<Has $name Spec2>] for T {
|
||||
$(default fn $field(&self) -> &$field_type {
|
||||
unreachable!();
|
||||
})*
|
||||
}
|
||||
|
||||
impl<T>
|
||||
[<Has $name Spec2>] for T
|
||||
where T: Deref, <Self as Deref>::Target : [<Has $name>] {
|
||||
$(fn $field(&self) -> &$field_type {
|
||||
self.deref().$field()
|
||||
})*
|
||||
}
|
||||
}};
|
||||
}
|
||||
|
||||
|
||||
extension_struct!(Label {
|
||||
label: String
|
||||
});
|
||||
|
||||
extension_struct!(Foo {
|
||||
t1: String
|
||||
});
|
||||
|
||||
|
||||
|
||||
// ==============
|
||||
// === WithID ===
|
||||
// ==============
|
||||
|
||||
struct WithID<T>(i32, T);
|
||||
|
||||
impl<T> Deref for WithID<T> {
|
||||
type Target = T;
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.1
|
||||
}
|
||||
}
|
||||
|
||||
struct WithID2<T>(i32, T);
|
||||
|
||||
impl<T> Deref for WithID2<T> {
|
||||
type Target = T;
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.1
|
||||
}
|
||||
}
|
||||
|
||||
//// === HasID ===
|
||||
//
|
||||
//pub trait HasID {
|
||||
// fn id(&self) -> &i32;
|
||||
//}
|
||||
//
|
||||
//impl<T: MarkerCtxForHasID> HasID for T {
|
||||
// fn id(&self) -> &i32 {
|
||||
// HasIDForVariantOrAny::id(self)
|
||||
// }
|
||||
//}
|
||||
//
|
||||
//// === MarkerCtxForHasID ===
|
||||
//
|
||||
//pub trait MarkerCtxForHasID {}
|
||||
//
|
||||
//impl<T> MarkerCtxForHasID for WithID<T> {}
|
||||
//
|
||||
//impl<T> MarkerCtxForHasID for T
|
||||
//where T: Deref, <T as Deref>::Target : HasID {}
|
||||
//
|
||||
//
|
||||
//// === HasIDForVariantOrAny ===
|
||||
//
|
||||
//trait HasIDForVariantOrAny {
|
||||
// fn id(&self) -> &i32;
|
||||
//}
|
||||
//impl<T> HasIDForVariantOrAny for WithID<T> {
|
||||
// fn id(&self) -> &i32 {
|
||||
// &self.0
|
||||
// }
|
||||
//}
|
||||
//impl<T: MarkerCtxForHasID> HasIDForVariantOrAny for T {
|
||||
// default fn id(&self) -> &i32 {
|
||||
// HasIDForDerefOrAny::id(self)
|
||||
// }
|
||||
//}
|
||||
//
|
||||
//// === HasIDForDerefOrAny ===
|
||||
//
|
||||
//trait HasIDForDerefOrAny {
|
||||
// fn id(&self) -> &i32;
|
||||
//}
|
||||
//impl<T> HasIDForDerefOrAny for T
|
||||
//where T: Deref, <Self as Deref>::Target : HasID {
|
||||
// fn id(&self) -> &i32 {
|
||||
// self.deref().id()
|
||||
// }
|
||||
//}
|
||||
//impl<T> HasIDForDerefOrAny for T {
|
||||
// default fn id(&self) -> &i32 {
|
||||
// unreachable!();
|
||||
// }
|
||||
//}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
// === HasID ===
|
||||
|
||||
pub trait HasID {
|
||||
fn id(&self) -> &i32;
|
||||
}
|
||||
|
||||
|
||||
//////////////////////////////////
|
||||
|
||||
#[overlappable]
|
||||
impl<T> HasID for T
|
||||
where T: Deref, <Self as Deref>::Target : HasID {
|
||||
fn id(&self) -> &i32 {
|
||||
self.deref().id()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: MarkerCtx_HasID> HasID for T {
|
||||
fn id(&self) -> &i32 {
|
||||
VariantOrAny_HasID::id(self)
|
||||
}
|
||||
}
|
||||
|
||||
// === MarkerCtx_HasID ===
|
||||
|
||||
#[allow(non_camel_case_types)]
|
||||
pub trait MarkerCtx_HasID {}
|
||||
|
||||
impl<T> MarkerCtx_HasID for T
|
||||
where T: Deref, <T as Deref>::Target : HasID {}
|
||||
|
||||
// === VariantOrAny_HasID ===
|
||||
|
||||
#[allow(non_camel_case_types)]
|
||||
trait VariantOrAny_HasID {
|
||||
fn id(&self) -> &i32;
|
||||
}
|
||||
|
||||
impl<T: MarkerCtx_HasID> VariantOrAny_HasID for T {
|
||||
default fn id(&self) -> &i32 {
|
||||
DerefOrAny_HasID::id(self)
|
||||
}
|
||||
}
|
||||
|
||||
// === DerefOrAny_HasID ===
|
||||
|
||||
#[allow(non_camel_case_types)]
|
||||
trait DerefOrAny_HasID {
|
||||
fn id(&self) -> &i32;
|
||||
}
|
||||
impl<T> DerefOrAny_HasID for T
|
||||
where T: Deref, <Self as Deref>::Target : HasID {
|
||||
fn id(&self) -> &i32 {
|
||||
self.deref().id()
|
||||
}
|
||||
}
|
||||
impl<T> DerefOrAny_HasID for T {
|
||||
default fn id(&self) -> &i32 {
|
||||
unreachable!();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/////////////////////////////////////////////
|
||||
|
||||
|
||||
//#[overlapping]
|
||||
//impl<T> HasID for WithID<T> {
|
||||
// fn id(&self) -> &i32 {
|
||||
// &self.0
|
||||
// }
|
||||
//}
|
||||
|
||||
impl<T> MarkerCtx_HasID for WithID<T> {}
|
||||
|
||||
impl<T> VariantOrAny_HasID for WithID<T> {
|
||||
fn id(&self) -> &i32 {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
//// NON-CONFLICTING:
|
||||
//
|
||||
//trait HasFoo2 {
|
||||
// fn foo(&self) -> i32;
|
||||
//}
|
||||
//impl<T> HasFoo2 for T {
|
||||
// default fn foo(&self) -> i32 {
|
||||
// 7
|
||||
// }
|
||||
//}
|
||||
//impl<T> HasFoo2 for WithID<T> {
|
||||
// default fn foo(&self) -> i32 {
|
||||
// 8
|
||||
// }
|
||||
//}
|
||||
//
|
||||
//// CONFLICTING
|
||||
//
|
||||
//trait HasFoo3 {
|
||||
// fn foo(&self) -> i32;
|
||||
//}
|
||||
//impl<T> HasFoo3 for T
|
||||
// where T: Deref,
|
||||
// <T as Deref>::Target: HasFoo3 {
|
||||
// default fn foo(&self) -> i32 {
|
||||
// self.deref().foo()
|
||||
// }
|
||||
//}
|
||||
//impl<T> HasFoo3 for WithID<T> {
|
||||
// default fn foo(&self) -> i32 {
|
||||
// 8
|
||||
// }
|
||||
//}
|
||||
|
||||
|
||||
// =============
|
||||
// === Usage ===
|
||||
// =============
|
||||
|
||||
struct _A(i32);
|
||||
|
||||
type _X = WithLabel<WithID<_A>>;
|
||||
|
||||
fn _test<T: HasID + HasLabel> (t: T) {
|
||||
println!("{:?}", t.label());
|
||||
println!("{:?}", t.id());
|
||||
}
|
||||
|
||||
fn _main() {
|
||||
let v1 = WithLabel("label1".to_string(), WithID(0, _A(1)));
|
||||
_test(v1); // THIS IS AN EXAMPLE USE CASE WHICH DOES NOT COMPILE
|
||||
|
||||
// println!("{}", 7.foo());
|
||||
}
|
@ -1,381 +0,0 @@
|
||||
//! This module implements the `shared` macro, a utility allowing for easy definition of
|
||||
//! `Rc<RefCell<...>>` wrappers.
|
||||
|
||||
|
||||
/// This macro provides an easy way to define secure `Rc<RefCell<...>>` wrappers for a given struct.
|
||||
///
|
||||
/// This macro accepts a body which is very similar to a normal struct definition. There are a few
|
||||
/// notable differences:
|
||||
/// - The first token this macro accepts should be the name of the wrapped structure.
|
||||
/// - The implementation block does not have a name. It is always implemented for the struct.
|
||||
/// You are allowed to provide multiple impl blocks.
|
||||
///
|
||||
/// This macro traverses the definition and, for each function, generates a borrowing counterpart.
|
||||
/// It also handles the `new` function in a special way. Please note that this macro generates
|
||||
/// only safe bindings. If your original function returns a reference, the generated code will fail.
|
||||
/// If you want to return references with some custom guard system, implement that outside of this
|
||||
/// macro usage.
|
||||
///
|
||||
/// For the given input:
|
||||
/// ```compile_fail
|
||||
/// shared! { Uniform
|
||||
///
|
||||
/// #[derive(Clone,Copy,Debug)]
|
||||
/// pub struct UniformData<Value> {
|
||||
/// value: Value,
|
||||
/// dirty: bool,
|
||||
/// }
|
||||
///
|
||||
/// impl<Value:UniformValue> {
|
||||
/// /// Constructor.
|
||||
/// pub fn new(value:Value) -> Self {
|
||||
/// let dirty = false;
|
||||
/// Self {value,dirty}
|
||||
/// }
|
||||
///
|
||||
/// /// Checks whether the uniform was changed and not yet updated.
|
||||
/// pub fn check_dirty(&self) -> bool {
|
||||
/// self.dirty
|
||||
/// }
|
||||
///
|
||||
/// /// Modifies the value stored by the uniform.
|
||||
/// pub fn modify<F:FnOnce(&mut Value)>(&mut self, f:F) {
|
||||
/// self.set_dirty();
|
||||
/// f(&mut self.value);
|
||||
/// }
|
||||
/// }}
|
||||
/// ```
|
||||
///
|
||||
/// The following output will be generated:
|
||||
///
|
||||
/// ```compile_fail
|
||||
/// #[derive(Clone,Copy,Debug)]
|
||||
/// pub struct UniformData<Value> {
|
||||
/// value: Value,
|
||||
/// dirty: bool,
|
||||
/// }
|
||||
///
|
||||
/// impl<Value:UniformValue> for UniformData<Value> {
|
||||
/// #[doc = r###"Constructor."###]
|
||||
/// pub fn new(value:Value) -> Self {
|
||||
/// let dirty = false;
|
||||
/// Self {value,dirty}
|
||||
/// }
|
||||
///
|
||||
/// #[doc = r###"Checks whether the uniform was changed and not yet updated."###]
|
||||
/// pub fn check_dirty(&self) -> bool {
|
||||
/// self.dirty
|
||||
/// }
|
||||
///
|
||||
/// #[doc = r###"Modifies the value stored by the uniform."###]
|
||||
/// pub fn modify<F:FnOnce(&mut Value)>(&mut self, f:F) {
|
||||
/// self.set_dirty();
|
||||
/// f(&mut self.value);
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// #[derive(Clone,Copy,Debug)]
|
||||
/// pub struct Uniform<Value> {
|
||||
/// rc: Rc<RefCell<UniformData<Value>>>
|
||||
/// }
|
||||
///
|
||||
/// impl<Value:UniformValue> for Uniform<Value> {
|
||||
/// #[doc = r###"Constructor."###]
|
||||
/// pub fn new(value:Value) -> Self {
|
||||
/// let rc = Rc::new(RefCell::new(UniformData::new(value)));
|
||||
/// Self {rc}
|
||||
/// }
|
||||
///
|
||||
/// #[doc = r###"Checks whether the uniform was changed and not yet updated."###]
|
||||
/// pub fn check_dirty(&self) -> bool {
|
||||
/// self.rc.borrow.check_dirty()
|
||||
/// }
|
||||
///
|
||||
/// #[doc = r###"Modifies the value stored by the uniform."###]
|
||||
/// pub fn modify<F:FnOnce(&mut Value)>(&self, f:F) {
|
||||
/// self.borrow_mut().modify(f)
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// **Note**
|
||||
/// Both the implementation and the usage syntax of this macro would be nicer if it were
|
||||
/// implemented as a procedural macro. However, no IDE supports expansion of procedural macros
|
||||
/// currently, so it was implemented with `macro_rules!` instead.
|
||||
#[macro_export]
|
||||
macro_rules! shared {
|
||||
($name:ident $($in:tt)*) => {
|
||||
$crate::angles_to_brackets_shallow! { shared_bracket [$name] $($in)* }
|
||||
}
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! shared_bracket_impl {
|
||||
([impl [$($impl_params:tt)*] $name:ident $name_mut:ident $([$($params:tt)*])?] [
|
||||
$(
|
||||
$(#[$($meta:tt)*])*
|
||||
$acc:vis fn $fn_name:ident
|
||||
$([$($fn_params:tt)*])? ($($fn_args:tt)*) $(-> $fn_type:ty)? $(where $($wt1:ty : $wt2:path),* )? {
|
||||
$($fn_body:tt)*
|
||||
}
|
||||
)*
|
||||
]) => {
|
||||
impl <$($impl_params)*> $name_mut $(<$($params)*>)? {
|
||||
$(
|
||||
$(#[$($meta)*])*
|
||||
$acc fn $fn_name $(<$($fn_params)*>)*
|
||||
($($fn_args)*) $(-> $fn_type)? $(where $($wt1 : $wt2),* )? {$($fn_body)*}
|
||||
)*
|
||||
}
|
||||
|
||||
impl <$($impl_params)*> $name $(<$($params)*>)? {
|
||||
$($crate::shared_bracket_fn! {
|
||||
$name_mut :: $(#[$($meta)*])*
|
||||
$acc fn $fn_name [$($($fn_params)*)*] ($($fn_args)*) $(-> $fn_type)? $(where $($wt1 : $wt2),* )?
|
||||
})*
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! shared_bracket_fn {
|
||||
( $base:ident :: $(#[$($meta:tt)*])* $acc:vis fn new $([$($params:tt)*])?
|
||||
($($arg:ident : $arg_type:ty),*) $(-> $type:ty)? $(where $($wt1:ty : $wt2:path),* )? ) => {
|
||||
$(#[$($meta)*])*
|
||||
$acc fn new $(<$($params)*>)* ($($arg : $arg_type),*) $(-> $type)? $(where $($wt1 : $wt2),* )? {
|
||||
Self { rc: Rc::new(RefCell::new($base::new($($arg),*))) }
|
||||
}
|
||||
};
|
||||
( $base:ident :: $(#[$($meta:tt)*])* $acc:vis fn $name:ident $([$($params:tt)*])?
|
||||
(&self $(,$($arg:ident : $arg_type:ty),+)?) $(-> $type:ty)? $(where $($wt1:ty : $wt2:path),* )? ) => {
|
||||
$(#[$($meta)*])*
|
||||
$acc fn $name $(<$($params)*>)* (&self $(,$($arg : $arg_type),*)?) $(-> $type)? $(where $($wt1 : $wt2),* )? {
|
||||
self.rc.borrow().$name($($($arg),*)?)
|
||||
}
|
||||
};
|
||||
( $base:ident :: $(#[$($meta:tt)*])* $acc:vis fn $name:ident $([$($params:tt)*])?
|
||||
(&mut self $(,$($arg:ident : $arg_type:ty),+)?) $(-> $type:ty)? $(where $($wt1:ty : $wt2:path),* )? ) => {
|
||||
$(#[$($meta)*])*
|
||||
$acc fn $name $(<$($params)*>)* (&self $(,$($arg : $arg_type),*)?) $(-> $type)? $(where $($wt1 : $wt2),* )? {
|
||||
self.rc.borrow_mut().$name($($($arg),*)?)
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! shared_bracket_normalized {
|
||||
( [$name:ident] [
|
||||
$(#[$($meta:tt)*])*
|
||||
$(##[$($imeta:tt)*])*
|
||||
pub struct $name_mut:ident $params:tt {
|
||||
$($(#[$($field_meta:tt)*])* $field:ident : $field_type:ty),* $(,)?
|
||||
}
|
||||
|
||||
$(impl $([$($impl_params:tt)*])? {$($impl_body:tt)*})*
|
||||
]) => {
|
||||
$crate::shared_struct! {
|
||||
$(#[$($meta)*])*
|
||||
$(##[$($imeta)*])*
|
||||
pub struct $name $name_mut $params {
|
||||
$($(#[$($field_meta)*])* $field : $field_type),*
|
||||
}
|
||||
}
|
||||
|
||||
$($crate::angles_to_brackets_shallow! {shared_bracket_impl
|
||||
[impl [$($($impl_params)*)?] $name $name_mut $params] $($impl_body)*
|
||||
})*
|
||||
};
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! shared_struct {
|
||||
(
|
||||
$(#[$($meta:tt)*])*
|
||||
$(##[$($imeta:tt)*])*
|
||||
pub struct $name:ident $name_mut:ident [$($params:tt)*] {
|
||||
$($(#[$($field_meta:tt)*])* $field:ident : $field_type:ty),* $(,)?
|
||||
}
|
||||
) => {
|
||||
$(#[$($meta)*])*
|
||||
#[derive(CloneRef)]
|
||||
pub struct $name <$($params)*> { rc: Rc<RefCell<$name_mut<$($params)*>>> }
|
||||
|
||||
$(#[$($meta)*])*
|
||||
$(#[$($imeta)*])*
|
||||
pub struct $name_mut <$($params)*> { $($(#[$($field_meta)*])* $field : $field_type),* }
|
||||
|
||||
impl<$($params)*> Clone for $name <$($params)*> {
|
||||
fn clone(&self) -> Self {
|
||||
let rc = self.rc.clone();
|
||||
Self {rc}
|
||||
}
|
||||
}
|
||||
|
||||
paste::item! {
|
||||
$(#[$($meta)*])*
|
||||
#[derive(CloneRef)]
|
||||
pub struct [<Weak $name>] <$($params)*> { weak: Weak<RefCell<$name_mut<$($params)*>>> }
|
||||
|
||||
impl<$($params)*> Clone for [<Weak $name>] <$($params)*> {
|
||||
fn clone(&self) -> Self {
|
||||
let weak = self.weak.clone();
|
||||
Self {weak}
|
||||
}
|
||||
}
|
||||
|
||||
impl<$($params)*> [<Weak $name>] <$($params)*> {
|
||||
/// Attempts to upgrade the weak pointer to an `Rc`, delaying the dropping of the inner
|
||||
/// value if successful.
|
||||
pub fn upgrade(&self) -> Option<$name <$($params)*>> {
|
||||
self.weak.upgrade().map(|rc| $name {rc})
|
||||
}
|
||||
}
|
||||
|
||||
impl<$($params)*> WeakElement for [<Weak $name>] <$($params)*> {
|
||||
type Strong = $name <$($params)*> ;
|
||||
|
||||
fn new(view: &Self::Strong) -> Self {
|
||||
view.downgrade()
|
||||
}
|
||||
|
||||
fn view(&self) -> Option<Self::Strong> {
|
||||
self.upgrade()
|
||||
}
|
||||
}
|
||||
|
||||
impl<$($params)*> $name <$($params)*> {
|
||||
/// Downgrades the reference to a weak reference.
|
||||
pub fn downgrade(&self) -> [<Weak $name>] <$($params)*> {
|
||||
let weak = Rc::downgrade(&self.rc);
|
||||
[<Weak $name>] {weak}
|
||||
}
|
||||
|
||||
/// Calls the operation on the mutably borrowed data. Should be used only in the implementation
|
||||
/// of the wrapper.
|
||||
fn with_borrowed<F,R>(&self, operation:F) -> R
|
||||
where F : FnOnce(&mut $name_mut<$($params)*>) -> R {
|
||||
operation(&mut self.rc.borrow_mut())
|
||||
}
|
||||
|
||||
/// Wraps the given data object into a shared handle.
|
||||
pub fn new_from_data(data:$name_mut<$($params)*>) -> Self {
|
||||
Self {rc:Rc::new(RefCell::new(data))}
|
||||
}
|
||||
|
||||
/// Checks if the shared pointer points to the same struct as `other`.
|
||||
pub fn identity_equals(&self, other:&Self) -> bool {
|
||||
Rc::ptr_eq(&self.rc,&other.rc)
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! angles_to_brackets_shallow {
|
||||
($f:ident $f_arg:tt $($in:tt)*) => {
|
||||
$crate::_angles_to_brackets_shallow! { $f $f_arg [] [] [] $($in)* }
|
||||
}
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! _angles_to_brackets_shallow {
|
||||
( $f:ident $f_arg:tt [] [$($out:tt)*] [] ) => { $crate::$f! { $f_arg [$($out)*] } };
|
||||
( $f:ident $f_arg:tt [] [$($out:tt)*] [$($cout:tt)*] ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] [$($out)* $($cout)*] [] } };
|
||||
( $f:ident $f_arg:tt [] [$($out:tt)*] [$($cout:tt)*] < $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [.] [$($out)* $($cout)*] [] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] << $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [. .] $out [$($cout)* <] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] <<< $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [. . .] $out [$($cout)* <<] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] <<<< $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [. . . .] $out [$($cout)* <<<] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] <<<<< $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [. . . . .] $out [$($cout)* <<<<] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [$($depth:tt)*] $out:tt [$($cout:tt)*] < $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [$($depth)* .] $out [$($cout)* <] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [$($depth:tt)*] $out:tt [$($cout:tt)*] << $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [$($depth)* . .] $out [$($cout)* <<] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [$($depth:tt)*] $out:tt [$($cout:tt)*] <<< $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [$($depth)* . . .] $out [$($cout)* <<<] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [$($depth:tt)*] $out:tt [$($cout:tt)*] <<<< $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [$($depth)* . . . .] $out [$($cout)* <<<<] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [$($depth:tt)*] $out:tt [$($cout:tt)*] <<<<< $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [$($depth)* . . . . .] $out [$($cout)* <<<<<] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [. $($depth:tt)*] $out:tt [$($cout:tt)*] -> $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [. $($depth)*] $out [$($cout)* ->] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [.] [$($out:tt)*] $cout:tt > $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] [$($out)* $cout] [] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [. .] [$($out:tt)*] [$($cout:tt)*] >> $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] [$($out)* [$($cout)* >]] [] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [. . .] [$($out:tt)*] [$($cout:tt)*] >>> $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] [$($out)* [$($cout)* >>]] [] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [. . . .] [$($out:tt)*] [$($cout:tt)*] >>>> $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] [$($out)* [$($cout)* >>>]] [] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [. . . . .] [$($out:tt)*] [$($cout:tt)*] >>>>> $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] [$($out)* [$($cout)* >>>>]] [] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [. $($depth:tt)*] $out:tt [$($cout:tt)*] > $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [$($depth)*] $out [$($cout)* >] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [. . $($depth:tt)*] $out:tt [$($cout:tt)*] >> $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [$($depth)*] $out [$($cout)* >>] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [. . . $($depth:tt)*] $out:tt [$($cout:tt)*] >>> $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [$($depth)*] $out [$($cout)* >>>] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [. . . . $($depth:tt)*] $out:tt [$($cout:tt)*] >>>> $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [$($depth)*] $out [$($cout)* >>>>] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [. . . . . $($depth:tt)*] $out:tt [$($cout:tt)*] >>>>> $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [$($depth)*] $out [$($cout)* >>>>>] $($rest)* } };
|
||||
|
||||
// Function output handling
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt $t19:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 $t19 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt $t19:tt $t20:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 $t19 $t20 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt $t19:tt $t20:tt $t21:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 $t19 $t20 $t21 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt $t19:tt $t20:tt $t21:tt $t22:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 $t19 $t20 $t21 $t22 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt $t19:tt $t20:tt $t21:tt $t22:tt $t23:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 $t19 $t20 $t21 $t22 $t23 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt $t19:tt $t20:tt $t21:tt $t22:tt $t23:tt $t24:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 $t19 $t20 $t21 $t22 $t23 $t24 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt $t19:tt $t20:tt $t21:tt $t22:tt $t23:tt $t24:tt $t25:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 $t19 $t20 $t21 $t22 $t23 $t24 $t25 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt $t19:tt $t20:tt $t21:tt $t22:tt $t23:tt $t24:tt $t25:tt $t26:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 $t19 $t20 $t21 $t22 $t23 $t24 $t25 $t26 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt $t19:tt $t20:tt $t21:tt $t22:tt $t23:tt $t24:tt $t25:tt $t26:tt $t27:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 $t19 $t20 $t21 $t22 $t23 $t24 $t25 $t26 $t27 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt $t19:tt $t20:tt $t21:tt $t22:tt $t23:tt $t24:tt $t25:tt $t26:tt $t27:tt $t28:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 $t19 $t20 $t21 $t22 $t23 $t24 $t25 $t26 $t27 $t28 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt $t19:tt $t20:tt $t21:tt $t22:tt $t23:tt $t24:tt $t25:tt $t26:tt $t27:tt $t28:tt $t29:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 $t19 $t20 $t21 $t22 $t23 $t24 $t25 $t26 $t27 $t28 $t29 {$($b)*}] $($rest)* } };
|
||||
( $f:ident $f_arg:tt [] $out:tt [$($cout:tt)*] -> $t1:tt $t2:tt $t3:tt $t4:tt $t5:tt $t6:tt $t7:tt $t8:tt $t9:tt $t10:tt $t11:tt $t12:tt $t13:tt $t14:tt $t15:tt $t16:tt $t17:tt $t18:tt $t19:tt $t20:tt $t21:tt $t22:tt $t23:tt $t24:tt $t25:tt $t26:tt $t27:tt $t28:tt $t29:tt $t30:tt {$($b:tt)*} $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg [] $out [$($cout)* -> $t1 $t2 $t3 $t4 $t5 $t6 $t7 $t8 $t9 $t10 $t11 $t12 $t13 $t14 $t15 $t16 $t17 $t18 $t19 $t20 $t21 $t22 $t23 $t24 $t25 $t26 $t27 $t28 $t29 $t30 {$($b)*}] $($rest)* } };
|
||||
|
||||
// Any token handling
|
||||
( $f:ident $f_arg:tt $depth:tt $out:tt [$($cout:tt)*] $t:tt $($rest:tt)* ) => { $crate::_angles_to_brackets_shallow! { $f $f_arg $depth $out [$($cout)* $t] $($rest)* } };
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! shared_bracket {
|
||||
([$name:ident] [$($in:tt)*]) => {
|
||||
$crate::normalize_input! { shared_bracket_normalized [$name] $($in)* }
|
||||
}
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! normalize_input {
|
||||
($f:ident $f_args:tt $($in:tt)*) => {
|
||||
$crate::_normalize_input! { $f $f_args [] $($in)* }
|
||||
}
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! _normalize_input {
|
||||
// Finish.
|
||||
( $f:ident $f_args:tt $out:tt ) => {
|
||||
$crate::$f! { $f_args $out }
|
||||
};
|
||||
|
||||
// Structs.
|
||||
( $f:ident $f_args:tt [$($out:tt)*]
|
||||
$(#[$($meta:tt)*])*
|
||||
pub struct $name:tt $([$($params:tt)*])? {$($body:tt)*}
|
||||
$($rest:tt)*
|
||||
) => {
|
||||
$crate::_normalize_input! { $f $f_args
|
||||
[$($out)*
|
||||
$(#[$($meta)*])*
|
||||
pub struct $name [$($($params)*)?] {$($body)*}
|
||||
] $($rest)* }
|
||||
};
|
||||
|
||||
// Any token.
|
||||
( $f:ident $f_args:tt [$($out:tt)*] $in:tt $($rest:tt)* ) => {
|
||||
$crate::_normalize_input! { $f $f_args [$($out)* $in] $($rest)* }
|
||||
};
|
||||
}
|
@ -1,143 +0,0 @@
|
||||
//! This module defines helpers for defining singletons and associated enum types. A singleton is
|
||||
//! a type with one possible value. It is used mainly for a type level programming purposes.
|
||||
|
||||
/// Defines singleton types. For the following input:
|
||||
/// ```compile_fail
|
||||
/// define_singletons!{
|
||||
/// /// A Foo!
|
||||
/// Foo,
|
||||
/// /// A Bar!
|
||||
/// Bar,
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// It expands to:
|
||||
///
|
||||
/// ```
|
||||
/// #[allow(missing_docs)]
|
||||
/// #[derive(Copy, Clone, Debug)]
|
||||
/// #[doc = r###"A Foo!"###]
|
||||
/// pub struct Foo;
|
||||
/// impl Default for Foo {
|
||||
/// fn default() -> Self {
|
||||
/// Self
|
||||
/// }
|
||||
/// }
|
||||
/// #[allow(missing_docs)]
|
||||
/// #[derive(Copy, Clone, Debug)]
|
||||
/// #[doc = r###"A Bar!"###]
|
||||
/// pub struct Bar;
|
||||
/// impl Default for Bar {
|
||||
/// fn default() -> Self {
|
||||
/// Self
|
||||
/// }
|
||||
/// }
|
||||
///
|
||||
/// ```
|
||||
#[macro_export]
|
||||
macro_rules! define_singletons {
|
||||
( $( $(#$meta:tt)* $name:ident ),* $(,)? ) => {$(
|
||||
#[allow(missing_docs)]
|
||||
#[derive(Copy,Clone,Debug,PartialEq,Eq)]
|
||||
$(#$meta)*
|
||||
pub struct $name;
|
||||
|
||||
impl Default for $name {
|
||||
fn default() -> Self {
|
||||
Self
|
||||
}
|
||||
}
|
||||
)*}
|
||||
}
|
||||
|
||||
/// Defines an associated enum type for predefined singletons.
|
||||
///
|
||||
/// For the following input:
|
||||
/// ```compile_fail
|
||||
/// define_singleton_enum!{
|
||||
/// MyEnum {
|
||||
/// /// A Foo!
|
||||
/// Foo,
|
||||
/// /// A Bar!
|
||||
/// Bar,
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// It expands to:
|
||||
///
|
||||
/// ```compile_fail
|
||||
/// #[allow(missing_docs)]
|
||||
/// #[derive(Copy, Clone, Debug)]
|
||||
/// pub enum MyEnum {
|
||||
/// #[doc = r###"A Foo!"###]
|
||||
/// Foo,
|
||||
/// #[doc = r###"A Bar!"###]
|
||||
/// Bar,
|
||||
/// }
|
||||
/// impl From<Foo> for MyEnum {
|
||||
/// fn from(_: Foo) -> Self {
|
||||
/// Self::Foo
|
||||
/// }
|
||||
/// }
|
||||
/// impl From<PhantomData<Foo>> for MyEnum {
|
||||
/// fn from(_: PhantomData<Foo>) -> Self {
|
||||
/// Self::Foo
|
||||
/// }
|
||||
/// }
|
||||
/// impl From<Bar> for MyEnum {
|
||||
/// fn from(_: Bar) -> Self {
|
||||
/// Self::Bar
|
||||
/// }
|
||||
/// }
|
||||
/// impl From<PhantomData<Bar>> for MyEnum {
|
||||
/// fn from(_: PhantomData<Bar>) -> Self {
|
||||
/// Self::Bar
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
#[macro_export]
|
||||
macro_rules! define_singleton_enum_from {
|
||||
(
|
||||
$(#$meta:tt)*
|
||||
$name:ident {
|
||||
$( $(#$field_meta:tt)* $field:ident ),* $(,)?
|
||||
}
|
||||
) => {
|
||||
#[allow(missing_docs)]
|
||||
#[derive(Copy,Clone,Debug,PartialEq,Eq)]
|
||||
$(#$meta)*
|
||||
pub enum $name {
|
||||
$( $(#$field_meta)* $field ),*
|
||||
}
|
||||
|
||||
$(
|
||||
impl From<$field> for $name {
|
||||
fn from(_:$field) -> Self {
|
||||
Self::$field
|
||||
}
|
||||
}
|
||||
|
||||
impl From<PhantomData<$field>> for $name {
|
||||
fn from(_:PhantomData<$field>) -> Self {
|
||||
Self::$field
|
||||
}
|
||||
}
|
||||
)*
|
||||
}
|
||||
}
|
||||
|
||||
/// Defines singletons and an associated enum type.
|
||||
/// It expands to the same as `define_singletons` and `define_singleton_enum_from`.
|
||||
#[macro_export]
|
||||
macro_rules! define_singleton_enum {
|
||||
(
|
||||
$(#$meta:tt)*
|
||||
$name:ident {
|
||||
$( $(#$field_meta:tt)* $field:ident ),* $(,)?
|
||||
}
|
||||
) => {
|
||||
$crate::define_singletons! { $($(#$field_meta)* $field),* }
|
||||
$crate::define_singleton_enum_from! { $(#$meta)* $name {$($(#$field_meta)* $field),*}}
|
||||
}
|
||||
}
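// A minimal usage sketch of the macro above (illustrative only, not part of these sources;
// the names come from the doc examples of the two sub-macros). A single invocation defines
// the `Foo` and `Bar` singletons, the `MyEnum` type, and the `From` conversions between them.
define_singleton_enum! {
    MyEnum {
        /// A Foo!
        Foo,
        /// A Bar!
        Bar,
    }
}

#[test]
fn singleton_enum_example() {
    let value: MyEnum = Foo.into();
    assert_eq!(value, MyEnum::Foo);
}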
|
@ -1,174 +0,0 @@
|
||||
#![feature(generators)]
|
||||
#![feature(type_alias_impl_trait)]
|
||||
|
||||
use enso_shapely::*;
|
||||
|
||||
|
||||
|
||||
// =============
|
||||
// === Utils ===
|
||||
// =============
|
||||
|
||||
/// To fail compilation if `T` is not `IntoIterator`.
|
||||
fn is_into_iterator<T: IntoIterator>(){}
|
||||
|
||||
fn to_vector<T>(t: T) -> Vec<T::Item>
|
||||
where T : IntoIterator,
|
||||
T::Item: Copy {
|
||||
t.into_iter().collect()
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =====================================
|
||||
// === Struct with single type param ===
|
||||
// =====================================
|
||||
|
||||
#[derive(Iterator, IteratorMut, Eq, PartialEq, Debug)]
|
||||
pub struct PairTT<T>(T, T);
|
||||
|
||||
#[test]
|
||||
fn derive_iterator_single_t() {
|
||||
is_into_iterator::<& PairTT<i32>>();
|
||||
is_into_iterator::<&mut PairTT<i32>>();
|
||||
|
||||
let get_pair = || PairTT(4, 49);
|
||||
|
||||
// just collect values
|
||||
let pair = get_pair();
|
||||
let collected = pair.iter().copied().collect::<Vec<i32>>();
|
||||
assert_eq!(collected, vec![4, 49]);
|
||||
|
||||
// IntoIterator for &mut Val
|
||||
let mut pair = get_pair();
|
||||
for i in &mut pair {
|
||||
*i = *i + 1
|
||||
}
|
||||
assert_eq!(pair, PairTT(5, 50));
|
||||
|
||||
// iter_mut
|
||||
for i in pair.iter_mut() {
|
||||
*i = *i + 1
|
||||
}
|
||||
assert_eq!(pair, PairTT(6, 51));
|
||||
|
||||
// IntoIterator for & Val
|
||||
let pair = get_pair(); // not mut anymore
|
||||
let mut sum = 0;
|
||||
for i in &pair {
|
||||
sum += i;
|
||||
}
|
||||
assert_eq!(sum, pair.0 + pair.1)
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ===================================
|
||||
// === Struct with two type params ===
|
||||
// ===================================
|
||||
|
||||
#[derive(Iterator, IteratorMut, Eq, PartialEq, Debug)]
|
||||
pub struct PairUV<U,V>(U,V);
|
||||
|
||||
#[test]
|
||||
fn two_params() {
|
||||
// verify that iter uses only the last type param field
|
||||
let pair = PairUV(5, 10);
|
||||
assert_eq!(to_vector(pair.iter().copied()), vec![10]);
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ======================================
|
||||
// === Struct without any type params ===
|
||||
// ======================================
|
||||
|
||||
#[derive(Iterator, Eq, PartialEq, Debug)]
|
||||
pub struct Monomorphic(i32);
|
||||
|
||||
#[test]
|
||||
fn no_params() {
|
||||
    // `derive(Iterator)` is a no-op for structures with no type parameters.
|
||||
    // We just make sure that it does not cause a compilation error.
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ========================
|
||||
// === Enumeration Type ===
|
||||
// ========================
|
||||
|
||||
#[derive(Iterator)]
|
||||
#[warn(dead_code)] // value is never read and shouldn't be
|
||||
pub struct Unrecognized{ pub value : String }
|
||||
|
||||
#[derive(Iterator)]
|
||||
pub enum Foo<U, T> {
|
||||
Con1(PairUV<U, T>),
|
||||
Con2(PairTT<T>),
|
||||
Con3(Unrecognized)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn enum_is_into_iterator() {
|
||||
is_into_iterator::<&Foo<i32, i32>>();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn enum_iter1() {
|
||||
let v = Foo::Con1(PairUV(4, 50));
|
||||
let mut v_iter = v.into_iter();
|
||||
assert_eq!(*v_iter.next().unwrap(),50);
|
||||
assert!(v_iter.next().is_none());
|
||||
}
|
||||
#[test]
|
||||
fn enum_iter2() {
|
||||
let v: Foo<i32, i32> = Foo::Con2(PairTT(6,60));
|
||||
let mut v_iter = v.into_iter();
|
||||
assert_eq!(*v_iter.next().unwrap(),6);
|
||||
assert_eq!(*v_iter.next().unwrap(),60);
|
||||
assert!(v_iter.next().is_none());
|
||||
}
|
||||
#[test]
|
||||
fn enum_iter3() {
|
||||
let v: Foo<i32, i32> = Foo::Con3(Unrecognized{value:"foo".into()});
|
||||
let mut v_iter = v.into_iter();
|
||||
assert!(v_iter.next().is_none());
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =======================
|
||||
// === Dependent Types ===
|
||||
// =======================
|
||||
|
||||
#[derive(Iterator)]
|
||||
#[derive(IteratorMut)]
|
||||
pub struct DependentTest<U, T> {
|
||||
a:T,
|
||||
b:(T,U,PairUV<U, T>),
|
||||
// is never used, as it doesn't depend on `T` (last param)
|
||||
#[allow(dead_code)]
|
||||
c:PairTT<U>,
|
||||
d:(i32, Option<Vec<T>>),
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn dependent_test_iter() {
|
||||
let val = DependentTest{
|
||||
a : 1,
|
||||
b : (2,3,PairUV(4,5)),
|
||||
c : PairTT(6,6),
|
||||
d : (7, Some(vec![8,9])),
|
||||
};
|
||||
let mut v_iter = val.into_iter();
|
||||
assert_eq!(*v_iter.next().unwrap(), 1);
|
||||
assert_eq!(*v_iter.next().unwrap(), 2);
|
||||
// 3 is `U` in tuple
|
||||
// 4 is `U` in <U,T> pair
|
||||
assert_eq!(*v_iter.next().unwrap(), 5);
|
||||
// 7 is `i32` in tuple
|
||||
assert_eq!(*v_iter.next().unwrap(), 8);
|
||||
assert_eq!(*v_iter.next().unwrap(), 9);
|
||||
assert!(v_iter.next().is_none());
|
||||
}
|
@ -1,54 +0,0 @@
|
||||
// This module contains dead code. Its purpose is to make sure that it compiles.
|
||||
#![allow(dead_code)]
|
||||
|
||||
use enso_prelude::*;
|
||||
|
||||
#[derive(Clone,CloneRef)] struct StructUnit;
|
||||
|
||||
#[derive(Clone,CloneRef)] struct StructUnnamedEmpty();
|
||||
|
||||
#[derive(Clone,CloneRef)] struct StructUnnamed(Rc<i32>,Rc<String>);
|
||||
|
||||
#[derive(Clone,CloneRef)] struct StructNamedEmpty{}
|
||||
|
||||
#[derive(Clone,CloneRef)] struct StructNamed{named0:Rc<i32>,named1:Rc<String>}
|
||||
|
||||
#[derive(Clone,CloneRef)] enum EnumEmpty {}
|
||||
|
||||
#[derive(Clone,CloneRef)] enum Enum {
|
||||
VariantUnit,
|
||||
VariantNamedEmpty {},
|
||||
VariantNamed {named0:Rc<i32>,named1:Rc<String>},
|
||||
VariantUnnamedEmpty(),
|
||||
VariantUnnamed(Rc<i32>,Rc<String>),
|
||||
}
|
||||
|
||||
#[derive(CloneRef,Derivative)]
|
||||
#[derivative(Clone(bound=""))]
|
||||
struct StructUnnamedUnbound<T>(Rc<T>);
|
||||
|
||||
#[derive(CloneRef,Clone)]
|
||||
#[clone_ref(bound="T:CloneRef")]
|
||||
struct StructUnnamedBound<T>(T);
|
||||
|
||||
#[derive(CloneRef,Clone)]
|
||||
#[clone_ref(bound="T:CloneRef,U:CloneRef")]
|
||||
struct StructUnnamedBoundTwoPatams<T,U>(T,U);
|
||||
|
||||
#[derive(Clone,CloneRef)]
|
||||
#[clone_ref(bound="T:Clone+Display")]
|
||||
struct StructBoundGeneric<T:Display>(Rc<T>);
|
||||
|
||||
#[derive(CloneRef,Derivative)]
|
||||
#[derivative(Clone(bound=""))]
|
||||
// Note: CloneRef "knows" about `Display` bound.
|
||||
struct StructGenericLifetime<'t>(Rc<&'t String>);
|
||||
|
||||
#[derive(CloneRef,Derivative)]
|
||||
#[derivative(Clone(bound=""))]
|
||||
struct StructWhereClause<T>(Rc<T>) where T:Debug;
|
||||
|
||||
#[derive(CloneRef,Clone)]
|
||||
#[clone_ref(bound="T:CloneRef")]
|
||||
// Here derive macro must correctly merge user-provided bound, generics list bound and where clause.
|
||||
struct StructVariousBounds<T:Display>(T) where T:Debug;
|
@ -1,39 +0,0 @@
|
||||
[package]
|
||||
name = "enso-shapely-macros"
|
||||
version = "0.1.0"
|
||||
authors = ["Enso Team <enso-dev@enso.org>"]
|
||||
edition = "2018"
|
||||
|
||||
description = "Automated typeclass derivation."
|
||||
readme = "README.md"
|
||||
homepage = "https://github.com/enso-org/enso/lib/rust/shapely/macros/"
|
||||
repository = "https://github.com/enso-org/enso"
|
||||
license-file = "../../../../LICENSE"
|
||||
|
||||
keywords = ["typeclass", "deriving", "macro"]
|
||||
categories = ["algorithms"]
|
||||
|
||||
publish = true
|
||||
|
||||
[lib]
|
||||
proc-macro = true
|
||||
|
||||
[features]
|
||||
default = []
|
||||
|
||||
[dependencies]
|
||||
enso-macro-utils = { version = "0.1.0" , path = "../../enso-macro-utils" }
|
||||
proc-macro2 = "1.0"
|
||||
quote = "1.0"
|
||||
Inflector = "0.11.4"
|
||||
itertools = "0.8.1"
|
||||
boolinator = "2.4.0"
|
||||
|
||||
[dependencies.syn]
|
||||
version = "1.0"
|
||||
features = [
|
||||
'extra-traits', 'visit', 'full'
|
||||
]
|
||||
|
||||
[dev-dependencies]
|
||||
wasm-bindgen-test = "0.2"
|
@ -1,3 +0,0 @@
|
||||
# Shapely Macros
|
||||
|
||||
This crate provides macros for typeclass derivation.
|
@ -1,224 +0,0 @@
|
||||
use crate::prelude::*;
|
||||
|
||||
use enso_macro_utils::field_names;
|
||||
use enso_macro_utils::identifier_sequence;
|
||||
use enso_macro_utils::index_sequence;
|
||||
use enso_macro_utils::path_matching_ident;
|
||||
use syn::Attribute;
|
||||
use syn::DeriveInput;
|
||||
use syn::Data;
|
||||
use syn::DataEnum;
|
||||
use syn::DataStruct;
|
||||
use syn::Fields;
|
||||
use syn::Ident;
|
||||
use syn::Lit;
|
||||
use syn::Meta;
|
||||
use syn::MetaNameValue;
|
||||
use syn::NestedMeta;
|
||||
use syn::Variant;
|
||||
use syn::WhereClause;
|
||||
use syn::WherePredicate;
|
||||
|
||||
|
||||
|
||||
// ==============
|
||||
// === Consts ===
|
||||
// ==============
|
||||
|
||||
/// Name of the custom attribute that allows customizing the behavior of the generated `CloneRef`
|
||||
/// implementation.
|
||||
const CLONE_REF_ATTR:&str = "clone_ref";
|
||||
|
||||
/// Name of the property within the customization attribute that allows defining custom bounds for
|
||||
/// the generated `CloneRef` implementation.
|
||||
const BOUND_NAME:&str = "bound";
|
||||
|
||||
|
||||
|
||||
// ============================
|
||||
// === CloneRef for structs ===
|
||||
// ============================
|
||||
|
||||
/// `clone_ref` function body for a given `struct` definition.
|
||||
pub fn body_for_struct(ident:&Ident, data:&DataStruct) -> TokenStream {
|
||||
match data.fields {
|
||||
Fields::Unit =>
|
||||
// Foo
|
||||
quote!( #ident ),
|
||||
Fields::Unnamed(ref fields) => {
|
||||
let indices = index_sequence(fields.unnamed.len());
|
||||
// Foo(self.0.clone_ref())
|
||||
quote!(
|
||||
#ident(#(self.#indices.clone_ref()),*)
|
||||
)
|
||||
}
|
||||
Fields::Named(ref fields) => {
|
||||
let names = field_names(fields);
|
||||
// Foo { field0 : self.field0.clone_ref() }
|
||||
quote!(
|
||||
#ident {
|
||||
#(#names : self.#names.clone_ref()),*
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ==========================
|
||||
// === CloneRef for enums ===
|
||||
// ==========================
|
||||
|
||||
/// Prepares a match arm for a single variant that `clone_ref`s such a value.
|
||||
pub fn arm_for_variant(data_ident:&Ident,variant:&Variant) -> TokenStream {
|
||||
let fields = &variant.fields;
|
||||
let variant_ident = &variant.ident;
|
||||
match fields {
|
||||
Fields::Unit => {
|
||||
// Enum::Var => Enum::Var
|
||||
quote!(
|
||||
#data_ident::#variant_ident => #data_ident::#variant_ident
|
||||
)
|
||||
}
|
||||
Fields::Named(fields) => {
|
||||
let names = field_names(fields);
|
||||
// Enum::Var {field0} => Enum::Var {field0 : field0.clone_ref()}
|
||||
quote!(
|
||||
#data_ident::#variant_ident { #(#names),* } =>
|
||||
#data_ident::#variant_ident {
|
||||
#( #names : #names.clone_ref() ),*
|
||||
}
|
||||
)
|
||||
}
|
||||
Fields::Unnamed(fields) => {
|
||||
let names = identifier_sequence(fields.unnamed.len());
|
||||
// Enum::Var(field0) => Enum::Var(field0.clone_ref())
|
||||
quote!(
|
||||
#data_ident::#variant_ident(#(#names),*) =>
|
||||
#data_ident::#variant_ident(
|
||||
#( #names.clone_ref() ),*
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// `clone_ref` function body for a given `enum` definition.
|
||||
pub fn body_for_enum(ident:&Ident, data:&DataEnum) -> TokenStream {
|
||||
if data.variants.is_empty() {
|
||||
quote!(panic!("There cannot exist value of empty enum, so its clone_ref must not be called."))
|
||||
} else {
|
||||
let make_arm = |variant| arm_for_variant(ident,variant);
|
||||
let arms = data.variants.iter().map(make_arm);
|
||||
quote!(
|
||||
match self { #(#arms),* }
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ============================
|
||||
// === Bounds customization ===
|
||||
// ============================
|
||||
|
||||
/// Checks if the given attribute is our customization attribute.
|
||||
pub fn is_clone_ref_customization(attr:&Attribute) -> bool {
|
||||
path_matching_ident(&attr.path,CLONE_REF_ATTR)
|
||||
}
|
||||
|
||||
/// Checks if the given Meta name-val pair defines user-provided bounds.
|
||||
pub fn is_custom_bound(name_val:&MetaNameValue) -> bool {
|
||||
path_matching_ident(&name_val.path,BOUND_NAME)
|
||||
}
|
||||
|
||||
/// If this is our customization attribute, we retrieve user-provided bounds for the generated
|
||||
/// `CloneRef` implementation.
|
||||
///
|
||||
/// Returns `None` if this is a third-party attribute.
|
||||
/// Panics if this is our attribute but the syntax is not correct.
|
||||
pub fn clone_ref_bounds(attr:&Attribute) -> Option<Vec<WherePredicate>> {
|
||||
// Silently ignore foreign attributes. Be picky only about our one.
|
||||
is_clone_ref_customization(attr).then(())?;
|
||||
|
||||
let meta = attr.parse_meta().expect("Failed to parse attribute contents.");
|
||||
let list = match meta {
|
||||
Meta::List(ml) => ml.nested,
|
||||
_ => panic!("Attribute contents does not conform to meta item."),
|
||||
};
|
||||
if list.len() > 1 {
|
||||
panic!("Only a single entry within `{}` attribute is allowed.",CLONE_REF_ATTR);
|
||||
}
|
||||
let bound_value = match list.first() {
|
||||
Some(NestedMeta::Meta(Meta::NameValue(name_val))) => {
|
||||
if is_custom_bound(name_val) {
|
||||
&name_val.lit
|
||||
} else {
|
||||
panic!("`{}` attribute can define value only for `{}`.",CLONE_REF_ATTR,BOUND_NAME)
|
||||
}
|
||||
}
|
||||
Some(_) =>
|
||||
panic!("`{}` attribute must contain a single name=value assignment.",CLONE_REF_ATTR),
|
||||
None =>
|
||||
panic!("`{}` attribute must not be empty.",CLONE_REF_ATTR),
|
||||
};
|
||||
let bound_str = if let Lit::Str(lit_str) = bound_value {
|
||||
lit_str
|
||||
} else {
|
||||
panic!("`{}` value must be a string literal describing `where` predicates.",BOUND_NAME)
|
||||
};
|
||||
let bounds_text = format!("where {}", bound_str.value());
|
||||
let bounds = syn::parse_str::<WhereClause>(&bounds_text);
|
||||
let bounds = bounds.unwrap_or_else(|_| {
|
||||
panic!("Failed to parse user-provided where clause: `{}`.",bounds_text)
|
||||
});
|
||||
let ret = bounds.predicates.into_iter().collect();
|
||||
Some(ret)
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ===================
|
||||
// === Entry Point ===
|
||||
// ===================
|
||||
|
||||
/// Derives `CloneRef` implementation, refer to `crate::derive_clone_ref` for details.
|
||||
pub fn derive
|
||||
(input:proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
let decl = syn::parse_macro_input!(input as DeriveInput);
|
||||
let ident = &decl.ident;
|
||||
let body = match &decl.data {
|
||||
Data::Struct(data_struct) => body_for_struct(ident,data_struct),
|
||||
Data::Enum(data_enum) => body_for_enum(ident,data_enum),
|
||||
Data::Union(_) =>
|
||||
panic!("CloneRef cannot be derived for an untagged union input."),
|
||||
};
|
||||
|
||||
let (impl_generics, ty_generics, inherent_where_clause_opt) = &decl.generics.split_for_impl();
|
||||
|
||||
// Where clause must contain both user-provided bounds and bounds inherent due to type
|
||||
// declaration-level where clause.
|
||||
let user_requested_bounds = decl.attrs.iter().filter_map(clone_ref_bounds).flatten();
|
||||
let mut where_clause = enso_macro_utils::new_where_clause(user_requested_bounds);
|
||||
for inherent_where_clause in inherent_where_clause_opt {
|
||||
where_clause.predicates.extend(inherent_where_clause.predicates.iter().cloned())
|
||||
}
|
||||
|
||||
let output = quote!{
|
||||
impl #impl_generics CloneRef for #ident #ty_generics
|
||||
#where_clause {
|
||||
fn clone_ref(&self) -> Self {
|
||||
#body
|
||||
}
|
||||
}
|
||||
|
||||
impl #impl_generics From<& #ident #ty_generics> for #ident #ty_generics
|
||||
#where_clause {
|
||||
fn from(t:& #ident #ty_generics) -> Self {
|
||||
t.clone_ref()
|
||||
}
|
||||
}
|
||||
};
|
||||
output.into()
|
||||
}
|
@ -1,449 +0,0 @@
|
||||
use crate::prelude::*;
|
||||
|
||||
use enso_macro_utils::fields_list;
|
||||
use enso_macro_utils::field_ident_token;
|
||||
use enso_macro_utils::type_depends_on;
|
||||
use enso_macro_utils::type_matches;
|
||||
use enso_macro_utils::ty_path_type_args;
|
||||
use enso_macro_utils::variant_depends_on;
|
||||
use boolinator::Boolinator;
|
||||
use inflector::Inflector;
|
||||
use itertools::Itertools;
|
||||
|
||||
|
||||
|
||||
// =============
|
||||
// === IsMut ===
|
||||
// =============
|
||||
|
||||
/// Describes whether a mutable or immutable iterator is being derived.
|
||||
#[derive(Clone,Copy,Debug,PartialEq)]
|
||||
pub enum IsMut {
|
||||
Mutable,
|
||||
Immutable,
|
||||
}
|
||||
|
||||
impl IsMut {
|
||||
fn is_mut(self) -> bool {
|
||||
self == IsMut::Mutable
|
||||
}
|
||||
|
||||
/// Returns `mut` token for mutable iterator derivation.
|
||||
fn to_token(self) -> Option<syn::Token![mut]> {
|
||||
self.is_mut().as_some(<syn::Token![mut]>::default())
|
||||
}
|
||||
|
||||
/// Name of method for generating iterator.
|
||||
fn iter_method(self) -> TokenStream {
|
||||
if self.is_mut() {
|
||||
quote!(iter_mut)
|
||||
} else {
|
||||
quote!(iter)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// ======================
|
||||
// === DependentValue ===
|
||||
// ======================
|
||||
|
||||
/// A value dependent on our target parameter.
|
||||
///
|
||||
/// Helper methods can be used to generate code yielding values from this.
|
||||
pub struct DependentValue<'t> {
|
||||
/// Type of the value (ref-stripped).
|
||||
pub ty : &'t syn::Type,
|
||||
/// Tokens yielding the value.
|
||||
pub value : TokenStream,
|
||||
/// Parameter type we want to iterate over.
|
||||
pub target_param: &'t syn::GenericParam,
|
||||
    /// Whether the value is yielded as a reference.
|
||||
pub through_ref : bool
|
||||
}
|
||||
|
||||
impl<'t> DependentValue<'t> {
|
||||
    /// Returns `Some` when the type is dependent and `None` otherwise.
|
||||
pub fn try_new
|
||||
(ty: &'t syn::Type, value:TokenStream, target_param:&'t syn::GenericParam)
|
||||
-> Option<DependentValue<'t>> {
|
||||
if type_depends_on(ty, target_param) {
|
||||
Some(DependentValue{ty,value,target_param,through_ref:false})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
/// Collects dependent sub-values from the tuple value.
|
||||
pub fn collect_tuple
|
||||
(tuple:&'t syn::TypeTuple, target_param:&'t syn::GenericParam)
|
||||
-> Vec<DependentValue<'t>> {
|
||||
tuple.elems.iter().enumerate().filter_map(|(ix,ty)| {
|
||||
let ix = syn::Index::from(ix);
|
||||
let ident = quote!(t.#ix);
|
||||
DependentValue::try_new(ty,ident,target_param)
|
||||
}).collect()
|
||||
}
|
||||
|
||||
    /// Generates code yielding all values of the target type accessible from this
|
||||
/// value.
|
||||
pub fn yield_value(&self, is_mut:IsMut) -> TokenStream {
|
||||
match self.ty {
|
||||
syn::Type::Tuple(tuple) => self.yield_tuple_value(tuple, is_mut),
|
||||
syn::Type::Path(path) => {
|
||||
if type_matches(&self.ty, &self.target_param) {
|
||||
self.yield_direct_value(is_mut)
|
||||
} else {
|
||||
self.yield_dependent_ty_path_value(path,is_mut)
|
||||
}
|
||||
}
|
||||
_ =>
|
||||
panic!("Don't know how to yield value of type {} from type {}"
|
||||
, repr(&self.target_param), repr(&self.ty)),
|
||||
}
|
||||
}
|
||||
|
||||
    /// Code yielding a value that directly matches the target parameter type.
|
||||
pub fn yield_direct_value
|
||||
(&self, is_mut:IsMut) -> TokenStream {
|
||||
let value = &self.value;
|
||||
let opt_mut = is_mut.to_token();
|
||||
let opt_ref = (!self.through_ref).as_some(quote!( & #opt_mut ));
|
||||
|
||||
// yield &mut value;
|
||||
quote!( yield #opt_ref #value; )
|
||||
}
|
||||
|
||||
    /// Code yielding values from a tuple dependent on the target parameter type.
|
||||
pub fn yield_tuple_value
|
||||
(&self, ty:&syn::TypeTuple,is_mut:IsMut)
|
||||
-> TokenStream {
|
||||
let value = &self.value;
|
||||
let mut_kwd = is_mut.to_token();
|
||||
let subfields = DependentValue::collect_tuple(ty, self.target_param);
|
||||
let yield_sub = subfields.iter().map(|f| {
|
||||
f.yield_value(is_mut)
|
||||
}).collect_vec();
|
||||
|
||||
// yield &mut t.0;
|
||||
// yield &mut t.2;
|
||||
quote!( {
|
||||
let t = & #mut_kwd #value;
|
||||
#(#yield_sub)*
|
||||
})
|
||||
}
|
||||
|
||||
/// Obtain the type of iterator-yielded value.
|
||||
///
|
||||
/// Panics when given a type which is not supported for derivation, like
|
||||
    /// having a dependent type in a non-last position.
|
||||
pub fn type_path_elem_type(&self, ty_path:&'t syn::TypePath) -> &syn::Type {
|
||||
let mut type_args = ty_path_type_args(ty_path);
|
||||
let last_arg = match type_args.pop() {
|
||||
Some(arg) => arg,
|
||||
None => panic!("Type {} has no segments!", repr(&ty_path))
|
||||
};
|
||||
|
||||
// Last and only last type argument is dependent.
|
||||
for non_last_segment in type_args {
|
||||
assert!(!type_depends_on(non_last_segment, self.target_param)
|
||||
, "Type {} has non-last argument {} that depends on {}"
|
||||
, repr(ty_path)
|
||||
, repr(non_last_segment)
|
||||
, repr(self.target_param)
|
||||
);
|
||||
}
|
||||
assert!(type_depends_on(last_arg, self.target_param));
|
||||
last_arg
|
||||
}
|
||||
|
||||
/// Code yielding values from data dependent on the target parameter type.
|
||||
pub fn yield_dependent_ty_path_value
|
||||
(&self, ty_path:&'t syn::TypePath, is_mut:IsMut)
|
||||
-> TokenStream {
|
||||
let opt_mut = is_mut.to_token();
|
||||
let elem_ty = self.type_path_elem_type(ty_path);
|
||||
let elem = quote!(t);
|
||||
|
||||
let elem_info = DependentValue{
|
||||
value : elem.clone(),
|
||||
target_param : self.target_param,
|
||||
ty : elem_ty,
|
||||
through_ref : true,
|
||||
};
|
||||
let yield_elem = elem_info.yield_value(is_mut);
|
||||
let value = &self.value;
|
||||
let iter_method = if is_mut.is_mut() {
|
||||
quote!(iter_mut)
|
||||
} else {
|
||||
quote!(iter)
|
||||
};
|
||||
|
||||
quote! {
|
||||
for #opt_mut #elem in #value.#iter_method() {
|
||||
#yield_elem
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Describe relevant fields of the struct definition.
|
||||
pub fn collect_struct
|
||||
(data:&'t syn::DataStruct, target_param:&'t syn::GenericParam)
|
||||
-> Vec<DependentValue<'t>> {
|
||||
let fields = fields_list(&data.fields);
|
||||
let dep_field = fields.iter().enumerate().filter_map(|(i,f)| {
|
||||
let ident = field_ident_token(f,i.into());
|
||||
let value = quote!(t.#ident);
|
||||
DependentValue::try_new(&f.ty,value,target_param)
|
||||
});
|
||||
dep_field.collect()
|
||||
}
|
||||
}
|
||||
|
||||
/// Parts of the derivation output that are specific to an enum or struct target.
|
||||
pub struct OutputParts<'ast> {
|
||||
pub iterator_tydefs : TokenStream,
|
||||
pub iter_body : TokenStream,
|
||||
pub iterator_params : Vec<&'ast syn::GenericParam>,
|
||||
}
|
||||
|
||||
/// Common data used when generating derived Iterator impls.
|
||||
///
|
||||
/// Examples are given for `pub struct Foo<S, T> { foo: T }`
|
||||
pub struct DerivingIterator<'ast> {
|
||||
pub data : &'ast syn::Data, // { foo: T }
|
||||
pub ident : &'ast syn::Ident, // Foo
|
||||
pub params : Vec<&'ast syn::GenericParam>, // <S, T>
|
||||
pub t_iterator : syn::Ident, // FooIterator{Mut}
|
||||
pub iterator : syn::Ident, // foo_iterator{_mut}
|
||||
pub target_param : &'ast syn::GenericParam, // T
|
||||
pub is_mut : IsMut, // are we mutable iterator?
|
||||
}
|
||||
|
||||
impl DerivingIterator<'_> {
|
||||
pub fn new<'ast>
|
||||
( decl :&'ast syn::DeriveInput
|
||||
, target_param:&'ast syn::GenericParam
|
||||
, is_mut :IsMut
|
||||
) -> DerivingIterator<'ast> {
|
||||
let mut_or_not = if is_mut.is_mut() { "Mut" } else { "" };
|
||||
let data = &decl.data;
|
||||
let params = decl.generics.params.iter().collect();
|
||||
let ident = &decl.ident;
|
||||
let t_iterator = format!("{}Iterator{}", ident, mut_or_not);
|
||||
let iterator = t_iterator.to_snake_case();
|
||||
let t_iterator = syn::Ident::new(&t_iterator, Span::call_site());
|
||||
let iterator = syn::Ident::new(&iterator , Span::call_site());
|
||||
DerivingIterator {
|
||||
data,
|
||||
ident,
|
||||
params,
|
||||
t_iterator,
|
||||
iterator,
|
||||
target_param,
|
||||
is_mut,
|
||||
}
|
||||
}
|
||||
|
||||
/// Handles all enum-specific parts.
|
||||
pub fn prepare_parts_enum(&self, data:&syn::DataEnum) -> OutputParts {
|
||||
let opt_mut = &self.is_mut.to_token();
|
||||
let t_iterator = &self.t_iterator;
|
||||
let ident = &self.ident;
|
||||
let target_param = &self.target_param;
|
||||
let iterator_params = vec!(self.target_param);
|
||||
let iterator_tydefs = quote!(
|
||||
// type FooIterator<'t, U> =
|
||||
// Box<dyn Iterator<Item=&'t U> + 't>;
|
||||
// type FooIteratorMut<'t, U> =
|
||||
// Box<dyn Iterator<Item=&'t mut U> + 't>;
|
||||
type #t_iterator<'t, #(#iterator_params),*> =
|
||||
Box<dyn Iterator<Item=&'t #opt_mut #target_param> + 't>;
|
||||
);
|
||||
// For types that use target type parameter, refer to their
|
||||
// `IntoIterator` implementation. Otherwise, use `EmptyIterator`.
|
||||
let arms = data.variants.iter().map(|var| {
|
||||
let con = &var.ident;
|
||||
let iter = if variant_depends_on(var, target_param) {
|
||||
quote!(elem.into_iter())
|
||||
} else {
|
||||
quote!(enso_shapely::EmptyIterator::new())
|
||||
};
|
||||
quote!(#ident::#con(elem) => Box::new(#iter))
|
||||
});
|
||||
|
||||
// match t {
|
||||
// Foo::Con1(elem) => Box::new(elem.into_iter()),
|
||||
// Foo::Con2(elem) => Box::new(enso-shapely::EmptyIterator::new()),
|
||||
// }
|
||||
let iter_body = quote!( match t { #(#arms,)* } );
|
||||
OutputParts{iterator_tydefs,iter_body,iterator_params}
|
||||
}
|
||||
|
||||
/// Handles all struct-specific parts.
|
||||
pub fn prepare_parts_struct(&self, data:&syn::DataStruct) -> OutputParts {
|
||||
let opt_mut = &self.is_mut.to_token();
|
||||
let t_iterator = &self.t_iterator;
|
||||
let target_param = &self.target_param;
|
||||
let iterator_params = self.params.clone();
|
||||
let iterator_tydefs = quote!(
|
||||
// type FooIterator<'t, T> = impl Iterator<Item = &'t T>;
|
||||
// type FooIteratorMut<'t, T> = impl Iterator<Item = &'t mut T>;
|
||||
type #t_iterator<'t, #(#iterator_params),*> =
|
||||
impl Iterator<Item = &'t #opt_mut #target_param>;
|
||||
);
|
||||
let matched_fields = DependentValue::collect_struct(data, target_param);
|
||||
let yield_fields = matched_fields.iter().map(|field| {
|
||||
field.yield_value(self.is_mut)
|
||||
}).collect_vec();
|
||||
|
||||
// enso-shapely::EmptyIterator::new()
|
||||
let empty_body = quote! { enso_shapely::EmptyIterator::new() };
|
||||
|
||||
// enso-shapely::GeneratingIterator(move || {
|
||||
// yield &t.foo;
|
||||
// })
|
||||
// enso-shapely::GeneratingIterator(move || {
|
||||
// yield &mut t.foo;
|
||||
// })
|
||||
let body = quote! {
|
||||
enso_shapely::GeneratingIterator
|
||||
(move || { #(#yield_fields)* })
|
||||
};
|
||||
|
||||
let iter_body = if matched_fields.is_empty() {
|
||||
empty_body
|
||||
} else {
|
||||
body
|
||||
};
|
||||
OutputParts{iterator_tydefs,iter_body,iterator_params}
|
||||
}
|
||||
|
||||
/// Handles common (between enum and struct) code and assembles it all
|
||||
/// into a final derivation output.
|
||||
#[allow(clippy::cognitive_complexity)]
|
||||
pub fn assemble_output(&self, parts:OutputParts) -> TokenStream {
|
||||
let iterator_tydefs = &parts.iterator_tydefs;
|
||||
let iter_body = &parts.iter_body;
|
||||
let iterator_params = &parts.iterator_params;
|
||||
let opt_mut = &self.is_mut.to_token();
|
||||
let iterator = &self.iterator;
|
||||
let t_iterator = &self.t_iterator;
|
||||
let params = &self.params;
|
||||
let ident = &self.ident;
|
||||
let target_param = &self.target_param;
|
||||
let iter_method = &self.is_mut.iter_method();
|
||||
|
||||
quote!{
|
||||
#iterator_tydefs
|
||||
|
||||
// pub fn foo_iterator<'t, T>
|
||||
// (t: &'t Foo<T>) -> FooIterator<'t, T> {
|
||||
// enso-shapely::GeneratingIterator(move || {
|
||||
// yield &t.foo;
|
||||
// })
|
||||
// }
|
||||
// pub fn foo_iterator_mut<'t, T>
|
||||
// (t: &'t mut Foo<T>) -> FooIteratorMut<'t, T> {
|
||||
// enso-shapely::GeneratingIterator(move || {
|
||||
// yield &t.foo;
|
||||
// })
|
||||
// }
|
||||
pub fn #iterator<'t, #(#params),*>
|
||||
(t: &'t #opt_mut #ident<#(#params),*>)
|
||||
-> #t_iterator<'t, #(#iterator_params),*> {
|
||||
#iter_body
|
||||
}
|
||||
|
||||
// impl<'t, T>
|
||||
// IntoIterator for &'t Foo<T> {
|
||||
// type Item = &'t T;
|
||||
// type IntoIter = FooIterator<'t, T>;
|
||||
// fn into_iter(self) -> FooIterator<'t, T> {
|
||||
// foo_iterator(self)
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// impl<'t, T>
|
||||
// IntoIterator for &'t mut Foo<T> {
|
||||
// type Item = &'t mut T;
|
||||
// type IntoIter = FooIteratorMut<'t, T>;
|
||||
// fn into_iter(self) -> FooIteratorMut<'t, T> {
|
||||
// foo_iterator_mut(self)
|
||||
// }
|
||||
// }
|
||||
impl<'t, #(#params),*>
|
||||
IntoIterator for &'t #opt_mut #ident<#(#params),*> {
|
||||
type Item = &'t #opt_mut #target_param;
|
||||
type IntoIter = #t_iterator<'t, #(#iterator_params),*>;
|
||||
fn into_iter(self) -> #t_iterator<'t, #(#iterator_params),*> {
|
||||
#iterator(self)
|
||||
}
|
||||
}
|
||||
|
||||
// impl Foo<T> {
|
||||
// pub fn iter(&self) -> FooIterator<'_, T> {
|
||||
// #foo_iterator(self)
|
||||
// }
|
||||
// pub fn iter_mut(&mut self) -> FooIteratorMut<'_, T> {
|
||||
// #foo_iterator_mut (self)
|
||||
// }
|
||||
// }
|
||||
impl<#(#params),*> #ident<#(#params),*> {
|
||||
pub fn #iter_method
|
||||
(& #opt_mut self) -> #t_iterator<'_, #(#iterator_params),*> {
|
||||
#iterator(self)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Generates the code that derives desired iterator.
|
||||
pub fn output(&self) -> TokenStream {
|
||||
let parts = match self.data {
|
||||
syn::Data::Struct(data) => self.prepare_parts_struct(data),
|
||||
syn::Data::Enum (data) => self.prepare_parts_enum (data),
|
||||
_ =>
|
||||
panic!("Only Structs and Enums can derive(Iterator)!"),
|
||||
};
|
||||
self.assemble_output(parts)
|
||||
}
|
||||
}
|
||||
|
||||
/// Common implementation for deriving iterator through `derive(Iterator)` and
|
||||
/// `derive(IteratorMut)`.
|
||||
pub fn derive
|
||||
(input:proc_macro::TokenStream, is_mut:IsMut) -> proc_macro::TokenStream {
|
||||
let decl = syn::parse_macro_input!(input as syn::DeriveInput);
|
||||
let params = &decl.generics.params.iter().collect::<Vec<_>>();
|
||||
let output = match params.last() {
|
||||
Some(last_param) => {
|
||||
let der = DerivingIterator::new(&decl,last_param,is_mut);
|
||||
der.output()
|
||||
}
|
||||
None =>
|
||||
TokenStream::new(),
|
||||
};
|
||||
output.into()
|
||||
}
|
||||
|
||||
// Note [Expansion Example]
|
||||
// ~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
// In order to make the definition easier to read, an example expansion of the
|
||||
// following definition was provided for each quotation:
|
||||
//
|
||||
// #[derive(Iterator)]
|
||||
// pub struct Foo<S, T> { foo: T }
|
||||
//
|
||||
// When different output is generated for mutable and immutable content, both
|
||||
// expansions are presented.
|
||||
//
|
||||
// For examples that are enum-specific rather than struct-specific, the
|
||||
// following definition is assumed:
|
||||
//
|
||||
// #[derive(Iterator)]
|
||||
// pub enum Foo<T> {
|
||||
// Con1(Bar<T>),
|
||||
// Con2(Baz),
|
||||
// }
|
||||
|
@ -1,98 +0,0 @@
|
||||
//! This crate defines a custom derive macro `Iterator`. It should not be used
|
||||
//! directly, but only through the `enso-shapely` crate, as it provides utilities
|
||||
//! necessary for the generated code to compile.
|
||||
|
||||
#![feature(bool_to_option)]
|
||||
#![feature(exact_size_is_empty)]
|
||||
#![warn(missing_docs)]
|
||||
#![warn(trivial_casts)]
|
||||
#![warn(trivial_numeric_casts)]
|
||||
#![warn(unused_import_braces)]
|
||||
#![warn(unused_qualifications)]
|
||||
#![warn(unsafe_code)]
|
||||
#![warn(missing_copy_implementations)]
|
||||
#![warn(missing_debug_implementations)]
|
||||
|
||||
extern crate proc_macro;
|
||||
|
||||
mod derive_clone_ref;
|
||||
mod derive_iterator;
|
||||
mod overlappable;
|
||||
|
||||
mod prelude {
|
||||
pub use enso_macro_utils::repr;
|
||||
pub use proc_macro2::Span;
|
||||
pub use proc_macro2::TokenStream;
|
||||
pub use quote::quote;
|
||||
}
|
||||
|
||||
use crate::derive_iterator::IsMut;
|
||||
|
||||
/// For `struct Foo<T>` or `enum Foo<T>` this derive provides:
|
||||
/// * `IntoIterator` implementations for `&'t Foo<T>`, `iter` and `into_iter`
|
||||
/// methods.
|
||||
///
|
||||
/// The iterators will:
|
||||
/// * for structs: go over each field whose declared type is the same as the
|
||||
/// struct's last type parameter.
|
||||
/// * for enums: delegate to the current constructor's nested value's iterator.
|
||||
///
|
||||
/// Enums are required to use only single-element, tuple-like variants. This
|
||||
/// limitation should be lifted in the future.
|
||||
///
|
||||
/// Any dependent type stored in a struct or tuple, or wrapped in an enum, should depend
|
||||
/// only on its last type parameter. All dependent types that are neither
|
||||
/// tuples nor directly the yielded type are required to provide an `iter` method
|
||||
/// that returns a compatible iterator (possibly also derived).
|
||||
///
|
||||
/// Caller must have the following features enabled:
|
||||
/// ```
|
||||
/// #![feature(generators)]
|
||||
/// #![feature(type_alias_impl_trait)]
|
||||
/// ```
|
||||
///
|
||||
/// When used on a type that takes no type parameters, like `struct Foo`, it does
|
||||
/// nothing but yields no errors.
|
||||
#[proc_macro_derive(Iterator)]
|
||||
pub fn derive_iterator
|
||||
(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
derive_iterator::derive(input,IsMut::Immutable)
|
||||
}
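// A hedged usage sketch of the derive above, mirroring the iterator tests removed earlier
// in this commit (`Pair` is a hypothetical name; the consumer crate must enable the listed
// nightly features). Only fields whose type is the last type parameter (`T`) are iterated.
//
// #![feature(generators)]
// #![feature(type_alias_impl_trait)]
//
// use enso_shapely::*;
//
// #[derive(Iterator)]
// pub struct Pair<S, T>(S, T);
//
// let pair = Pair("skipped", 10);
// assert_eq!(pair.iter().copied().collect::<Vec<_>>(), vec![10]);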
|
||||
|
||||
/// Same as `derive(Iterator)` but generates mutable iterator.
|
||||
///
|
||||
/// It is separate, as some types allow deriving an immutable iterator but not the
|
||||
/// mutable one.
|
||||
#[proc_macro_derive(IteratorMut)]
|
||||
pub fn derive_iterator_mut
|
||||
(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
derive_iterator::derive(input,IsMut::Mutable)
|
||||
}
|
||||
|
||||
/// Derives a `CloneRef` implementation for the given type. It performs `clone_ref` on every member
|
||||
/// field. The input type must implement `Clone` and every one of its fields must implement `CloneRef`.
|
||||
///
|
||||
/// For generic types no bounds are introduced in the generated implementation. To customize this
|
||||
/// behavior, the user might add the `#[clone_ref(bound="…")]` attribute. Then the generated implementation
|
||||
/// will use the provided bounds.
|
||||
///
|
||||
/// Moreover, for a given struct `X` this macro also generates `impl From<&X> for X`, which uses
|
||||
/// `CloneRef` under the hood. The semantics of `CloneRef` make each object naturally provide a
|
||||
/// transformation from a reference to an owned type.
|
||||
///
|
||||
/// Supported inputs are structs (unit, named, unnamed) and enums (with unit, named, unnamed, and no
|
||||
/// variants at all). Unions are currently not supported.
|
||||
#[proc_macro_derive(CloneRef, attributes(clone_ref))]
|
||||
pub fn derive_clone_ref
|
||||
(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
|
||||
derive_clone_ref::derive(input)
|
||||
}
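// A brief, hedged sketch of the derive and its `bound` customization described above
// (`SharedNode` is a hypothetical name; the pattern matches the clone_ref test file removed
// earlier in this commit). Every field must itself be `CloneRef`, and the attribute overrides
// the bounds used by the generated impl; an `impl From<&SharedNode<T>> for SharedNode<T>` is
// generated as well.
//
// use std::rc::Rc;
// use enso_prelude::*;
//
// #[derive(Clone, CloneRef)]
// #[clone_ref(bound = "T:CloneRef")]
// struct SharedNode<T> {
//     payload : T,
//     label   : Rc<String>,
// }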
|
||||
|
||||
#[allow(missing_docs)]
|
||||
#[proc_macro_attribute]
|
||||
pub fn overlappable
|
||||
( attrs : proc_macro::TokenStream
|
||||
, input : proc_macro::TokenStream
|
||||
) -> proc_macro::TokenStream {
|
||||
overlappable::overlappable(attrs,input)
|
||||
}
|
@ -1,50 +0,0 @@
|
||||
use crate::prelude::*;
|
||||
|
||||
use proc_macro2::Ident;
|
||||
|
||||
pub fn overlappable
|
||||
( attrs : proc_macro::TokenStream
|
||||
, input : proc_macro::TokenStream
|
||||
) -> proc_macro::TokenStream {
|
||||
let _attrs: TokenStream = attrs.into();
|
||||
let decl = syn::parse_macro_input!(input as syn::ItemImpl);
|
||||
// let mut path = decl.trait_.unwrap().1.clone();
|
||||
// let path = path.segments.last_mut().iter().map(|ident| {
|
||||
// Ident::new(&format!("MarketCtx_{}", repr(ident)) , Span::call_site());
|
||||
// });
|
||||
|
||||
let mut marker_ctx_impl = decl;
|
||||
let mut trait_ = marker_ctx_impl.trait_.as_mut();
|
||||
trait_.iter_mut().for_each(|t| {
|
||||
let path = &mut t.1;
|
||||
path.segments.last_mut().iter_mut().for_each(|s| {
|
||||
let rr = repr(&s);
|
||||
s.ident = Ident::new(&format!("MarketCtx_{}", rr) , Span::call_site());
|
||||
});
|
||||
});
|
||||
|
||||
// let mut marker_ctx_impl = decl.clone();
|
||||
// let path = &mut marker_ctx_impl.trait_.as_mut().unwrap().1;
|
||||
// path.segments.last_mut().iter_mut().for_each(|s| {
|
||||
// let rr = repr(&s);
|
||||
// s.ident = Ident::new(&format!("MarketCtx_{}", rr) , Span::call_site());
|
||||
// });
|
||||
|
||||
// let name = repr(path);
|
||||
|
||||
// let marker_ctx_impl = syn::ItemImpl {
|
||||
// .. decl
|
||||
// };
|
||||
|
||||
|
||||
let _output_tmp = quote! {
|
||||
#marker_ctx_impl
|
||||
};
|
||||
let output = quote! {
|
||||
|
||||
};
|
||||
// println!("------------------");
|
||||
// println!("{}", output_tmp);
|
||||
output.into()
|
||||
}
|
||||
|
@ -22,10 +22,10 @@ test = true
|
||||
bench = true
|
||||
|
||||
[dependencies]
|
||||
enso-logger = { path = "../enso-logger", version = "0.1.0" }
|
||||
enso-prelude = { path = "../enso-prelude", version = "0.1.0" }
|
||||
lazy-reader = { path = "../lazy-reader", version = "0.1.0" }
|
||||
enso-macro-utils = { path = "../enso-macro-utils", version = "0.1.0" }
|
||||
enso-logger = { version = "0.1.1" }
|
||||
enso-prelude = { version = "0.1.3" }
|
||||
enso-lazy-reader = { version = "0.1.0" }
|
||||
enso-macro-utils = { version = "0.1.1" }
|
||||
|
||||
itertools = "0.8"
|
||||
proc-macro2 = "1.0.19"
|
||||
|
@ -4,6 +4,8 @@ use crate::automata::alphabet;
|
||||
use crate::automata::state;
|
||||
use crate::data::matrix::Matrix;
|
||||
|
||||
|
||||
|
||||
// =====================================
|
||||
// === Deterministic Finite Automata ===
|
||||
// =====================================
|
||||
|
@ -222,6 +222,8 @@ impl From<&NFA> for DFA {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ===========
|
||||
// == Tests ==
|
||||
// ===========
|
||||
|
@ -1,25 +0,0 @@
|
||||
[package]
|
||||
name = "lazy-reader"
|
||||
version = "0.1.0"
|
||||
authors = ["Enso Team <enso-dev@enso.org>"]
|
||||
edition = "2018"
|
||||
|
||||
description = "An efficient buffered reader."
|
||||
readme = "README.md"
|
||||
homepage = "https://github.com/enso-org/enso/lib/rust/lazy-reader"
|
||||
repository = "https://github.com/enso-org/enso"
|
||||
license-file = "../../../LICENSE"
|
||||
|
||||
keywords = ["read", "UTF"]
|
||||
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
name = "lazy_reader"
|
||||
crate-type = ["cdylib", "rlib"]
|
||||
test = true
|
||||
bench = true
|
||||
|
||||
[dependencies]
|
||||
itertools = "0.8"
|
||||
enso-prelude = { path = "../enso-prelude", version = "0.1.0" }
|
@ -1,196 +0,0 @@
|
||||
#![allow(unsafe_code)]
|
||||
|
||||
//! This module exports various UTF decoders for decoding UTF32 characters.
|
||||
|
||||
use std::fmt::Debug;
|
||||
|
||||
|
||||
|
||||
// ===============
|
||||
// === Decoder ===
|
||||
// ===============
|
||||
|
||||
/// The error for an invalid character.
|
||||
#[derive(Debug,Clone,Copy)]
|
||||
pub struct InvalidChar();
|
||||
|
||||
/// Trait for decoding UTF32 characters.
|
||||
pub trait Decoder {
|
||||
/// The input of the decoder.
|
||||
type Word : Default + Copy + Debug;
|
||||
    /// The maximum number of words needed to decode one symbol.
|
||||
const MAX_CODEPOINT_LEN: usize;
|
||||
|
||||
/// Decodes the first symbol from the slice and returns it with its length (in words).
|
||||
///
|
||||
/// This function can panic if `words.len() < MAX_CODEPOINT_LEN`.
|
||||
fn decode(words:&[Self::Word]) -> Char<InvalidChar>;
|
||||
}
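// A hedged illustration of the contract above (a hypothetical ASCII-only decoder, not part of
// the original sources): `decode` reads at most `MAX_CODEPOINT_LEN` words from the front of the
// slice and reports how many words it consumed.
#[derive(Debug,Copy,Clone)]
pub struct DecoderASCII();

impl Decoder for DecoderASCII {
    type Word = u8;
    const MAX_CODEPOINT_LEN: usize = 1;

    fn decode(words: &[u8]) -> Char<InvalidChar> {
        // Any byte above 0x7F is not valid ASCII.
        let char = if words[0] < 0x80 { Ok(words[0] as char) } else { Err(InvalidChar()) };
        Char { char, size: 1 }
    }
}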
|
||||
|
||||
|
||||
// === Char ===
|
||||
|
||||
/// The result of `decoder.decode`.
|
||||
#[derive(Debug,Clone,Copy,PartialEq)]
|
||||
pub struct Char<Error> {
|
||||
/// The decoded character.
|
||||
pub char: Result<char,Error>,
|
||||
/// The number of words read.
|
||||
pub size: usize,
|
||||
}
|
||||
|
||||
impl Char<crate::Error> {
|
||||
/// Check if the character represents the end of file.
|
||||
pub fn is_eof(&self) -> bool {
|
||||
match self.char {
|
||||
Ok(_) => false,
|
||||
Err(crate::Error::EOF) => true,
|
||||
Err(_) => false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =====================
|
||||
// === UTF-8 Decoder ===
|
||||
// =====================
|
||||
|
||||
/// Decoder for UTF-8.
|
||||
///
|
||||
/// For more info on UTF-8 and the algorithm used see [UTF-8](https://en.wikipedia.org/wiki/UTF-8).
|
||||
#[derive(Debug,Copy,Clone)]
|
||||
pub struct DecoderUTF8();
|
||||
|
||||
|
||||
// === Trait Impls ===
|
||||
|
||||
impl Decoder for DecoderUTF8 {
|
||||
type Word = u8;
|
||||
|
||||
const MAX_CODEPOINT_LEN: usize = 4;
|
||||
|
||||
fn decode(words: &[u8]) -> Char<InvalidChar> {
|
||||
let size = match !words[0] >> 4 {
|
||||
0 => 4,
|
||||
1 => 3,
|
||||
2 | 3 => 2,
|
||||
_ => 1,
|
||||
};
|
||||
|
||||
let mut char = (words[0] << size >> size) as u32;
|
||||
for word in &words[1..size] {
|
||||
char = char << 6 | (word & 0b_0011_1111) as u32;
|
||||
}
|
||||
|
||||
Char{char:std::char::from_u32(char).ok_or_else(InvalidChar),size}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ======================
|
||||
// === UTF-16 Decoder ===
|
||||
// ======================
|
||||
|
||||
/// Decoder for UTF-16.
|
||||
///
|
||||
/// For more info on UTF-16 and the algorithm used see [UTF-16](https://en.wikipedia.org/wiki/UTF-16).
|
||||
#[derive(Debug,Copy,Clone)]
|
||||
pub struct DecoderUTF16();
|
||||
|
||||
|
||||
// === Trait Impls ===
|
||||
|
||||
impl Decoder for DecoderUTF16 {
|
||||
type Word = u16;
|
||||
|
||||
const MAX_CODEPOINT_LEN: usize = 2;
|
||||
|
||||
fn decode(words: &[u16]) -> Char<InvalidChar> {
|
||||
if words[0] < 0xD800 || 0xDFFF < words[0] {
|
||||
let char = Ok(unsafe{std::char::from_u32_unchecked(words[0] as u32)});
|
||||
return Char{char,size:1};
|
||||
}
|
||||
let char = (((words[0] - 0xD800) as u32) << 10 | (words[1] - 0xDC00) as u32) + 0x1_0000;
|
||||
|
||||
Char{char:std::char::from_u32(char).ok_or_else(InvalidChar), size:2}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// ======================
|
||||
// === UTF-32 Decoder ===
|
||||
// ======================
|
||||
|
||||
/// Trivial decoder for UTF-32 (`char`).
|
||||
#[derive(Debug,Copy,Clone)]
|
||||
pub struct DecoderUTF32();
|
||||
|
||||
|
||||
// === Trait Impls ===
|
||||
|
||||
impl Decoder for DecoderUTF32 {
|
||||
type Word = char;
|
||||
|
||||
const MAX_CODEPOINT_LEN: usize = 1;
|
||||
|
||||
fn decode(words: &[char]) -> Char<InvalidChar> {
|
||||
Char{char:Ok(words[0]), size:1}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
// =============
|
||||
// === Tests ===
|
||||
// =============
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use itertools::Itertools;
|
||||
|
||||
|
||||
|
||||
#[test]
|
||||
fn test_utf8() {
|
||||
let string = "a.b^c! #𤭢界んにち𤭢#𤭢";
|
||||
let mut buf = string.as_bytes();
|
||||
let mut str = String::from("");
|
||||
while !buf.is_empty() {
|
||||
let char = DecoderUTF8::decode(buf);
|
||||
str.push(char.char.unwrap());
|
||||
buf = &buf[char.size..];
|
||||
}
|
||||
assert_eq!(str, string);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_utf16() {
|
||||
let string = "a.b^c! #𤭢界んにち𤭢#𤭢";
|
||||
let buffer = string.encode_utf16().collect_vec();
|
||||
let mut buf = &buffer[..];
|
||||
let mut str = String::from("");
|
||||
while !buf.is_empty() {
|
||||
let char = DecoderUTF16::decode(buf);
|
||||
str.push(char.char.unwrap());
|
||||
buf = &buf[char.size..];
|
||||
}
|
||||
assert_eq!(str, string);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_utf32() {
|
||||
let string = "a.b^c! #𤭢界んにち𤭢#𤭢".chars().collect_vec();
|
||||
let mut buf = &string[..];
|
||||
let mut str = vec![];
|
||||
while !buf.is_empty() {
|
||||
let char = DecoderUTF32::decode(buf);
|
||||
str.push(char.char.unwrap());
|
||||
buf = &buf[char.size..];
|
||||
}
|
||||
assert_eq!(str, string);
|
||||
}
|
||||
}
|
@ -1,568 +0,0 @@
|
||||
#![feature(test)]
|
||||
#![deny(unconditional_recursion)]
|
||||
#![warn(missing_copy_implementations)]
|
||||
#![warn(missing_debug_implementations)]
|
||||
#![warn(missing_docs)]
|
||||
#![warn(trivial_casts)]
|
||||
#![warn(trivial_numeric_casts)]
|
||||
#![warn(unsafe_code)]
|
||||
#![warn(unused_import_braces)]
|
||||
|
||||
//! This module exports a reader that is able to process large textual inputs in constant memory.
|
||||
|
||||
pub mod decoder;
|
||||
|
||||
use enso_prelude::*;
|
||||
|
||||
use crate::decoder::Char;
|
||||
use crate::decoder::InvalidChar;
|
||||
use decoder::Decoder;
|
||||
|
||||
|
||||
|
||||
// ============
|
||||
// === Read ===
|
||||
// ============
|
||||
|
||||
/// Trait for reading input data into a buffer.
|
||||
///
|
||||
/// Compared to `std::io::Read` this reader supports multiple input encodings.
|
||||
pub trait Read {
|
||||
/// The type of the data in the buffer.
|
||||
type Item;
|
||||
|
||||
    /// Fills the buffer and returns the number of elements read.
|
||||
///
|
||||
/// In case it isn't possible to fill the whole buffer (i.e. if an error like EOF is
|
||||
/// encountered), the buffer will be filled with all the data read before encountering such an
|
||||
/// error.
|
||||
fn read(&mut self,buffer:&mut [Self::Item]) -> usize;
|
||||
}
|
||||
|
||||
|
||||
// === Trait Impls ===
|
||||
|
||||
impl<R:std::io::Read> Read for R {
|
||||
type Item = u8;
|
||||
|
||||
fn read(&mut self,mut buffer:&mut [u8]) -> usize {
|
||||
let length = buffer.len();
|
||||
while !buffer.is_empty() {
|
||||
match self.read(buffer) {
|
||||
Err(_) => break,
|
||||
Ok (0) => break,
|
||||
Ok (n) => {
|
||||
buffer = &mut buffer[n..];
|
||||
}
|
||||
}
|
||||
}
|
||||
length - buffer.len()
|
||||
}
|
||||
}
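// Illustrative sketch (not from the original source): the blanket impl above keeps calling
// `std::io::Read::read` until the buffer is full or the input is exhausted, and reports how
// many bytes were actually written.
#[test]
fn read_fills_buffer_sketch() {
    let input = [1u8, 2, 3, 4, 5];
    let mut buffer = [0u8; 8];
    // `&[u8]` implements `std::io::Read`, so it also gets this crate's `Read` for free.
    let read = Read::read(&mut &input[..], &mut buffer);
    assert_eq!(read, 5);
    assert_eq!(&buffer[..read], &input[..]);
}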



// =============
// === Error ===
// =============

/// Set of errors returned by the lazy reader.
#[derive(Debug,Clone,Copy,PartialEq,Eq)]
pub enum Error {
    /// End Of Input.
    EOF,
    /// Couldn't decode character.
    InvalidChar,
    /// The lexer has found no matching rule in the current state.
    EndOfGroup,
}

impl Error {
    /// The `u32` value that corresponds to EOF.
    pub const END_OF_FILE:u32 = u32::max_value();
    /// The `u32` value that corresponds to an invalid unicode character.
    pub const INVALID_CHAR:u32 = 0xFFFF;
    /// The `u32` value corresponding to the end of group.
    pub const END_OF_GROUP:u32 = u32::max_value() - 1;
}


// === Trait Impls ===

impl From<decoder::Char<decoder::InvalidChar>> for decoder::Char<Error> {
    fn from(char:Char<InvalidChar>) -> Self {
        let size = char.size;
        let char = match char.char {
            Ok(char) => Ok(char),
            Err(_)   => Err(Error::InvalidChar),
        };
        decoder::Char{char,size}
    }
}

impl From<decoder::Char<Error>> for u32 {
    fn from(char:decoder::Char<Error>) -> Self {
        match char.char {
            Ok (char)               => char as u32,
            Err(Error::EOF)         => Error::END_OF_FILE,
            Err(Error::InvalidChar) => Error::INVALID_CHAR,
            Err(Error::EndOfGroup)  => Error::END_OF_GROUP,
        }
    }
}
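// Illustrative sketch (not from the original source): the `From` impls above turn a decoded
// character into the `u32` form consumed by the lexer, mapping errors to the sentinel
// constants on `Error`. (This assumes `InvalidChar` is the nullary constructor passed to
// `ok_or_else` in the decoder.)
#[test]
fn error_sentinel_sketch() {
    let eof = decoder::Char{char:Err(Error::EOF), size:0};
    assert_eq!(u32::from(eof), Error::END_OF_FILE);
    let bad:decoder::Char<Error> = decoder::Char{char:Err(InvalidChar()), size:1}.into();
    assert_eq!(u32::from(bad), Error::INVALID_CHAR);
    let ok = decoder::Char{char:Ok('a'), size:1};
    assert_eq!(u32::from(ok), 'a' as u32);
}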



// ==================
// === BookmarkId ===
// ==================

/// Strongly typed identifier of a `Bookmark`.
#[derive(Debug,Clone,Copy,PartialEq)]
pub struct BookmarkId {
    #[allow(missing_docs)]
    id: usize
}

impl BookmarkId {
    /// Creates a new bookmark handle using the specified identifier.
    pub fn new(id:usize) -> BookmarkId {
        BookmarkId{id}
    }
}



// ==================
// === LazyReader ===
// ==================

/// The behaviour needed by the lazy reader interface.
pub trait LazyReader {
    /// Read the next character from input.
    fn next_char(&mut self, bookmarks:&mut BookmarkManager) -> Result<char,Error>;
    /// Advance along the input without returning the character.
    fn advance_char(&mut self, bookmarks:&mut BookmarkManager);
    /// Get the current character from the reader.
    fn character(&self) -> decoder::Char<Error>;
    /// Check if the reader has finished reading.
    ///
    /// A reader is finished when it has no further input left to read, and when it does not need
    /// to rewind to any point.
    fn finished(&self, bookmarks:&BookmarkManager) -> bool;
    /// Check if the reader is empty.
    fn empty(&self) -> bool;
    /// Fill the buffer with words from the input.
    fn fill(&mut self, bookmarks:&mut BookmarkManager);
    /// Get the maximum possible rewind for the reader.
    fn max_possible_rewind_len(&self, bookmarks:&BookmarkManager) -> usize;
    /// Append the provided character to the reader's result.
    fn append_result(&mut self, char:char);
    /// Returns `self.result` and sets the internal result to empty.
    fn pop_result(&mut self) -> String;
    /// Get the reader's current offset in the buffer.
    fn offset(&self) -> usize;
    /// Get an immutable reference to the reader's result.
    fn result(&self) -> &String;
    /// Get a mutable reference to the reader's result.
    fn result_mut(&mut self) -> &mut String;
    /// Get the current length of the reader's buffer.
    fn buffer_len(&self) -> usize;
    /// Set the buffer offset to the specified value.
    fn set_offset(&mut self, off:usize);
    /// Truncate the current match to the provided length.
    fn truncate_match(&mut self, len:usize);
}

/// The default size of the buffer.
pub const BUFFER_SIZE: usize = 32768;



// ==============
// === Reader ===
// ==============

/// A buffered reader able to efficiently read big inputs in constant memory.
///
/// It supports various encodings via `Decoder`, as well as bookmarks which allow it to return
/// to a character at a specific offset.
#[derive(Debug,Clone,PartialEq)]
pub struct Reader<D:Decoder,Read> {
    /// The reader that holds the input.
    pub reader: Read,
    /// The buffer that stores the input data.
    pub buffer: Vec<D::Word>,
    /// The string representation of data that has been read.
    pub result: String,
    /// The buffer offset of the element currently being read.
    pub offset: usize,
    /// The number of elements stored in the buffer.
    pub length: usize,
    /// The last character read.
    pub character: decoder::Char<Error>,
}

impl<D:Decoder,R:Read<Item=D::Word>> Reader<D,R> {
    /// Creates a new instance of the reader.
    pub fn new(reader:R, _decoder:D) -> Self {
        let mut reader = Reader::<D,R> {
            reader,
            buffer    : vec![D::Word::default(); BUFFER_SIZE],
            result    : String::from(""),
            offset    : 0,
            length    : 0,
            character : decoder::Char{char:Err(Error::EOF), size:0},
        };
        reader.length = reader.reader.read(&mut reader.buffer[..]);
        reader
    }
}
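// Illustrative sketch (not from the original source): a `Reader` wraps any `Read` source
// (here a byte slice) together with a decoder, and characters are pulled out one at a time
// with `next_char`, exactly as the tests at the bottom of this module do.
#[test]
fn reader_construction_sketch() {
    let mut bookmarks = BookmarkManager::new();
    let mut reader = Reader::new("hello".as_bytes(), decoder::DecoderUTF8());
    assert_eq!(reader.next_char(&mut bookmarks), Ok('h'));
    assert_eq!(reader.next_char(&mut bookmarks), Ok('e'));
}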


// === Trait Impls ===

impl<D:Decoder, R:Read<Item=D::Word>> LazyReader for Reader<D,R> {
    fn next_char(&mut self, bookmarks:&mut BookmarkManager) -> Result<char,Error> {
        if self.empty() { self.character.char = Err(Error::EOF); return Err(Error::EOF) }

        if self.offset >= self.buffer.len() - D::MAX_CODEPOINT_LEN {
            self.fill(bookmarks);
        }

        self.character = D::decode(&self.buffer[self.offset..]).into();
        self.offset += self.character.size;

        self.character.char
    }

    fn advance_char(&mut self, bookmarks:&mut BookmarkManager) {
        let _ = self.next_char(bookmarks);
    }

    fn character(&self) -> Char<Error> {
        self.character
    }

    fn finished(&self, _bookmarks:&BookmarkManager) -> bool {
        let rewinded = self.max_possible_rewind_len(_bookmarks) != 0;
        self.empty() && rewinded
    }

    fn empty(&self) -> bool {
        self.length < self.buffer.len() && self.length <= self.offset
    }

    fn fill(&mut self, bookmarks:&mut BookmarkManager) {
        let len = self.buffer.len();
        let words = len - self.offset;
        self.offset = self.max_possible_rewind_len(bookmarks);
        if self.offset == len {
            panic!("Rewind won't be possible. Buffer is too small.")
        }

        bookmarks.decrease_bookmark_offsets(len - self.offset);
        for i in 1..=self.offset {
            self.buffer[self.offset - i] = self.buffer[len - i];
        }
        self.length = self.offset + self.reader.read(&mut self.buffer[self.offset..]);
        self.offset -= words;
    }

    fn max_possible_rewind_len(&self, bookmarks:&BookmarkManager) -> usize {
        if let Some(offset) = bookmarks.min_offset() {
            return self.buffer_len() - offset
        }
        D::MAX_CODEPOINT_LEN
    }

    fn append_result(&mut self,char:char) {
        self.result.push(char);
    }

    fn pop_result(&mut self) -> String {
        let str = self.result.clone();
        self.result.truncate(0);
        str
    }

    fn offset(&self) -> usize {
        self.offset
    }

    fn result(&self) -> &String {
        &self.result
    }

    fn result_mut(&mut self) -> &mut String {
        &mut self.result
    }

    fn buffer_len(&self) -> usize {
        self.buffer.len()
    }

    fn set_offset(&mut self, off: usize) {
        self.offset = off;
    }

    fn truncate_match(&mut self, len: usize) {
        self.result.truncate(len);
    }
}
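// Illustrative sketch (not from the original source): `append_result` accumulates matched
// text inside the reader and `pop_result` hands it off while clearing the internal buffer.
#[test]
fn result_accumulation_sketch() {
    let mut bookmarks = BookmarkManager::new();
    let mut reader = Reader::new("ab".as_bytes(), decoder::DecoderUTF8());
    while let Ok(char) = reader.next_char(&mut bookmarks) {
        reader.append_result(char);
    }
    assert_eq!(reader.pop_result(), "ab");
    assert_eq!(reader.result(), "");
}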



// ================
// === Bookmark ===
// ================

/// Bookmarks a specific character in the buffer, so that `LazyReader` can return to it when needed.
#[derive(Debug,Clone,Copy,Default,PartialEq)]
pub struct Bookmark {
    /// The position of the bookmarked character in the `reader.buffer`.
    offset: usize,
    /// The length of `reader.result` up to the bookmarked character.
    length: usize,
    /// Whether or not the bookmark has been set by the user.
    set:bool
}



// =======================
// === BookmarkManager ===
// =======================

/// Contains and manages bookmarks for a running lexer.
///
/// Some of its operations operate on a specific [`Reader`]. It is undefined behaviour to not pass
/// the same reader to all calls for a given bookmark manager.
#[allow(missing_docs)]
#[derive(Clone,Debug,PartialEq)]
pub struct BookmarkManager {
    bookmarks: Vec<Bookmark>,
    /// The bookmark used by the flexer to mark the end of the last matched segment of the input.
    pub matched_bookmark: BookmarkId,
    /// A bookmark used by the flexer to deal with overlapping rules that may fail later.
    pub rule_bookmark: BookmarkId,
}

#[allow(missing_docs)]
impl BookmarkManager {
    /// Create a new bookmark manager, with no associated bookmarks.
    pub fn new() -> BookmarkManager {
        let mut bookmarks = Vec::new();
        let matched_bookmark = BookmarkManager::make_bookmark(&mut bookmarks);
        let rule_bookmark = BookmarkManager::make_bookmark(&mut bookmarks);
        BookmarkManager {bookmarks,matched_bookmark,rule_bookmark}
    }

    /// Create a new bookmark in the manager, returning a handle to it.
    fn make_bookmark(bookmarks:&mut Vec<Bookmark>) -> BookmarkId {
        bookmarks.push(Bookmark::default());
        BookmarkId::new(bookmarks.len() - 1)
    }

    /// Add a bookmark to the manager, returning a handle to that bookmark.
    pub fn add_bookmark(&mut self) -> BookmarkId {
        BookmarkManager::make_bookmark(&mut self.bookmarks)
    }

    /// Bookmarks the current position in `reader` using `bookmark`.
    pub fn bookmark<R:LazyReader>(&mut self, bookmark:BookmarkId, reader:&mut R) {
        self.bookmarks[bookmark.id].offset = reader.offset() - reader.character().size;
        self.bookmarks[bookmark.id].length = reader.result().len();
        self.bookmarks[bookmark.id].set = true
    }

    /// Unsets a bookmark.
    pub fn unset<R:LazyReader>(&mut self, bookmark:BookmarkId) {
        self.bookmarks[bookmark.id].offset = 0;
        self.bookmarks[bookmark.id].length = 0;
        self.bookmarks[bookmark.id].set = false
    }

    /// Decrease the offset for all bookmarks by the specified `amount` in preparation for
    /// truncating the reader's buffer.
    pub fn decrease_bookmark_offsets(&mut self, amount:usize) {
        for bookmark in self.bookmarks.iter_mut() {
            if bookmark.set {
                bookmark.offset -= amount
            }
        }
    }

    /// Rewind the reader to the position marked by `bookmark`.
    pub fn rewind<R:LazyReader>(&mut self, bookmark:BookmarkId, reader:&mut R) {
        let bookmark = self.bookmarks.get(bookmark.id).expect("Bookmark must exist.");
        reader.set_offset(bookmark.offset);
        reader.truncate_match(bookmark.length);
        reader.advance_char(self);
    }

    /// Obtains the minimum offset from the start of the buffer for any bookmark.
    pub fn min_offset(&self) -> Option<usize> {
        self.bookmarks.iter().filter_map(|b| b.set.and_option(Some(b.offset))).min()
    }
}
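// Illustrative sketch (not from the original source): a bookmark lets the lexer read ahead
// speculatively and then return to a saved position. Here the spot after the first character
// is saved, one more character is read, and rewinding makes that first character current again.
#[test]
fn bookmark_rewind_sketch() {
    let mut bookmarks = BookmarkManager::new();
    let mut reader = Reader::new("abc".as_bytes(), decoder::DecoderUTF8());
    let bookmark = bookmarks.add_bookmark();
    assert_eq!(reader.next_char(&mut bookmarks), Ok('a'));
    bookmarks.bookmark(bookmark, &mut reader);
    assert_eq!(reader.next_char(&mut bookmarks), Ok('b'));
    bookmarks.rewind(bookmark, &mut reader);
    assert_eq!(reader.character().char, Ok('a'));
}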


// === Trait Impls ===

impl Default for BookmarkManager {
    fn default() -> Self {
        BookmarkManager::new()
    }
}



// =============
// === Tests ===
// =============

#[cfg(test)]
mod tests {
    extern crate test;

    use super::*;
    use decoder::*;

    use test::Bencher;

    // ================
    // === Repeater ===
    // ================

    /// The state of a reader that repeats its input a given number of times.
    #[derive(Debug, Clone)]
    struct Repeat<T> {
        /// The input to be repeated.
        buffer: Vec<T>,
        /// The offset of the element currently being read from the buffer.
        offset: usize,
        /// How many more times the input should be repeated.
        repeat: usize,
    }

    /// Creates a reader that repeats an input n times.
    fn repeat<T:Copy>(input:Vec<T>, repeat:usize) -> impl Read<Item=T> {
        Repeat { buffer:input, repeat, offset: 0 }
    }


    // === Trait Impls ===

    impl<T:Copy> Read for Repeat<T> {
        type Item = T;

        fn read(&mut self, mut buffer:&mut [Self::Item]) -> usize {
            if self.repeat == 0 { return 0 }

            let len = self.buffer.len();
            let read = buffer.len();

            if read < len - self.offset {
                buffer.copy_from_slice(&self.buffer[self.offset..self.offset + read]);
                self.offset += read;
                return read
            }

            buffer[..len - self.offset].copy_from_slice(&self.buffer[self.offset..]);
            buffer = &mut buffer[len - self.offset..];

            let repeat = std::cmp::min(buffer.len() / len, self.repeat - 1);
            self.repeat = self.repeat - repeat - 1;
            for _ in 0..repeat {
                buffer[..len].copy_from_slice(&self.buffer[..]);
                buffer = &mut buffer[len..];
            }

            if self.repeat == 0 {
                return len - self.offset + repeat * len
            }
            buffer.copy_from_slice(&self.buffer[..buffer.len()]);
            self.offset = buffer.len();
            read
        }
    }



    // =============
    // === Utils ===
    // =============

    /// Constructs an _empty_ bookmark manager for testing purposes.
    pub fn bookmark_manager() -> BookmarkManager {
        BookmarkManager::new()
    }



    // =============
    // === Tests ===
    // =============

    #[test]
    fn test_repeater_with_small_buffer() {
        let mut repeater = repeat(vec![1, 2, 3], 1);
        let mut buffer = [0; 2];
        assert_eq!(repeater.read(&mut buffer), 2);
        assert_eq!(&buffer, &[1, 2]);
        assert_eq!(repeater.read(&mut buffer), 1);
        assert_eq!(&buffer, &[3, 2])
    }

    #[test]
    fn test_repeater_with_big_buffer() {
        let mut repeater = repeat(vec![1, 2], 3);
        let mut buffer = [0; 5];
        assert_eq!(repeater.read(&mut buffer), 5);
        assert_eq!(&buffer, &[1, 2, 1, 2, 1]);
        assert_eq!(repeater.read(&mut buffer), 1);
        assert_eq!(&buffer, &[2, 2, 1, 2, 1])
    }

    #[test]
    fn test_reader_small_input() {
        let mut mgr = bookmark_manager();
        let str = "a.b^c! #𤭢界んにち𤭢#𤭢";
        let mut reader = Reader::new(str.as_bytes(), DecoderUTF8());
        let mut result = String::from("");
        while let Ok(char) = reader.next_char(&mut mgr) {
            result.push(char);
        }
        assert_eq!(&result, str);
    }

    #[test]
    fn test_reader_big_input() {
        let mut mgr = bookmark_manager();
        let str = "a.b^c! #𤭢界んにち𤭢#𤭢".repeat(10_000);
        let mut reader = Reader::new(str.as_bytes(), DecoderUTF8());
        let mut result = String::from("");
        while let Ok(char) = reader.next_char(&mut mgr) {
            mgr.bookmark(mgr.matched_bookmark,&mut reader);
            result.push(char);
        }
        assert_eq!(&result, &str);
        assert_eq!(reader.buffer.len(), BUFFER_SIZE);
    }

    #[bench]
    fn bench_reader(bencher:&mut Bencher) {
        let run = || {
            let mut mgr = bookmark_manager();
            let str = repeat("Hello, World!".as_bytes().to_vec(), 10_000_000);
            let mut reader = Reader::new(str, DecoderUTF8());
            let mut count = 0;
            while reader.next_char(&mut mgr) != Err(Error::EOF) {
                count += 1;
            }
            count
        };
        bencher.iter(run);
    }
}
@ -13,6 +13,6 @@ bench = true

[dependencies]
flexer = { path = "../../flexer", version = "0.1.0" }
enso-prelude = { path = "../../enso-prelude", version = "0.1.0" }
enso-prelude = { version = "0.1.3" }

uuid = { version = "0.8.1" , features = ["serde","v4","wasm-bindgen"] }
@ -13,7 +13,7 @@ bench = true

[dependencies]
flexer = { path = "../../flexer", version = "0.1.0" }
enso-prelude = { path = "../../enso-prelude", version = "0.1.0" }
enso-prelude = { version = "0.1.3" }
lexer-definition = { path = "../definition", version = "0.1.0" }

[build-dependencies]