diff --git a/Cargo.lock b/Cargo.lock
index 6229e1be32..7af375330d 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -36,6 +36,18 @@ version = "1.0.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
 
+[[package]]
+name = "aes"
+version = "0.7.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9e8b47f52ea9bae42228d07ec09eb676433d7c4ed1ebdf0f1d1c29ed446f1ab8"
+dependencies = [
+ "cfg-if 1.0.0",
+ "cipher",
+ "cpufeatures",
+ "opaque-debug",
+]
+
 [[package]]
 name = "aho-corasick"
 version = "0.7.18"
@@ -112,6 +124,12 @@ version = "0.13.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd"
 
+[[package]]
+name = "base64ct"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8a32fd6af2b5827bce66c29053ba0e7c42b9dcab01835835058558c10851a46b"
+
 [[package]]
 name = "bincode"
 version = "1.3.3"
@@ -201,6 +219,9 @@ name = "cc"
 version = "1.0.72"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "22a9137b95ea06864e018375b72adfb7db6e6f68cfc8df5a04d00288050485ee"
+dependencies = [
+ "jobserver",
+]
 
 [[package]]
 name = "cfg-if"
@@ -223,6 +244,15 @@ dependencies = [
 "envmnt",
 ]
 
+[[package]]
+name = "cipher"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7ee52072ec15386f770805afd189a01c8841be8696bed250fa2f13c4c0d6dfb7"
+dependencies = [
+ "generic-array",
+]
+
 [[package]]
 name = "clap"
 version = "2.34.0"
@@ -290,6 +320,12 @@ dependencies = [
 "winapi 0.3.9",
 ]
 
+[[package]]
+name = "constant_time_eq"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc"
+
 [[package]]
 name = "core-foundation"
 version = "0.9.2"
@@ -317,9 +353,9 @@ dependencies = [
 
 [[package]]
 name = "crc32fast"
-version = "1.3.1"
+version = "1.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a2209c310e29876f7f0b2721e7e26b84aff178aa3da5d091f9bfbf47669e60e3"
+checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
 dependencies = [
 "cfg-if 1.0.0",
 ]
@@ -461,6 +497,7 @@ checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506"
 dependencies = [
 "block-buffer",
 "crypto-common",
+ "subtle",
 ]
 
@@ -798,6 +835,15 @@ dependencies = [
 "libc",
 ]
 
+[[package]]
+name = "hmac"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e"
+dependencies = [
+ "digest",
+]
+
 [[package]]
 name = "http"
 version = "0.2.6"
@@ -974,6 +1020,15 @@ version = "1.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35"
 
+[[package]]
+name = "jobserver"
+version = "0.1.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "af25a77299a7f711a01975c35a6a424eb6862092cc2d6c72c4ed6cbc56dfc1fa"
+dependencies = [
+ "libc",
+]
+
 [[package]]
 name = "js-sys"
 version = "0.3.56"
@@ -1095,7 +1150,7 @@ dependencies = [
 "toml",
 "tracing",
 "tracing-subscriber",
- "zip",
+ "zip 0.6.0",
 ]
 
 [[package]]
@@ -1109,7 +1164,7 @@ dependencies = [
 "toml",
 "tracing",
 "walkdir",
- "zip",
+ "zip 0.6.0",
 ]
 
 [[package]]
@@ -1419,6 +1474,15 @@ dependencies = [
 "libc",
 ]
 
+[[package]]
+name = "num_threads"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aba1801fb138d8e85e11d0fc70baf4fe1cdfffda7c6cd34a854905df588e5ed0"
+dependencies = [
+ "libc",
+]
+
 [[package]]
 name = "number_prefix"
 version = "0.4.0"
@@ -1446,6 +1510,12 @@ version = "11.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575"
 
+[[package]]
+name = "opaque-debug"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
+
 [[package]]
 name = "openssl"
 version = "0.10.38"
@@ -1488,6 +1558,29 @@ dependencies = [
 "memchr",
 ]
 
+[[package]]
+name = "password-hash"
+version = "0.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d791538a6dcc1e7cb7fe6f6b58aca40e7f79403c45b2bc274008b5e647af1d8"
+dependencies = [
+ "base64ct",
+ "rand_core 0.6.3",
+ "subtle",
+]
+
+[[package]]
+name = "pbkdf2"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "271779f35b581956db91a3e55737327a03aa051e90b1c47aeb189508533adfd7"
+dependencies = [
+ "digest",
+ "hmac",
+ "password-hash",
+ "sha2",
+]
+
 [[package]]
 name = "percent-encoding"
 version = "2.1.0"
@@ -1938,7 +2031,7 @@ dependencies = [
 "semver 0.11.0",
 "serde_json",
 "tempfile",
- "zip",
+ "zip 0.5.13",
 ]
 
 [[package]]
@@ -2031,6 +2124,17 @@ dependencies = [
 "yaml-rust",
 ]
 
+[[package]]
+name = "sha1"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c77f4e7f65455545c2153c1253d25056825e77ee2533f0e41deb65a93a34852f"
+dependencies = [
+ "cfg-if 1.0.0",
+ "cpufeatures",
+ "digest",
+]
+
 [[package]]
 name = "sha2"
 version = "0.10.2"
@@ -2139,6 +2243,12 @@ dependencies = [
 "syn",
 ]
 
+[[package]]
+name = "subtle"
+version = "2.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"
+
 [[package]]
 name = "syn"
 version = "1.0.86"
@@ -2263,6 +2373,24 @@ dependencies = [
 "winapi 0.3.9",
 ]
 
+[[package]]
+name = "time"
+version = "0.3.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "004cbc98f30fa233c61a38bc77e96a9106e65c88f2d3bef182ae952027e5753d"
+dependencies = [
+ "itoa 1.0.1",
+ "libc",
+ "num_threads",
+ "time-macros",
+]
+
+[[package]]
+name = "time-macros"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "25eb0ca3468fc0acc11828786797f6ef9aa1555e4a211a60d64cc8e4d1be47d6"
+
 [[package]]
 name = "tinytemplate"
 version = "1.2.1"
@@ -2697,10 +2825,56 @@ version = "0.5.13"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "93ab48844d61251bb3835145c521d88aa4031d7139e8485990f60ca911fa0815"
 dependencies = [
+ "byteorder",
+ "crc32fast",
+ "thiserror",
+ "time 0.1.43",
+]
+
+[[package]]
+name = "zip"
+version = "0.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e6fa4aa90e99fb8d701bda16fb040d8ed2f9c7176fb44de750e880a74b580315"
+dependencies = [
+ "aes",
 "byteorder",
 "bzip2",
+ "constant_time_eq",
 "crc32fast",
 "flate2",
- "thiserror",
- "time",
+ "hmac",
+ "pbkdf2",
+ "sha1",
+ "time 0.3.7",
+ "zstd",
+]
+
+[[package]]
+name = "zstd"
+version = "0.10.0+zstd.1.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3b1365becbe415f3f0fcd024e2f7b45bacfb5bdd055f0dc113571394114e7bdd"
"3b1365becbe415f3f0fcd024e2f7b45bacfb5bdd055f0dc113571394114e7bdd" +dependencies = [ + "zstd-safe", +] + +[[package]] +name = "zstd-safe" +version = "4.1.4+zstd.1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f7cd17c9af1a4d6c24beb1cc54b17e2ef7b593dc92f19e9d9acad8b182bbaee" +dependencies = [ + "libc", + "zstd-sys", +] + +[[package]] +name = "zstd-sys" +version = "1.6.3+zstd.1.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc49afa5c8d634e75761feda8c592051e7eeb4683ba827211eb0d731d3402ea8" +dependencies = [ + "cc", + "libc", ] diff --git a/Cargo.toml b/Cargo.toml index 9eee984c34..9064a12f60 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -110,7 +110,7 @@ version = "0.3.9" features = [ "fmt" ] [dependencies.zip] -version = "0.5" +version = "0.6" [target."cfg(windows)".dependencies.ansi_term] version = "0.12.1" diff --git a/compiler/ast/src/chars/char_value.rs b/compiler/ast/src/chars/char_value.rs index 37857a3c5d..37b6fbd87f 100644 --- a/compiler/ast/src/chars/char_value.rs +++ b/compiler/ast/src/chars/char_value.rs @@ -48,7 +48,7 @@ impl fmt::Display for Char { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::Scalar(c) => write!(f, "{}", c), - Self::NonScalar(c) => write!(f, "{}", c), + Self::NonScalar(c) => write!(f, "{:X}", c), } } } diff --git a/compiler/ast/src/common/positive_number.rs b/compiler/ast/src/common/positive_number.rs index a083e45b26..b05416560b 100644 --- a/compiler/ast/src/common/positive_number.rs +++ b/compiler/ast/src/common/positive_number.rs @@ -16,21 +16,19 @@ use serde::{Deserialize, Serialize}; use std::fmt; -use tendril::StrTendril; /// A number string guaranteed to be positive by the pest grammar. #[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Eq, Hash)] pub struct PositiveNumber { /// The string representation of the positive number. // FIXME(Centril): This should become an `u128`. - #[serde(with = "leo_span::tendril_json")] - pub value: StrTendril, + pub value: String, } impl PositiveNumber { /// Returns `true` if this number is zero. pub fn is_zero(&self) -> bool { - self.value.as_ref().eq("0") + self.value.eq("0") } } diff --git a/compiler/ast/src/expression/value.rs b/compiler/ast/src/expression/value.rs index ccda457d4a..566abd4321 100644 --- a/compiler/ast/src/expression/value.rs +++ b/compiler/ast/src/expression/value.rs @@ -14,8 +14,6 @@ // You should have received a copy of the GNU General Public License // along with the Leo library. If not, see . -use tendril::StrTendril; - use super::*; use crate::{Char, CharValue}; @@ -24,37 +22,21 @@ use crate::{Char, CharValue}; pub enum ValueExpression { // todo: deserialize values here /// An address literal, e.g., `aleo1qnr4dkkvkgfqph0vzc3y6z2eu975wnpz2925ntjccd5cfqxtyu8sta57j8`. - Address( - #[serde(with = "leo_span::tendril_json")] StrTendril, - #[serde(with = "leo_span::span_json")] Span, - ), + Address(String, #[serde(with = "leo_span::span_json")] Span), /// A boolean literal, either `true` or `false`. - Boolean( - #[serde(with = "leo_span::tendril_json")] StrTendril, - #[serde(with = "leo_span::span_json")] Span, - ), + Boolean(String, #[serde(with = "leo_span::span_json")] Span), /// A char literal, e.g., `'a'`, representing a single unicode code point. Char(CharValue), /// A field literal, e.g., `42field`. /// That is, a signed number followed by the keyword `field`. 
-    Field(
-        #[serde(with = "leo_span::tendril_json")] StrTendril,
-        #[serde(with = "leo_span::span_json")] Span,
-    ),
+    Field(String, #[serde(with = "leo_span::span_json")] Span),
     /// A group literal, either product or affine.
     /// For example, `42group` or `(12, 52)group`.
     Group(Box<GroupValue>),
     /// A negated non-integer literal, e.g., `-4.2`.
-    Implicit(
-        #[serde(with = "leo_span::tendril_json")] StrTendril,
-        #[serde(with = "leo_span::span_json")] Span,
-    ),
+    Implicit(String, #[serde(with = "leo_span::span_json")] Span),
     /// An integer literal, e.g., `42`.
-    Integer(
-        IntegerType,
-        #[serde(with = "leo_span::tendril_json")] StrTendril,
-        #[serde(with = "leo_span::span_json")] Span,
-    ),
+    Integer(IntegerType, String, #[serde(with = "leo_span::span_json")] Span),
     /// A string literal, e.g., `"foobar"`.
     String(Vec<CharValue>, #[serde(with = "leo_span::span_json")] Span),
 }
diff --git a/compiler/ast/src/groups/group_coordinate.rs b/compiler/ast/src/groups/group_coordinate.rs
index 8fb39aee29..54960bb2c0 100644
--- a/compiler/ast/src/groups/group_coordinate.rs
+++ b/compiler/ast/src/groups/group_coordinate.rs
@@ -18,16 +18,12 @@ use leo_span::Span;
 
 use serde::{Deserialize, Serialize};
 use std::fmt;
-use tendril::StrTendril;
 
 /// A coordinate in an affine group literal.
 #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub enum GroupCoordinate {
     /// A number, e.g., `42`.
-    Number(
-        #[serde(with = "leo_span::tendril_json")] StrTendril,
-        #[serde(with = "leo_span::span_json")] Span,
-    ),
+    Number(String, #[serde(with = "leo_span::span_json")] Span),
     /// A sign high recovery, i.e. `+`.
     SignHigh,
     /// A sign low recovery, i.e., `-`.
diff --git a/compiler/ast/src/groups/group_value.rs b/compiler/ast/src/groups/group_value.rs
index 567527fd90..78e355e49e 100644
--- a/compiler/ast/src/groups/group_value.rs
+++ b/compiler/ast/src/groups/group_value.rs
@@ -19,16 +19,12 @@ use leo_span::Span;
 
 use serde::{Deserialize, Serialize};
 use std::fmt;
-use tendril::StrTendril;
 
 /// A group literal.
 #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
 pub enum GroupValue {
     /// Product group literal, e.g., `42group`.
-    Single(
-        #[serde(with = "leo_span::tendril_json")] StrTendril,
-        #[serde(with = "leo_span::span_json")] Span,
-    ),
+    Single(String, #[serde(with = "leo_span::span_json")] Span),
     /// An affine group literal with (x, y) coordinates.
     Tuple(GroupTuple),
 }
diff --git a/compiler/ast/src/input/input_value.rs b/compiler/ast/src/input/input_value.rs
index 9ca0a286d0..782b6dec25 100644
--- a/compiler/ast/src/input/input_value.rs
+++ b/compiler/ast/src/input/input_value.rs
@@ -38,22 +38,20 @@ impl TryFrom<(Type, Expression)> for InputValue {
         Ok(match value {
             (type_, Expression::Value(value)) => {
                 match (type_, value) {
-                    (Type::Address, ValueExpression::Address(value, _)) => Self::Address(value.to_string()),
+                    (Type::Address, ValueExpression::Address(value, _)) => Self::Address(value),
                     (Type::Boolean, ValueExpression::Boolean(value, span)) => {
                         let bool_value = value.parse::<bool>().map_err(|_| ParserError::unexpected_eof(&span))?; // TODO: change error
                         Self::Boolean(bool_value)
                     }
                     (Type::Char, ValueExpression::Char(value)) => Self::Char(value),
                     (Type::Field, ValueExpression::Field(value, _) | ValueExpression::Implicit(value, _)) => {
-                        Self::Field(value.to_string())
+                        Self::Field(value)
                     }
                     (Type::Group, ValueExpression::Group(value)) => Self::Group(*value),
-                    (Type::IntegerType(type_), ValueExpression::Implicit(value, _)) => {
-                        Self::Integer(type_, value.to_string())
-                    }
+                    (Type::IntegerType(type_), ValueExpression::Implicit(value, _)) => Self::Integer(type_, value),
                     (Type::IntegerType(expected), ValueExpression::Integer(actual, value, span)) => {
                         if expected == actual {
-                            Self::Integer(expected, value.to_string())
+                            Self::Integer(expected, value)
                         } else {
                             return Err(InputError::unexpected_type(expected.to_string(), actual, &span).into());
                         }
diff --git a/compiler/parser/default.profraw b/compiler/parser/default.profraw
new file mode 100644
index 0000000000..d9ada89c28
Binary files /dev/null and b/compiler/parser/default.profraw differ
diff --git a/compiler/parser/src/parser/context.rs b/compiler/parser/src/parser/context.rs
index 5cee96b54d..55abbf3ba0 100644
--- a/compiler/parser/src/parser/context.rs
+++ b/compiler/parser/src/parser/context.rs
@@ -22,7 +22,6 @@ use leo_errors::{LeoError, ParserError, Result};
 use leo_span::{Span, Symbol};
 
 use std::{borrow::Cow, unreachable};
-use tendril::format_tendril;
 
 /// Stores a program in tokenized format plus additional context.
 /// May be converted into a [`Program`] AST by parsing all tokens.
@@ -176,7 +175,7 @@ impl<'a> ParserContext<'a> {
                     span,
                 }) => {
                     *i -= 1;
-                    GroupCoordinate::Number(format_tendril!("-{}", value), span.clone())
+                    GroupCoordinate::Number(format!("-{}", value), span.clone())
                 }
                 _ => GroupCoordinate::SignLow,
             },
diff --git a/compiler/parser/src/parser/expression.rs b/compiler/parser/src/parser/expression.rs
index 7fdb919966..58d0b4a4c2 100644
--- a/compiler/parser/src/parser/expression.rs
+++ b/compiler/parser/src/parser/expression.rs
@@ -19,8 +19,6 @@ use super::*;
 use leo_errors::{ParserError, Result};
 use leo_span::sym;
 
-use tendril::format_tendril;
-
 const INT_TYPES: &[Token] = &[
     Token::I8,
     Token::I16,
@@ -253,17 +251,10 @@ impl ParserContext<'_> {
             // hack for const signed integer overflow issues
             if matches!(operation, UnaryOperation::Negate) {
                 if let Expression::Value(ValueExpression::Integer(type_, value, span)) = inner {
-                    inner = Expression::Value(ValueExpression::Integer(
-                        type_,
-                        format_tendril!("-{}", value),
-                        &op.span + &span,
-                    ));
+                    inner = Expression::Value(ValueExpression::Integer(type_, format!("-{}", value), &op.span + &span));
                     continue;
                 } else if let Expression::Value(ValueExpression::Implicit(value, span)) = inner {
-                    inner = Expression::Value(ValueExpression::Implicit(
-                        format_tendril!("-{}", value),
-                        &op.span + &span,
-                    ));
+                    inner = Expression::Value(ValueExpression::Implicit(format!("-{}", value), &op.span + &span));
                     continue;
                 }
             }
diff --git a/compiler/parser/src/parser/mod.rs b/compiler/parser/src/parser/mod.rs
index adcd32e72f..89020f61b6 100644
--- a/compiler/parser/src/parser/mod.rs
+++ b/compiler/parser/src/parser/mod.rs
@@ -51,14 +51,14 @@ pub(crate) fn assert_no_whitespace(left_span: &Span, right_span: &Span, left: &s
 
 /// Creates a new program from a given file path and source code text.
 pub fn parse(handler: &Handler, path: &str, source: &str) -> Result<Program> {
-    let mut tokens = ParserContext::new(handler, crate::tokenize(path, source.into())?);
+    let mut tokens = ParserContext::new(handler, crate::tokenize(path, source)?);
 
     tokens.parse_program()
 }
 
 /// Parses an input file at the given file `path` and `source` code text.
 pub fn parse_input(handler: &Handler, path: &str, source: &str) -> Result<ParsedInputFile> {
-    let mut tokens = ParserContext::new(handler, crate::tokenize(path, source.into())?);
+    let mut tokens = ParserContext::new(handler, crate::tokenize(path, source)?);
 
     tokens.parse_input()
 }
diff --git a/compiler/parser/src/test.rs b/compiler/parser/src/test.rs
index 07d6100094..12bdf89074 100644
--- a/compiler/parser/src/test.rs
+++ b/compiler/parser/src/test.rs
@@ -35,7 +35,7 @@ impl Namespace for TokenNamespace {
     fn run_test(&self, test: Test) -> Result<Value, String> {
         create_session_if_not_set_then(|_| {
-            tokenizer::tokenize("test", test.content.into())
+            tokenizer::tokenize("test", &test.content)
                 .map(|tokens| {
                     Value::String(
                         tokens
@@ -80,7 +80,7 @@ fn implicit_value_expr() -> Expression {
 }
 
 fn tokenize(test: Test) -> Result<Vec<SpannedToken>, String> {
-    tokenizer::tokenize("test", test.content.into()).map_err(|x| x.to_string())
+    tokenizer::tokenize("test", &test.content).map_err(|x| x.to_string())
 }
 
 fn all_are_comments(tokens: &[SpannedToken]) -> bool {
diff --git a/compiler/parser/src/tokenizer/lexer.rs b/compiler/parser/src/tokenizer/lexer.rs
index 7df2a7f0dd..233ea4d0a0 100644
--- a/compiler/parser/src/tokenizer/lexer.rs
+++ b/compiler/parser/src/tokenizer/lexer.rs
@@ -19,454 +19,395 @@ use leo_errors::{ParserError, Result};
 use leo_span::{Span, Symbol};
 
 use serde::{Deserialize, Serialize};
-use tendril::StrTendril;
 
-use std::fmt;
-
-///
-/// Returns the length of the given `wanted` string if the string can be eaten, otherwise returns [`None`].
-/// A string can be eaten if its bytes are at the front of the given `input` array.
-///
-fn eat(input: &[u8], wanted: &str) -> Option<usize> {
-    let wanted = wanted.as_bytes();
-    if input.len() < wanted.len() {
-        return None;
-    }
-    if &input[..wanted.len()] == wanted {
-        return Some(wanted.len());
-    }
-    None
-}
+use std::{fmt, iter::Peekable};
 
 ///
 /// Returns a new `StrTendril` string if an identifier can be eaten, otherwise returns [`None`].
 /// An identifier can be eaten if its bytes are at the front of the given `input_tendril` string.
 ///
-fn eat_identifier(input_tendril: &StrTendril) -> Option<StrTendril> {
-    let input = input_tendril.as_bytes();
-
-    if !input.get(0)?.is_ascii_alphabetic() {
-        return None;
+fn eat_identifier(input: &mut Peekable<impl Iterator<Item = char>>) -> Option<String> {
+    match input.peek() {
+        None => return None,
+        Some(c) if !c.is_ascii_alphabetic() => return None,
+        _ => {}
     }
 
-    let mut i = 1usize;
-    while i < input.len() {
-        if !input.get(i)?.is_ascii_alphanumeric() && *input.get(i)? != b'_' {
-            break;
-        }
-        i += 1;
+    let mut ident = String::new();
+    while let Some(c) = input.next_if(|c| c.is_ascii_alphanumeric() || c == &'_') {
+        ident.push(c);
     }
-    Some(input_tendril.subtendril(0, i as u32))
+    Some(ident)
 }
 
 impl Token {
-    ///
-    /// Returns a `char` if a character can be eaten, otherwise returns [`None`].
-    ///
-    fn eat_char(input_tendril: StrTendril, escaped: bool, hex: bool, unicode: bool) -> Result<Char> {
-        if input_tendril.is_empty() {
+    // Eats the parts of the unicode character after \u.
+    fn eat_unicode_char(input: &mut Peekable<impl Iterator<Item = char>>) -> Result<(usize, Char)> {
+        let mut unicode = String::new();
+        // Account for the chars '\' and 'u'.
+        let mut len = 2;
+
+        if input.next_if_eq(&'{').is_some() {
+            len += 1;
+        } else if let Some(c) = input.next() {
+            return Err(ParserError::lexer_unopened_escaped_unicode_char(c).into());
+        } else {
             return Err(ParserError::lexer_empty_input_tendril().into());
         }
 
-        if escaped {
-            let string = input_tendril.to_string();
-            let escaped = &string[1..input_tendril.len()];
+        while let Some(c) = input.next_if(|c| c != &'}') {
+            len += 1;
+            unicode.push(c);
+        }
 
-            if escaped.len() != 1 {
-                return Err(ParserError::lexer_escaped_char_incorrect_length(escaped).into());
-            } else if let Some(character) = escaped.chars().next() {
-                return match character {
-                    '0' => Ok(Char::Scalar(0 as char)),
-                    't' => Ok(Char::Scalar(9 as char)),
-                    'n' => Ok(Char::Scalar(10 as char)),
-                    'r' => Ok(Char::Scalar(13 as char)),
-                    '\"' => Ok(Char::Scalar(34 as char)),
-                    '\'' => Ok(Char::Scalar(39 as char)),
-                    '\\' => Ok(Char::Scalar(92 as char)),
-                    _ => return Err(ParserError::lexer_expected_valid_escaped_char(character).into()),
-                };
+        if input.next_if_eq(&'}').is_some() {
+            len += 1;
+        } else {
+            return Err(ParserError::lexer_unclosed_escaped_unicode_char(unicode).into());
+        }
+
+        // Max of 6 digits.
+        // Minimum of 1 digit.
+        if unicode.len() > 6 || unicode.is_empty() {
+            return Err(ParserError::lexer_invalid_escaped_unicode_length(unicode).into());
+        }
+
+        if let Ok(hex) = u32::from_str_radix(&unicode, 16) {
+            if let Some(character) = std::char::from_u32(hex) {
+                // scalar
+                Ok((len, Char::Scalar(character)))
+            } else if hex <= 0x10FFFF {
+                Ok((len, Char::NonScalar(hex)))
             } else {
-                return Err(ParserError::lexer_unclosed_escaped_char().into());
+                Err(ParserError::lexer_invalid_character_exceeded_max_value(unicode).into())
             }
+        } else {
+            Err(ParserError::lexer_expected_valid_hex_char(unicode).into())
+        }
+    }
+
+    // Eats the parts of the hex character after \x.
+    fn eat_hex_char(input: &mut Peekable<impl Iterator<Item = char>>) -> Result<(usize, Char)> {
+        let mut hex = String::new();
+        // Account for the chars '\' and 'x'.
+        let mut len = 2;
+
+        // First hex character.
+        if let Some(c) = input.next_if(|c| c != &'\'') {
+            len += 1;
+            hex.push(c);
+        } else if let Some(c) = input.next() {
+            return Err(ParserError::lexer_expected_valid_hex_char(c).into());
+        } else {
+            return Err(ParserError::lexer_empty_input_tendril().into());
         }
 
-        if hex {
-            let string = input_tendril.to_string();
-            let hex_string = &string[2..string.len()];
-
-            if hex_string.len() != 2 {
-                return Err(ParserError::lexer_escaped_hex_incorrect_length(hex_string).into());
-            } else if let Ok(ascii_number) = u8::from_str_radix(hex_string, 16) {
-                // According to RFC, we allow only values less than 128.
-                if ascii_number > 127 {
-                    return Err(ParserError::lexer_expected_valid_hex_char(ascii_number).into());
-                } else {
-                    return Ok(Char::Scalar(ascii_number as char));
-                }
-            }
+        // Second hex character.
+        if let Some(c) = input.next_if(|c| c != &'\'') {
+            len += 1;
+            hex.push(c);
+        } else if let Some(c) = input.next() {
+            return Err(ParserError::lexer_expected_valid_hex_char(c).into());
+        } else {
+            return Err(ParserError::lexer_empty_input_tendril().into());
         }
 
-        if unicode {
-            let string = input_tendril.to_string();
-            if string.find('{').is_none() {
-                return Err(ParserError::lexer_unopened_escaped_unicode_char(string).into());
-            } else if string.find('}').is_none() {
-                return Err(ParserError::lexer_unclosed_escaped_unicode_char(string).into());
+        if let Ok(ascii_number) = u8::from_str_radix(&hex, 16) {
+            // According to RFC, we allow only values less than 128.
+            if ascii_number > 127 {
+                return Err(ParserError::lexer_expected_valid_hex_char(hex).into());
             }
 
-            let unicode_number = &string[3..string.len() - 1];
-            let len = unicode_number.len();
-            if !(1..=6).contains(&len) {
-                return Err(ParserError::lexer_invalid_escaped_unicode_length(unicode_number).into());
-            } else if let Ok(hex) = u32::from_str_radix(unicode_number, 16) {
-                if let Some(character) = std::char::from_u32(hex) {
-                    // scalar
-                    return Ok(Char::Scalar(character));
-                } else if hex <= 0x10FFFF {
-                    return Ok(Char::NonScalar(hex));
-                } else {
-                    return Err(ParserError::lexer_invalid_character_exceeded_max_value(unicode_number).into());
-                }
-            }
+            Ok((len, Char::Scalar(ascii_number as char)))
+        } else {
+            Err(ParserError::lexer_expected_valid_hex_char(hex).into())
         }
+    }
 
-        if input_tendril.to_string().chars().count() != 1 {
-            // If char doesn't close.
-            return Err(ParserError::lexer_char_not_closed(&input_tendril).into());
-        } else if let Some(character) = input_tendril.to_string().chars().next() {
-            // If its a simple char.
-            return Ok(Char::Scalar(character));
+    fn eat_escaped_char(input: &mut Peekable<impl Iterator<Item = char>>) -> Result<(usize, Char)> {
+        match input.next() {
+            None => Err(ParserError::lexer_empty_input_tendril().into()),
+            // Length of 2 to account the '\'.
+            Some('0') => Ok((2, Char::Scalar(0 as char))),
+            Some('t') => Ok((2, Char::Scalar(9 as char))),
+            Some('n') => Ok((2, Char::Scalar(10 as char))),
+            Some('r') => Ok((2, Char::Scalar(13 as char))),
+            Some('\"') => Ok((2, Char::Scalar(34 as char))),
+            Some('\'') => Ok((2, Char::Scalar(39 as char))),
+            Some('\\') => Ok((2, Char::Scalar(92 as char))),
+            Some('u') => Self::eat_unicode_char(input),
+            Some('x') => Self::eat_hex_char(input),
+            Some(c) => Err(ParserError::lexer_expected_valid_escaped_char(c).into()),
         }
+    }
 
-        Err(ParserError::lexer_invalid_char(input_tendril.to_string()).into())
+    ///
+    /// Returns a `char` if a character can be eaten, otherwise returns [`None`].
+    ///
+    fn eat_char(input: &mut Peekable<impl Iterator<Item = char>>) -> Result<(usize, Char)> {
+        match input.next() {
+            None => Err(ParserError::lexer_empty_input_tendril().into()),
+            Some('\\') => Self::eat_escaped_char(input),
+            Some(c) => Ok((c.len_utf8(), Char::Scalar(c))),
+        }
     }
 
     ///
     /// Returns a tuple: [(integer length, integer token)] if an integer can be eaten, otherwise returns [`None`].
     /// An integer can be eaten if its bytes are at the front of the given `input_tendril` string.
     ///
-    fn eat_integer(input_tendril: &StrTendril) -> Result<(usize, Token)> {
-        if input_tendril.is_empty() {
+    fn eat_integer(input: &mut Peekable<impl Iterator<Item = char>>) -> Result<(usize, Token)> {
+        if input.peek().is_none() {
             return Err(ParserError::lexer_empty_input_tendril().into());
         }
-        let input = input_tendril.as_bytes();
-        if !input[0].is_ascii_digit() {
-            return Err(ParserError::lexer_eat_integer_leading_zero(String::from_utf8_lossy(input)).into());
-        }
-        let mut i = 1;
 
-        while i < input.len() {
-            if i == 1 && input[0] == b'0' && input[i] == b'x' {
-                return Err(ParserError::lexer_hex_number_provided(
-                    &input_tendril[..input_tendril.find('\n').unwrap_or(i) + 1],
-                )
-                .into());
-            }
-            if !input[i].is_ascii_digit() {
-                break;
+        let mut int = String::new();
+        while let Some(c) = input.next_if(|c| c.is_ascii_digit()) {
+            if c == '0' && matches!(input.peek(), Some('x')) {
+                int.push(c);
+                int.push(input.next().unwrap());
+                return Err(ParserError::lexer_hex_number_provided(int).into());
             }
-            i += 1;
+            int.push(c);
         }
-        Ok((i, Token::Int(input_tendril.subtendril(0, i as u32))))
-    }
 
-    /// Returns the number of bytes in an utf-8 encoding that starts with this byte.
-    fn utf8_byte_count(byte: u8) -> usize {
-        let mut mask = 0x80;
-        let mut result = 0;
-        while byte & mask > 0 {
-            result += 1;
-            mask >>= 1;
-        }
-        if result == 0 {
-            1
-        } else if result > 4 {
-            4
-        } else {
-            result
-        }
+        Ok((int.len(), Token::Int(int)))
     }
 
     ///
    /// Returns a tuple: [(token length, token)] if the next token can be eaten, otherwise returns [`None`].
     /// The next token can be eaten if the bytes at the front of the given `input_tendril` string can be scanned into a token.
     ///
-    pub(crate) fn eat(input_tendril: StrTendril) -> Result<(usize, Token)> {
+    pub(crate) fn eat(input_tendril: &str) -> Result<(usize, Token)> {
         if input_tendril.is_empty() {
             return Err(ParserError::lexer_empty_input_tendril().into());
         }
-        let input = input_tendril.as_bytes();
-        match input[0] {
-            x if x.is_ascii_whitespace() => return Ok((1, Token::WhiteSpace)),
-            b'"' => {
-                let mut i = 1;
-                let mut len = 1;
-                let mut start = 1;
-                let mut in_escape = false;
-                let mut escaped = false;
-                let mut hex = false;
-                let mut unicode = false;
-                let mut end = false;
-                let mut string = Vec::new();
-                while i < input.len() {
-                    // Get the length of the utf-8 encoding here
-                    // and position i at the last byte.
-                    if input[i] & 0x80 > 0 {
-                        len = Self::utf8_byte_count(input[i]);
-                        i += len;
+        let mut input = input_tendril.chars().peekable();
 
-                        if unicode {
-                            return Err(
-                                ParserError::lexer_emoji_inside_escaped_unicode_char(&input_tendril[0..i]).into(),
-                            );
-                        }
-
-                        continue;
-                    }
-
-                    if !in_escape {
-                        if input[i] == b'"' {
-                            end = true;
-                            break;
-                        } else if input[i] == b'\\' {
-                            in_escape = true;
-                            start = i;
-                            i += 1;
-                            continue;
-                        }
-                    } else {
-                        len += 1;
-
-                        match input[i] {
-                            b'x' => {
-                                hex = true;
-                            }
-                            b'u' => {
-                                unicode = true;
-                            }
-                            b'}' if unicode => {
-                                in_escape = false;
-                            }
-                            _ if !hex && !unicode => {
-                                escaped = true;
-                                in_escape = false;
-                            }
-                            _ if hex && len == 4 => {
-                                in_escape = false;
-                            }
-                            _ => {}
-                        }
-                    }
-
-                    if !in_escape {
-                        let character = Self::eat_char(
-                            input_tendril.subtendril(start as u32, len as u32),
-                            escaped,
-                            hex,
-                            unicode,
-                        )?;
-                        len = 1;
-                        escaped = false;
-                        hex = false;
-                        unicode = false;
-                        string.push(character.into());
-                    }
-
-                    i += 1;
-
-                    if !escaped && !hex && !unicode {
-                        start = i;
-                    }
-                }
-
-                if i == input.len() || !end {
-                    return Err(ParserError::lexer_string_not_closed(String::from_utf8_lossy(&input[..i])).into());
-                }
-
-                return Ok((i + 1, Token::StringLit(string)));
+        match input.peek() {
+            Some(x) if x.is_ascii_whitespace() => {
+                input.next();
+                return Ok((1, Token::WhiteSpace));
             }
-            b'\'' => {
-                let mut i = 1;
-                let mut in_escape = false;
-                let mut escaped = false;
-                let mut hex = false;
-                let mut escaped_unicode = false;
-                let mut unicode_char = false;
-                let mut end = false;
+            Some('"') => {
+                let mut string: Vec<leo_ast::Char> = Vec::new();
+                input.next();
 
-                while i < input.len() {
-                    if input[i] & 0x80 > 0 && !unicode_char {
-                        i += Self::utf8_byte_count(input[i]);
-                        unicode_char = true;
-                        continue;
-                    } else if input[i] & 0x80 > 0 && unicode_char {
-                        i += Self::utf8_byte_count(input[i]);
-                        return Err(ParserError::lexer_invalid_char(&input_tendril[..i]).into());
-                    } else if !in_escape || unicode_char {
-                        if input[i] == b'\'' {
-                            end = true;
-                            break;
-                        } else if unicode_char {
-                            return Err(ParserError::lexer_invalid_char(
-                                // grab the contents of everything between the '' if possible.
-                                // else just show the character right before stuff went wrong.
-                                &input_tendril[..input_tendril[1..].find('\'').unwrap_or(i - 1) + 1],
-                            )
-                            .into());
-                        } else if input[i] == b'\\' {
-                            in_escape = true;
-                        }
-                    } else {
-                        if input[i] == b'x' {
-                            hex = true;
-                        } else if input[i] == b'u' {
-                            let one_ahead = input.get(i + 1);
-                            if matches!(one_ahead, Some(b'{')) {
-                                escaped_unicode = true;
-                            } else if one_ahead.is_some() {
-                                return Err(ParserError::lexer_expected_valid_escaped_char(input[i + 1]).into());
-                            } else {
-                                return Err(ParserError::lexer_expected_valid_escaped_char(input[i]).into());
-                            }
-                        } else {
-                            escaped = true;
-                        }
-
-                        in_escape = false;
+                let mut len = 0;
+                while let Some(c) = input.peek() {
+                    if c == &'"' {
+                        break;
                     }
-
-                    i += 1;
+                    let (char_len, character) = Self::eat_char(&mut input)?;
+                    len += char_len;
+                    string.push(character.into());
                 }
 
-                if !end {
-                    return Err(ParserError::lexer_char_not_closed(String::from_utf8_lossy(&input[..i])).into());
+                if input.next_if_eq(&'"').is_some() {
+                    return Ok((len + 2, Token::StringLit(string)));
                 }
 
-                let character = Self::eat_char(
-                    input_tendril.subtendril(1, (i - 1) as u32),
-                    escaped,
-                    hex,
-                    escaped_unicode,
-                )?;
-                return Ok((i + 1, Token::CharLit(character)));
+                return Err(ParserError::lexer_string_not_closed(string).into());
             }
-            x if x.is_ascii_digit() => {
-                return Self::eat_integer(&input_tendril);
+            Some('\'') => {
+                input.next();
+
+                let (len, character) = Self::eat_char(&mut input)?;
+
+                if input.next_if_eq(&'\'').is_some() {
+                    input.next();
+                    return Ok((len + 2, Token::CharLit(character)));
+                } else if input.next().is_some() {
+                    return Err(ParserError::lexer_char_not_closed(character).into());
+                } else {
+                    return Err(ParserError::lexer_empty_input_tendril().into());
+                }
             }
-            b'!' => {
-                if let Some(len) = eat(input, "!=") {
-                    return Ok((len, Token::NotEq));
+            Some(x) if x.is_ascii_digit() => {
+                return Self::eat_integer(&mut input);
+            }
+            Some('!') => {
+                input.next();
+                if input.next_if_eq(&'=').is_some() {
+                    return Ok((2, Token::NotEq));
                 }
                 return Ok((1, Token::Not));
            }
-            b'?' => {
+            Some('?') => {
+                input.next();
                 return Ok((1, Token::Question));
             }
-            b'&' => {
-                if let Some(len) = eat(input, "&&") {
-                    return Ok((len, Token::And));
+            Some('&') => {
+                input.next();
+                if input.next_if_eq(&'&').is_some() {
+                    return Ok((2, Token::And));
                 }
                 return Ok((1, Token::Ampersand));
             }
-            b'(' => return Ok((1, Token::LeftParen)),
-            b')' => return Ok((1, Token::RightParen)),
-            b'_' => return Ok((1, Token::Underscore)),
-            b'*' => {
-                if let Some(len) = eat(input, "**") {
-                    if let Some(inner_len) = eat(&input[len..], "=") {
-                        return Ok((len + inner_len, Token::ExpEq));
+            Some('(') => {
+                input.next();
+                return Ok((1, Token::LeftParen));
+            }
+            Some(')') => {
+                input.next();
+                return Ok((1, Token::RightParen));
+            }
+            Some('_') => {
+                input.next();
+                return Ok((1, Token::Underscore));
+            }
+            Some('*') => {
+                input.next();
+                if input.next_if_eq(&'*').is_some() {
+                    if input.next_if_eq(&'=').is_some() {
+                        return Ok((3, Token::ExpEq));
                     }
-                    return Ok((len, Token::Exp));
-                } else if let Some(len) = eat(input, "*=") {
-                    return Ok((len, Token::MulEq));
+                    return Ok((2, Token::Exp));
+                } else if input.next_if_eq(&'=').is_some() {
+                    return Ok((2, Token::MulEq));
                 }
                 return Ok((1, Token::Mul));
             }
-            b'+' => {
-                if let Some(len) = eat(input, "+=") {
-                    return Ok((len, Token::AddEq));
+            Some('+') => {
+                input.next();
+                if input.next_if_eq(&'=').is_some() {
+                    return Ok((2, Token::AddEq));
                }
                 return Ok((1, Token::Add));
             }
-            b',' => return Ok((1, Token::Comma)),
-            b'-' => {
-                if let Some(len) = eat(input, "->") {
-                    return Ok((len, Token::Arrow));
-                } else if let Some(len) = eat(input, "-=") {
-                    return Ok((len, Token::MinusEq));
+            Some(',') => {
+                input.next();
+                return Ok((1, Token::Comma));
+            }
+            Some('-') => {
+                input.next();
+                if input.next_if_eq(&'>').is_some() {
+                    return Ok((2, Token::Arrow));
+                } else if input.next_if_eq(&'=').is_some() {
+                    return Ok((2, Token::MinusEq));
                 }
                 return Ok((1, Token::Minus));
             }
-            b'.' => {
-                if let Some(len) = eat(input, "...") {
-                    return Ok((len, Token::DotDotDot));
-                } else if let Some(len) = eat(input, "..") {
-                    return Ok((len, Token::DotDot));
+            Some('.') => {
+                input.next();
+                if input.next_if_eq(&'.').is_some() {
+                    if input.next_if_eq(&'.').is_some() {
+                        return Ok((3, Token::DotDotDot));
+                    } else {
+                        return Ok((2, Token::DotDot));
+                    }
                 }
                 return Ok((1, Token::Dot));
             }
-            b'/' => {
-                if eat(input, "//").is_some() {
-                    let eol = input.iter().position(|x| *x == b'\n');
-                    let len = if let Some(eol) = eol { eol + 1 } else { input.len() };
-                    return Ok((len, Token::CommentLine(input_tendril.subtendril(0, len as u32))));
-                } else if eat(input, "/*").is_some() {
-                    let eol = input.windows(2).skip(2).position(|x| x[0] == b'*' && x[1] == b'/');
-                    let len = if let Some(eol) = eol {
-                        eol + 4
-                    } else {
-                        return Err(ParserError::lexer_block_comment_does_not_close_before_eof(
-                            String::from_utf8_lossy(input),
-                        )
-                        .into());
-                    };
-                    return Ok((len, Token::CommentBlock(input_tendril.subtendril(0, len as u32))));
-                } else if let Some(len) = eat(input, "/=") {
-                    return Ok((len, Token::DivEq));
+            Some(c) if c == &'/' => {
+                input.next();
+                if input.next_if_eq(&'/').is_some() {
+                    let mut comment = String::from("//");
+
+                    while let Some(c) = input.next_if(|c| c != &'\n') {
+                        comment.push(c);
+                    }
+
+                    if let Some(newline) = input.next_if_eq(&'\n') {
+                        comment.push(newline);
+                        return Ok((comment.len(), Token::CommentLine(comment)));
+                    }
+
+                    return Ok((comment.len(), Token::CommentLine(comment)));
+                } else if input.next_if_eq(&'*').is_some() {
+                    let mut comment = String::from("/*");
+
+                    if input.peek().is_none() {
+                        return Err(ParserError::lexer_empty_block_comment().into());
+                    }
+
+                    let mut ended = false;
+                    while let Some(c) = input.next() {
+                        comment.push(c);
+                        if c == '*' && input.next_if_eq(&'/').is_some() {
+                            comment.push('/');
+                            ended = true;
+                            break;
+                        }
+                    }
+
+                    if !ended {
+                        return Err(ParserError::lexer_block_comment_does_not_close_before_eof(comment).into());
+                    }
+                    return Ok((comment.len(), Token::CommentBlock(comment)));
+                } else if input.next_if_eq(&'=').is_some() {
+                    return Ok((2, Token::DivEq));
                 }
                 return Ok((1, Token::Div));
             }
-            b':' => {
-                if let Some(len) = eat(input, "::") {
-                    return Ok((len, Token::DoubleColon));
+            Some(':') => {
+                input.next();
+                if input.next_if_eq(&':').is_some() {
+                    return Ok((2, Token::DoubleColon));
                 } else {
                     return Ok((1, Token::Colon));
                 }
             }
-            b';' => return Ok((1, Token::Semicolon)),
-            b'<' => {
-                if let Some(len) = eat(input, "<=") {
-                    return Ok((len, Token::LtEq));
+            Some(';') => {
+                input.next();
+                return Ok((1, Token::Semicolon));
+            }
+            Some('<') => {
+                input.next();
+                if input.next_if_eq(&'=').is_some() {
+                    return Ok((2, Token::LtEq));
                }
                 return Ok((1, Token::Lt));
             }
-            b'>' => {
-                if let Some(len) = eat(input, ">=") {
-                    return Ok((len, Token::GtEq));
+            Some('>') => {
+                input.next();
+                if input.next_if_eq(&'=').is_some() {
+                    return Ok((2, Token::GtEq));
                }
                 return Ok((1, Token::Gt));
             }
-            b'=' => {
-                if let Some(len) = eat(input, "==") {
-                    return Ok((len, Token::Eq));
+            Some('=') => {
+                input.next();
+                if input.next_if_eq(&'=').is_some() {
+                    return Ok((2, Token::Eq));
                }
                 return Ok((1, Token::Assign));
             }
-            b'@' => return Ok((1, Token::At)),
-            b'[' => return Ok((1, Token::LeftSquare)),
-            b']' => return Ok((1, Token::RightSquare)),
-            b'{' => return Ok((1, Token::LeftCurly)),
-            b'}' => return Ok((1, Token::RightCurly)),
-            b'|' => {
-                if let Some(len) = eat(input, "||") {
-                    return Ok((len, Token::Or));
+            Some('@') => {
+                input.next();
+                return Ok((1, Token::At));
+            }
+            Some('[') => {
+                input.next();
+                return Ok((1, Token::LeftSquare));
+            }
+            Some(']') => {
+                input.next();
+                return Ok((1, Token::RightSquare));
+            }
+            Some('{') => {
+                input.next();
+                return Ok((1, Token::LeftCurly));
+            }
+            Some('}') => {
+                input.next();
+                return Ok((1, Token::RightCurly));
+            }
+            Some('|') => {
+                input.next();
+                if input.next_if_eq(&'|').is_some() {
+                    return Ok((2, Token::Or));
+                } else if let Some(found) = input.next() {
+                    return Err(ParserError::lexer_expected_but_found(found, '|').into());
+                } else {
+                    return Err(ParserError::lexer_empty_input_tendril().into());
                 }
             }
             _ => (),
         }
 
-        if let Some(ident) = eat_identifier(&input_tendril) {
+        if let Some(ident) = eat_identifier(&mut input) {
             return Ok((
                 ident.len(),
                 match &*ident {
@@ -511,7 +452,7 @@ impl Token {
             ));
         }
 
-        Err(ParserError::could_not_lex(String::from_utf8_lossy(input)).into())
+        Err(ParserError::could_not_lex(input.collect::<String>()).into())
     }
 }
diff --git a/compiler/parser/src/tokenizer/mod.rs b/compiler/parser/src/tokenizer/mod.rs
index 81d674b0b1..7b6d8fd23d 100644
--- a/compiler/parser/src/tokenizer/mod.rs
+++ b/compiler/parser/src/tokenizer/mod.rs
@@ -31,38 +31,16 @@ pub(crate) use self::lexer::*;
 use leo_errors::{ParserError, Result};
 use leo_span::Span;
 
-use tendril::StrTendril;
-
 /// Creates a new vector of spanned tokens from a given file path and source code text.
-pub(crate) fn tokenize(path: &str, input: StrTendril) -> Result<Vec<SpannedToken>> {
+pub(crate) fn tokenize(path: &str, input: &str) -> Result<Vec<SpannedToken>> {
     let path = Arc::new(path.to_string());
     let mut tokens = vec![];
     let mut index = 0usize;
     let mut line_no = 1usize;
     let mut line_start = 0usize;
     while input.len() > index {
-        match Token::eat(input.subtendril(index as u32, (input.len() - index) as u32))? {
+        match Token::eat(&input[index..])? {
             (token_len, Token::WhiteSpace) => {
-                if token_len == 0 && index == input.len() {
-                    break;
-                } else if token_len == 0 {
-                    return Err(ParserError::unexpected_token(
-                        &input[index..].chars().next().unwrap(),
-                        &Span::new(
-                            line_no,
-                            line_no,
-                            index - line_start + 1,
-                            index - line_start + 2,
-                            path,
-                            input.subtendril(
-                                line_start as u32,
-                                input[line_start..].find('\n').unwrap_or(input.len()) as u32,
-                            ),
-                        ),
-                    )
-                    .into());
-                }
-
                 let bytes = input.as_bytes();
                 if bytes[index] == 0x000D && matches!(bytes.get(index + 1), Some(0x000A)) {
                     // Check carriage return followed by newline.
@@ -83,10 +61,12 @@ pub(crate) fn tokenize(path: &str, input: StrTendril) -> Result<Vec<SpannedToken>> {
             (token_len, token) => {
@@ -129,6 +109,8 @@ mod tests {
         let tokens = tokenize(
             "test_path",
             r#"
+            'a'
+            '😭'
             "test"
             "test{}test"
             "test{}"
@@ -227,7 +209,7 @@ mod tests {
 
             assert_eq!(
                 output,
-                r#""test" "test{}test" "test{}" "{}test" "test{" "test}" "test{test" "test}test" "te{{}}" aleo1qnr4dkkvkgfqph0vzc3y6z2eu975wnpz2925ntjccd5cfqxtyu8sta57j8 test_ident 12345 address as bool circuit const else false field for function group i128 i64 i32 i16 i8 if import in input let mut & return static string test true u128 u64 u32 u16 u8 self Self console ! != && ( ) * ** **= *= + += , - -= -> _ . .. ... / /= : :: ; < <= = == > >= @ [ ] { { } } || ? // test "#
+                r#"'a' '😭' "test" "test{}test" "test{}" "{}test" "test{" "test}" "test{test" "test}test" "te{{}}" aleo1qnr4dkkvkgfqph0vzc3y6z2eu975wnpz2925ntjccd5cfqxtyu8sta57j8 test_ident 12345 address as bool circuit const else false field for function group i128 i64 i32 i16 i8 if import in input let mut & return static string test true u128 u64 u32 u16 u8 self Self console ! != && ( ) * ** **= *= + += , - -= -> _ . .. ... / /= : :: ; < <= = == > >= @ [ ] { { } } || ? // test /* test */ // "#
             );
         });
diff --git a/compiler/parser/src/tokenizer/token.rs b/compiler/parser/src/tokenizer/token.rs
index 3dca55e1cf..167fd04dff 100644
--- a/compiler/parser/src/tokenizer/token.rs
+++ b/compiler/parser/src/tokenizer/token.rs
@@ -18,7 +18,6 @@ use leo_span::{sym, Symbol};
 
 use serde::{Deserialize, Serialize};
 use std::fmt;
-use tendril::StrTendril;
 
 #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
 pub enum Char {
@@ -40,7 +39,7 @@ impl fmt::Display for Char {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
             Self::Scalar(c) => write!(f, "{}", c),
-            Self::NonScalar(c) => write!(f, "{}", c),
+            Self::NonScalar(c) => write!(f, "{:X}", c),
         }
     }
 }
@@ -50,14 +49,14 @@ impl fmt::Display for Char {
 pub enum Token {
     // Lexical Grammar
     // Literals
-    CommentLine(#[serde(with = "leo_span::tendril_json")] StrTendril),
-    CommentBlock(#[serde(with = "leo_span::tendril_json")] StrTendril),
+    CommentLine(String),
+    CommentBlock(String),
     StringLit(Vec<leo_ast::Char>),
     Ident(Symbol),
-    Int(#[serde(with = "leo_span::tendril_json")] StrTendril),
+    Int(String),
     True,
     False,
-    AddressLit(#[serde(with = "leo_span::tendril_json")] StrTendril),
+    AddressLit(String),
     CharLit(Char),
     WhiteSpace,
 
@@ -259,7 +258,7 @@ impl fmt::Display for Token {
             True => write!(f, "true"),
             False => write!(f, "false"),
             AddressLit(s) => write!(f, "{}", s),
-            CharLit(s) => write!(f, "{}", s),
+            CharLit(s) => write!(f, "'{}'", s),
             WhiteSpace => write!(f, "whitespace"),
 
             At => write!(f, "@"),
diff --git a/compiler/parser/tests/serialization/json.rs b/compiler/parser/tests/serialization/json.rs
deleted file mode 100644
index b0fcd85eef..0000000000
--- a/compiler/parser/tests/serialization/json.rs
+++ /dev/null
@@ -1,217 +0,0 @@
-// Copyright (C) 2019-2022 Aleo Systems Inc.
-// This file is part of the Leo library.
-
-// The Leo library is free software: you can redistribute it and/or modify
-// it under the terms of the GNU General Public License as published by
-// the Free Software Foundation, either version 3 of the License, or
-// (at your option) any later version.
-
-// The Leo library is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU General Public License for more details.
-
-// You should have received a copy of the GNU General Public License
-// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
-
-use leo_ast::Ast;
-#[cfg(not(feature = "ci_skip"))]
-use leo_ast::Program;
-use leo_errors::{emitter::Handler, LeoError, Result};
-
-use std::fs::File;
-use std::io::BufReader;
-use std::iter::Iterator;
-use std::path::{Path, PathBuf};
-
-fn to_ast(program_filepath: &Path) -> Result<Ast> {
-    let program_string = std::fs::read_to_string(program_filepath).expect("failed to open test");
-
-    // Parses the Leo file and constructs a leo ast.
-    leo_parser::parse_ast(&Handler::default(), "", &program_string)
-}
-
-fn setup() {
-    std::env::set_var("LEO_TESTFRAMEWORK", "true");
-}
-
-fn clean() {
-    std::env::remove_var("LEO_TESTFRAMEWORK");
-}
-
-#[test]
-#[cfg(not(feature = "ci_skip"))]
-fn test_serialize() {
-    setup();
-
-    // Construct an ast from the given test file.
-    let ast = {
-        let mut program_filepath = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
-        program_filepath.push("tests/serialization/leo/one_plus_one.leo");
-
-        to_ast(&program_filepath).unwrap()
-    };
-
-    // Serializes the ast into JSON format.
-    let serialized_ast: Program = serde_json::from_value(serde_json::to_value(ast.as_repr()).unwrap()).unwrap();
-
-    // Load the expected ast.
-    let expected: Program = serde_json::from_str(include_str!("./expected_leo_ast/one_plus_one.json")).unwrap();
-
-    clean();
-    assert_eq!(expected, serialized_ast);
-}
-
-#[test]
-#[cfg(not(feature = "ci_skip"))]
-fn test_serialize_no_span() {
-    setup();
-
-    let program_paths = vec![
-        "tests/serialization/leo/linear_regression.leo",
-        "tests/serialization/leo/palindrome.leo",
-        "tests/serialization/leo/pedersen_hash.leo",
-        "tests/serialization/leo/silly_sudoku.leo",
-    ];
-
-    let json_paths = vec![
-        "tests/serialization/expected_leo_ast/linear_regression.json",
-        "tests/serialization/expected_leo_ast/palindrome.json",
-        "tests/serialization/expected_leo_ast/pedersen_hash.json",
-        "tests/serialization/expected_leo_ast/silly_sudoku.json",
-    ];
-
-    for (program_path, json_path) in program_paths.into_iter().zip(json_paths) {
-        // Construct an ast from the given test file.
-        let ast = {
-            let mut program_filepath = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
-            program_filepath.push(program_path);
-            to_ast(&program_filepath).unwrap()
-        };
-
-        let json_reader = {
-            let mut json_filepath = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
-            json_filepath.push(json_path);
-            let file = File::open(json_filepath).expect("Failed to read expected ast file");
-            BufReader::new(file)
-        };
-
-        // Serializes the ast into JSON format.
-        let mut serialized_ast: serde_json::Value = serde_json::to_value(ast.as_repr()).unwrap();
-        remove_key_from_json(&mut serialized_ast, "span");
-        serialized_ast = normalize_json_value(serialized_ast);
-
-        // Load the expected ast.
-        let expected: serde_json::Value = serde_json::from_reader(json_reader).unwrap();
-
-        assert_eq!(expected, serialized_ast);
-    }
-    clean();
-}
-
-// Helper functions to recursively filter keys from AST JSON.
-// Redeclaring here since we don't want to make this public.
-fn remove_key_from_json(value: &mut serde_json::Value, key: &str) {
-    match value {
-        serde_json::value::Value::Object(map) => {
-            map.remove(key);
-            for val in map.values_mut() {
-                remove_key_from_json(val, key);
-            }
-        }
-        serde_json::value::Value::Array(values) => {
-            for val in values.iter_mut() {
-                remove_key_from_json(val, key);
-            }
-        }
-        _ => (),
-    }
-}
-
-// Helper function to normalize AST
-// Redeclaring here because we don't want to make this public
-fn normalize_json_value(value: serde_json::Value) -> serde_json::Value {
-    match value {
-        serde_json::Value::Array(vec) => {
-            let orig_length = vec.len();
-            let mut new_vec: Vec<serde_json::Value> = vec
-                .into_iter()
-                .filter(|v| !matches!(v, serde_json::Value::Object(map) if map.is_empty()))
-                .map(normalize_json_value)
-                .collect();
-
-            if orig_length == 2 && new_vec.len() == 1 {
-                new_vec.pop().unwrap()
-            } else {
-                serde_json::Value::Array(new_vec)
-            }
-        }
-        serde_json::Value::Object(map) => {
-            serde_json::Value::Object(map.into_iter().map(|(k, v)| (k, normalize_json_value(v))).collect())
-        }
-        _ => value,
-    }
-}
-
-// TODO Renable when we don't write spans to snapshots.
-/* #[test]
-#[cfg(not(feature = "ci_skip"))]
-fn test_deserialize() {
-    setup();
-
-    // Load the expected ast.
-    let expected_ast = {
-        let mut program_filepath = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
-        program_filepath.push("tests/serialization/main.leo");
-
-        to_ast(&program_filepath).unwrap()
-    };
-
-    // Construct an ast by deserializing a ast JSON file.
-    let serialized_ast = include_str!("expected_leo_ast.json");
-    let ast = Ast::from_json_string(serialized_ast).unwrap();
-
-    clean();
-    assert_eq!(expected_ast, ast);
-}
-
-#[test]
-fn test_serialize_deserialize_serialize() {
-    setup();
-
-    // Construct an ast from the given test file.
-    let ast = {
-        let mut program_filepath = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
-        program_filepath.push("tests/serialization/main.leo");
-
-        to_ast(&program_filepath).unwrap()
-    };
-
-    // Serializes the ast into JSON format.
-    let serialized_ast = ast.to_json_string().unwrap();
-
-    // Deserializes the serialized ast into an ast.
-    let ast = Ast::from_json_string(&serialized_ast).unwrap();
-
-    // Reserializes the ast into JSON format.
-    let reserialized_ast = ast.to_json_string().unwrap();
-
-    clean();
-    assert_eq!(serialized_ast, reserialized_ast);
-} */
-
-#[test]
-fn test_generic_parser_error() {
-    setup();
-
-    let error_result = {
-        let mut program_filepath = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
-        program_filepath.push("tests/serialization/leo/parser_error.leo");
-
-        to_ast(&program_filepath)
-    }
-    .map_err(|err| matches!(err, LeoError::ParserError(_)));
-
-    clean();
-    assert!(error_result.err().unwrap());
-}
diff --git a/docs/grammar/README.md b/docs/grammar/README.md
index 1cb7be2e77..b2226f3fb3 100644
--- a/docs/grammar/README.md
+++ b/docs/grammar/README.md
@@ -204,6 +204,12 @@ hexadecimal-digit = decimal-digit / "a" / "b" / "c" / "d" / "e" / "f"
 
 Go to: _[decimal-digit](#user-content-decimal-digit)_;
 
+An identifier is a non-empty sequence of
+letters, (decimal) digits, and underscores,
+starting with a letter.
+It must not be a keyword or a boolean literal,
+and it must not be or start with `aleo1`;
+these are extra-grammatical requirements, indicated in the comment.
 
 ```abnf
 identifier = letter *( letter / decimal-digit / "_" )
@@ -479,7 +485,6 @@ scalar-type = boolean-type / arithmetic-type / address-type / character-type
 Go to: _[address-type](#user-content-address-type), [arithmetic-type](#user-content-arithmetic-type), [boolean-type](#user-content-boolean-type), [character-type](#user-content-character-type)_;
 
-
 ```abnf
 type = scalar-type
@@ -782,7 +787,6 @@ print-call = print-function print-arguments
 Go to: _[print-arguments](#user-content-print-arguments), [print-function](#user-content-print-function)_;
 
-
 ```abnf
 function-declaration = %s"function" identifier
diff --git a/docs/grammar/abnf-grammar.txt b/docs/grammar/abnf-grammar.txt
index 668b2af979..490bc9e945 100644
--- a/docs/grammar/abnf-grammar.txt
+++ b/docs/grammar/abnf-grammar.txt
@@ -100,6 +100,12 @@ octal-digit = %x30-37 ; 0-7
 
 hexadecimal-digit = decimal-digit / "a" / "b" / "c" / "d" / "e" / "f"
 
+; An identifier is a non-empty sequence of
+; letters, (decimal) digits, and underscores,
+; starting with a letter.
+; It must not be a keyword or a boolean literal,
+; and it must not be or start with `aleo1`;
+; these are extra-grammatical requirements, indicated in the comment.
 identifier = letter *( letter / decimal-digit / "_" )
 ; but not a keyword or a boolean literal or aleo1...
diff --git a/leo/errors/src/parser/parser_errors.rs b/leo/errors/src/parser/parser_errors.rs
index 08a5905e44..64afc7e82d 100644
--- a/leo/errors/src/parser/parser_errors.rs
+++ b/leo/errors/src/parser/parser_errors.rs
@@ -136,14 +136,6 @@ create_errors!(
         help: None,
     }
 
-    /// For when the parser encountered an illegal `const self` argument.
-    @formatted
-    illegal_self_const {
-        args: (),
-        msg: "cannot have const self",
-        help: None,
-    }
-
     /// For when the parser encountered a deprecated `mut` argument in a function.
     @formatted
     mut_function_input {
@@ -232,14 +224,6 @@ create_errors!(
         help: None,
     }
 
-    /// When an integer is started with a leading zero.
-    @backtraced
-    lexer_eat_integer_leading_zero {
-        args: (input: impl Display),
-        msg: format!("Tried to eat integer but found a leading zero on `{}`.", input),
-        help: None,
-    }
-
     /// When an integer is started with a leading zero.
     @backtraced
     lexer_expected_valid_escaped_char {
@@ -251,8 +235,8 @@ create_errors!(
     /// When a string is not properly closed.
     @backtraced
     lexer_string_not_closed {
-        args: (input: impl Display),
-        msg: format!("Expected a closed string but found `{}`.", input),
+        args: (input: impl Debug),
+        msg: format!("Expected a closed string but found `{:?}`.", input),
         help: None,
     }
 
@@ -296,30 +280,6 @@ create_errors!(
         help: None,
     }
 
-    /// When a escaped character was given more than one char to escape.
-    @backtraced
-    lexer_escaped_char_incorrect_length {
-        args: (input: impl Display),
-        msg: format!("Could not lex the following escaped char due to being given more than one char: `{}`.", input),
-        help: None,
-    }
-
-    /// When a escape was given but no following character
-    @backtraced
-    lexer_unclosed_escaped_char {
-        args: (),
-        msg: "There was no escaped character following the escape char symbol `\\`.",
-        help: None,
-    }
-
-    /// When a escaped hex was given more than two chars to escape.
-    @backtraced
-    lexer_escaped_hex_incorrect_length {
-        args: (input: impl Display),
-        msg: format!("Could not lex the following escaped hex due to being given more than two chars: `{}`.", input),
-        help: None,
-    }
-
     /// When a valid hex character was expected.
     @backtraced
     lexer_expected_valid_hex_char {
@@ -336,7 +296,7 @@ create_errors!(
         help: None,
     }
 
     /// When a escaped unicode char was given but it had an incorrect length.
     @backtraced
     lexer_invalid_escaped_unicode_length {
         args: (input: impl Display),
@@ -375,14 +335,6 @@ create_errors!(
         help: None,
     }
 
-    /// When a escaped unicode char has an emoji in it.
-    @backtraced
-    lexer_emoji_inside_escaped_unicode_char {
-        args: (input: impl Display),
-        msg: format!("There was an emoji found in the escaped unicode character: `{}`.", input),
-        help: None,
-    }
-
     /// For when a user puts parens around a single defined variable.
     @formatted
    invalid_parens_around_single_variable {
@@ -398,4 +350,12 @@ create_errors!(
         msg: "do not put parens around single dimension array size",
         help: None,
     }
+
+    /// For when the lexer encountered a char other than the one it expected.
+    @backtraced
+    lexer_expected_but_found {
+        args: (found: impl Display, expected: impl Display),
+        msg: format!("Found the char `{}`, but expected `{}`", found, expected),
+        help: None,
+    }
 );
diff --git a/leo/package/Cargo.toml b/leo/package/Cargo.toml
index 3169c218d2..3c8af886cc 100644
--- a/leo/package/Cargo.toml
+++ b/leo/package/Cargo.toml
@@ -40,7 +40,7 @@ version = "0.1"
 version = "2"
 
 [dependencies.zip]
-version = "0.5"
+version = "0.6"
 
 [dev-dependencies.lazy_static]
 version = "1.3.0"
diff --git a/leo/span/src/span.rs b/leo/span/src/span.rs
index be3db2dacd..b5754b013d 100644
--- a/leo/span/src/span.rs
+++ b/leo/span/src/span.rs
@@ -20,7 +20,6 @@ use std::{fmt, sync::Arc, usize};
 
 use serde::ser::{Serialize, SerializeStruct, Serializer};
 use serde::Deserialize;
-use tendril::StrTendril;
 
 /// The span type which tracks where formatted errors originate from in a Leo file.
 /// This is used in many spots throughout the rest of the Leo crates.
@@ -36,9 +35,8 @@ pub struct Span {
     pub col_stop: usize,
     /// The path to the Leo file containing the error.
     pub path: Arc<String>,
-    #[serde(with = "crate::tendril_json")]
     /// The content of the line(s) that the span is found on.
-    pub content: StrTendril,
+    pub content: String,
 }
 
 impl Span {
@@ -55,7 +53,7 @@ impl Span {
         col_start: usize,
         col_stop: usize,
         path: Arc<String>,
-        content: StrTendril,
+        content: String,
     ) -> Self {
         Self {
             line_start,
@@ -90,7 +88,7 @@ impl Serialize for Span {
         } else {
             state.serialize_field("path", "")?;
         }
-        state.serialize_field("content", self.content.as_ref())?;
+        state.serialize_field("content", &self.content)?;
         state.end()
     }
 }
@@ -156,7 +154,7 @@ impl std::ops::Add for Span {
                 new_content.push(format!("{:<1$}...", " ", other.col_start + 4));
             }
         }
-        let new_content = new_content.join("\n").into();
+        let new_content = new_content.join("\n");
         if self.line_start < other.line_stop {
             Span {
                 line_start: self.line_start,
diff --git a/tests/expectations/parser/parser/circuits/mixed_order_fail.leo.out b/tests/expectations/parser/parser/circuits/mixed_order_fail.leo.out
index 7ef15bf91e..f29639b5a2 100644
--- a/tests/expectations/parser/parser/circuits/mixed_order_fail.leo.out
+++ b/tests/expectations/parser/parser/circuits/mixed_order_fail.leo.out
@@ -2,4 +2,4 @@ namespace: Parse
 expectation: Fail
 outputs:
-  - "Error [EPAR0370022]: Member functions must come after member variables.\n --> test:7:5\n |\n 7 | foo: u8,\n | ^^^\nError [EPAR0370021]: Member functions must come after member consts.\n --> test:9:18\n |\n 9 | static const BAR: u8 = 0u8;\n | ^^^^^^^^^^^^^\nError [EPAR0370020]: Member variables must come after member consts.\n --> test:15:18\n |\n 15 | static const BAR: u8 = 0u8;\n | ^^^^^^^^^^^^^"
+  - "Error [EPAR0370021]: Member functions must come after member variables.\n --> test:7:5\n |\n 7 | foo: u8,\n | ^^^\nError [EPAR0370020]: Member functions must come after member consts.\n --> test:9:18\n |\n 9 | static const BAR: u8 = 0u8;\n | ^^^^^^^^^^^^^\nError [EPAR0370019]: Member variables must come after member consts.\n --> test:15:18\n |\n 15 | static const BAR: u8 = 0u8;\n | ^^^^^^^^^^^^^"
diff --git a/tests/expectations/parser/parser/circuits/mut_self_fail.leo.out b/tests/expectations/parser/parser/circuits/mut_self_fail.leo.out
index 7a1b004381..0007ae6240 100644
--- a/tests/expectations/parser/parser/circuits/mut_self_fail.leo.out
+++ b/tests/expectations/parser/parser/circuits/mut_self_fail.leo.out
@@ -2,4 +2,4 @@ namespace: Parse
 expectation: Fail
 outputs:
-  - "Error [EPAR0370019]: `mut self` is no longer accepted. Use `&self` if you would like to pass in a mutable reference to `self`\n --> test:4:16\n |\n 4 | function x(mut self) {\n | ^^^^^^^^"
+  - "Error [EPAR0370018]: `mut self` is no longer accepted. Use `&self` if you would like to pass in a mutable reference to `self`\n --> test:4:16\n |\n 4 | function x(mut self) {\n | ^^^^^^^^"
diff --git a/tests/expectations/parser/parser/circuits/self_not_first_fail.leo.out b/tests/expectations/parser/parser/circuits/self_not_first_fail.leo.out
index 5a73599380..7959437526 100644
--- a/tests/expectations/parser/parser/circuits/self_not_first_fail.leo.out
+++ b/tests/expectations/parser/parser/circuits/self_not_first_fail.leo.out
@@ -2,4 +2,4 @@ namespace: Parse
 expectation: Fail
 outputs:
-  - "Error [EPAR0370041]: A function received a self argument as not the first argument."
+  - "Error [EPAR0370036]: A function received a self argument as not the first argument."
diff --git a/tests/expectations/parser/parser/expression/array_init_fail.leo.out b/tests/expectations/parser/parser/expression/array_init_fail.leo.out
index c38384b2b3..17ad75389c 100644
--- a/tests/expectations/parser/parser/expression/array_init_fail.leo.out
+++ b/tests/expectations/parser/parser/expression/array_init_fail.leo.out
@@ -4,5 +4,5 @@ expectation: Fail
 outputs:
   - "Error [EPAR0370010]: illegal spread in array initializer\n --> test:1:1\n |\n 1 | [...0u8; 1]\n | ^^^^^^^"
   - "Error [EPAR0370010]: illegal spread in array initializer\n --> test:1:1\n |\n 1 | [...0; 1]\n | ^^^^^"
-  - "Error [EPAR0370023]: Array dimensions specified as a tuple cannot be empty.\n --> test:1:5\n |\n 1 | [0; ()]\n | ^^"
-  - "Error [EPAR0370045]: do not put parens around single dimension array size\n --> test:1:5\n |\n 1 | [0; (1)]\n | ^^^"
+  - "Error [EPAR0370022]: Array dimensions specified as a tuple cannot be empty.\n --> test:1:5\n |\n 1 | [0; ()]\n | ^^"
+  - "Error [EPAR0370039]: do not put parens around single dimension array size\n --> test:1:5\n |\n 1 | [0; (1)]\n | ^^^"
diff --git a/tests/expectations/parser/parser/expression/literal/char.leo.out b/tests/expectations/parser/parser/expression/literal/char.leo.out
index 19ebfb851e..1555a1e74d 100644
--- a/tests/expectations/parser/parser/expression/literal/char.leo.out
+++ b/tests/expectations/parser/parser/expression/literal/char.leo.out
@@ -2,56 +2,56 @@ namespace: Token
 expectation: Pass
 outputs:
-  - "'a' @ 1:1-4"
-  - "'Z' @ 1:1-4"
-  - "'\"' @ 1:1-5"
-  - "''' @ 1:1-5"
-  - "'' @ 1:1-5"
-  - "'' @ 1:1-5"
-  - "'\u0000' @ 1:1-5"
-  - "'\u000f' @ 1:1-8"
-  - "'' @ 1:1-6"
-  - "'å' @ 1:1-9"
-  - "'å' @ 1:1-5"
-  - "'Ӡ' @ 1:1-10"
-  - "'Ӡ' @ 1:1-5"
-  - "'55296' @ 1:1-11"
-  - "'❤' @ 1:1-11"
-  - "'❤' @ 1:1-6"
-  - "'😢' @ 1:1-12"
-  - "'😭' @ 1:1-7"
-  - "'􀀟' @ 1:1-13"
-  - "'*' @ 1:1-7"
-  - "'\u007f' @ 1:1-7"
-  - "'\u0000' @ 1:1-7"
-  - "'\u0001' @ 1:1-7"
-  - "'\u0002' @ 1:1-7"
-  - "'\u0003' @ 1:1-7"
-  - "'\u0004' @ 1:1-7"
-  - "'\u0005' @ 1:1-7"
-  - "'\u0006' @ 1:1-7"
-  - "'\u0007' @ 1:1-7"
-  - "'\u0010' @ 1:1-7"
-  - "'\u0011' @ 1:1-7"
-  - "'\u0012' @ 1:1-7"
-  - "'\u0013' @ 1:1-7"
-  - "'\u0014' @ 1:1-7"
-  - "'\u0015' @ 1:1-7"
-  - "'\u0016' @ 1:1-7"
-  - "'\u0017' @ 1:1-7"
-  - "'' @ 1:1-7"
-  - "'!' @ 1:1-7"
-  - "'\"' @ 1:1-7"
-  - "'#' @ 1:1-7"
-  - "'$' @ 1:1-7"
-  - "'%' @ 1:1-7"
-  - "'&' @ 1:1-7"
-  - "''' @ 1:1-7"
-  - "'0' @ 1:1-7"
-  - "'1' @ 1:1-7"
-  - "'2' @ 1:1-7"
-  - "'3' @ 1:1-7"
-  - "'4' @ 1:1-7"
-  - "'5' @ 1:1-7"
-  - "'6' @ 1:1-7"
-  - "'7' @ 1:1-7"
+  - "''a'' @ 1:1-4"
+  - "''Z'' @ 1:1-4"
+  - "''\"'' @ 1:1-5"
+  - "''''' @ 1:1-5"
+  - "''\t'' @ 1:1-5"
+  - "''\r'' @ 1:1-5"
+  - "''\u0000'' @ 1:1-5"
+  - "''\u000f'' @ 1:1-8"
+  - "'''' @ 1:1-6"
+  - "''å'' @ 1:1-9"
+  - "''å'' @ 1:1-5"
+  - "''Ӡ'' @ 1:1-10"
+  - "''Ӡ'' @ 1:1-5"
+  - "''D800'' @ 1:1-11"
+  - "''❤'' @ 1:1-11"
+  - "''❤'' @ 1:1-6"
+  - "''😢'' @ 1:1-12"
+  - "''😭'' @ 1:1-7"
+  - "''􀀟'' @ 1:1-13"
+  - "''*'' @ 1:1-7"
+  - "''\u007f'' @ 1:1-7"
+  - "''\u0000'' @ 1:1-7"
+  - "''\u0001'' @ 1:1-7"
+  - "''\u0002'' @ 1:1-7"
+  - "''\u0003'' @ 1:1-7"
+  - "''\u0004'' @ 1:1-7"
+  - "''\u0005'' @ 1:1-7"
+  - "''\u0006'' @ 1:1-7"
+  - "''\u0007'' @ 1:1-7"
+  - "''\u0010'' @ 1:1-7"
+  - "''\u0011'' @ 1:1-7"
+  - "''\u0012'' @ 1:1-7"
+  - "''\u0013'' @ 1:1-7"
+  - "''\u0014'' @ 1:1-7"
+  - "''\u0015'' @ 1:1-7"
+  - "''\u0016'' @ 1:1-7"
+  - "''\u0017'' @ 1:1-7"
+  - "'' '' @ 1:1-7"
+  - "''!'' @ 1:1-7"
+  - "''\"'' @ 1:1-7"
+  - "''#'' @ 1:1-7"
+  - "''$'' @ 1:1-7"
+  - "''%'' @ 1:1-7"
+  - "''&'' @ 1:1-7"
+  - "''''' @ 1:1-7"
+  - "''0'' @ 1:1-7"
+  - "''1'' @ 1:1-7"
+  - "''2'' @ 1:1-7"
+  - "''3'' @ 1:1-7"
+  - "''4'' @ 1:1-7"
+  - "''5'' @ 1:1-7"
+  - "''6'' @ 1:1-7"
+  - "''7'' @ 1:1-7"
diff --git a/tests/expectations/parser/parser/expression/literal/char_fail.leo.out b/tests/expectations/parser/parser/expression/literal/char_fail.leo.out
index 1f9a476eab..4c79c63c00 100644
--- a/tests/expectations/parser/parser/expression/literal/char_fail.leo.out
+++ b/tests/expectations/parser/parser/expression/literal/char_fail.leo.out
@@ -2,47 +2,47 @@ namespace: Token
 expectation: Fail
 outputs:
-  - "Error [EPAR0370028]: Expected a closed char but found `'\\'`."
-  - "Error [EPAR0370032]: Could not lex the following content: `\\`."
-  - "Error [EPAR0370028]: Expected a closed char but found `'\\`."
-  - "Error [EPAR0370032]: Could not lex the following content: `\\n`."
-  - "Error [EPAR0370028]: Expected a closed char but found `'a`."
-  - "Error [EPAR0370024]: Expected more characters to lex but found none."
-  - "Error [EPAR0370035]: Could not lex the following escaped hex due to being given more than two chars: `7`."
-  - "Error [EPAR0370035]: Could not lex the following escaped hex due to being given more than two chars: `z`."
-  - "Error [EPAR0370036]: Expected a valid hex character but found `154`."
-  - "Error [EPAR0370028]: Expected a closed char but found `\\x7g`."
-  - "Error [EPAR0370036]: Expected a valid hex character but found `128`."
-  - "Error [EPAR0370036]: Expected a valid hex character but found `193`."
-  - "Error [EPAR0370036]: Expected a valid hex character but found `194`."
-  - "Error [EPAR0370036]: Expected a valid hex character but found `223`."
-  - "Error [EPAR0370036]: Expected a valid hex character but found `192`."
-  - "Error [EPAR0370036]: Expected a valid hex character but found `224`."
-  - "Error [EPAR0370036]: Expected a valid hex character but found `159`."
-  - "Error [EPAR0370028]: Expected a closed char but found `abcdefg`."
-  - "Error [EPAR0370026]: Expected a valid escape character but found `a`."
-  - "Error [EPAR0370026]: Expected a valid escape character but found `z`."
-  - "Error [EPAR0370026]: Expected a valid escape character but found `A`."
-  - "Error [EPAR0370026]: Expected a valid escape character but found `Z`."
- - "Error [EPAR0370026]: Expected a valid escape character but found `1`." - - "Error [EPAR0370026]: Expected a valid escape character but found `9`." - - "Error [EPAR0370026]: Expected a valid escape character but found `*`." - - "Error [EPAR0370035]: Could not lex the following escaped hex due to being given more than two chars: ``." - - "Error [EPAR0370026]: Expected a valid escape character but found `39`." - - "Error [EPAR0370033]: Could not lex the following escaped char due to being given more than one char: `t\\t`." - - "Error [EPAR0370026]: Expected a valid escape character but found `122`." - - "Error [EPAR0370026]: Expected a valid escape character but found `49`." - - "Error [EPAR0370026]: Expected a valid escape character but found `125`." - - "Error [EPAR0370029]: Expected valid character but found `'🦀\\n`." - - "Error [EPAR0370026]: Expected a valid escape character but found `49`." - - "Error [EPAR0370029]: Expected valid character but found `'🦀1🦀`." - - "Error [EPAR0370026]: Expected a valid escape character but found `54`." - - "Error [EPAR0370037]: There was no closing `}` after a escaped unicode `\\u{af🦀`." - - "Error [EPAR0370037]: There was no closing `}` after a escaped unicode `\\u{2764z`." - - "Error [EPAR0370028]: Expected a closed char but found `\\u{276g}`." - - "Error [EPAR0370026]: Expected a valid escape character but found `57`." - - "Error [EPAR0370026]: Expected a valid escape character but found `48`." - - "Error [EPAR0370026]: Expected a valid escape character but found `48`." - - "Error [EPAR0370039]: The escaped unicode char `110000` is greater than 0x10FFFF." - - "Error [EPAR0370038]: The escaped unicode char `bbbbb}\\u{aaaa` is not within valid length of [1, 6]." - - "Error [EPAR0370029]: Expected valid character but found `'😭😂`." + - "Error [EPAR0370023]: Expected more characters to lex but found none." + - "Error [EPAR0370030]: Could not lex the following content: `\\`." + - "Error [EPAR0370023]: Expected more characters to lex but found none." + - "Error [EPAR0370030]: Could not lex the following content: `\\n`." + - "Error [EPAR0370023]: Expected more characters to lex but found none." + - "Error [EPAR0370023]: Expected more characters to lex but found none." + - "Error [EPAR0370031]: Expected a valid hex character but found `'`." + - "Error [EPAR0370031]: Expected a valid hex character but found `'`." + - "Error [EPAR0370031]: Expected a valid hex character but found `9A`." + - "Error [EPAR0370031]: Expected a valid hex character but found `7g`." + - "Error [EPAR0370031]: Expected a valid hex character but found `80`." + - "Error [EPAR0370031]: Expected a valid hex character but found `c1`." + - "Error [EPAR0370031]: Expected a valid hex character but found `c2`." + - "Error [EPAR0370031]: Expected a valid hex character but found `DF`." + - "Error [EPAR0370031]: Expected a valid hex character but found `C0`." + - "Error [EPAR0370031]: Expected a valid hex character but found `e0`." + - "Error [EPAR0370031]: Expected a valid hex character but found `9f`." + - "Error [EPAR0370026]: Expected a closed char but found `a`." + - "Error [EPAR0370024]: Expected a valid escape character but found `a`." + - "Error [EPAR0370024]: Expected a valid escape character but found `z`." + - "Error [EPAR0370024]: Expected a valid escape character but found `A`." + - "Error [EPAR0370024]: Expected a valid escape character but found `Z`." + - "Error [EPAR0370024]: Expected a valid escape character but found `1`." 
+ - "Error [EPAR0370024]: Expected a valid escape character but found `9`." + - "Error [EPAR0370024]: Expected a valid escape character but found `*`." + - "Error [EPAR0370031]: Expected a valid hex character but found `'`." + - "Error [EPAR0370037]: There was no opening `{` after starting an escaped unicode `'`." + - "Error [EPAR0370026]: Expected a closed char but found `\t`." + - "Error [EPAR0370037]: There was no opening `{` after starting an escaped unicode `z`." + - "Error [EPAR0370037]: There was no opening `{` after starting an escaped unicode `1`." + - "Error [EPAR0370037]: There was no opening `{` after starting an escaped unicode `}`." + - "Error [EPAR0370026]: Expected a closed char but found `🦀`." + - "Error [EPAR0370037]: There was no opening `{` after starting an escaped unicode `1`." + - "Error [EPAR0370026]: Expected a closed char but found `🦀`." + - "Error [EPAR0370037]: There was no opening `{` after starting an escaped unicode `6`." + - "Error [EPAR0370032]: There was no closing `}` after a escaped unicode `af🦀'`." + - "Error [EPAR0370032]: There was no closing `}` after a escaped unicode `2764z'`." + - "Error [EPAR0370031]: Expected a valid hex character but found `276g`." + - "Error [EPAR0370037]: There was no opening `{` after starting an escaped unicode `9`." + - "Error [EPAR0370037]: There was no opening `{` after starting an escaped unicode `0`." + - "Error [EPAR0370037]: There was no opening `{` after starting an escaped unicode `0`." + - "Error [EPAR0370034]: The escaped unicode char `110000` is greater than 0x10FFFF." + - "Error [EPAR0370026]: Expected a closed char but found `򻮻`." + - "Error [EPAR0370026]: Expected a closed char but found `😭`." diff --git a/tests/expectations/parser/parser/expression/literal/char_parse.leo.out b/tests/expectations/parser/parser/expression/literal/char_parse.leo.out index fa87952664..3d84a2cece 100644 --- a/tests/expectations/parser/parser/expression/literal/char_parse.leo.out +++ b/tests/expectations/parser/parser/expression/literal/char_parse.leo.out @@ -2,54 +2,54 @@ namespace: Token expectation: Pass outputs: - - "'a' @ 1:1-4" - - "'Z' @ 1:1-4" - - "'\"' @ 1:1-5" - - "'' @ 1:1-5" - - "'' @ 1:1-5" - - "'\u0000' @ 1:1-5" - - "'\u000f' @ 1:1-8" - - "'' @ 1:1-6" - - "'å' @ 1:1-9" - - "'å' @ 1:1-5" - - "'Ӡ' @ 1:1-10" - - "'Ӡ' @ 1:1-5" - - "'❤' @ 1:1-11" - - "'❤' @ 1:1-6" - - "'😢' @ 1:1-12" - - "'😭' @ 1:1-7" - - "'􀀟' @ 1:1-13" - - "'*' @ 1:1-7" - - "'\u007f' @ 1:1-7" - - "'\u0000' @ 1:1-7" - - "'\u0001' @ 1:1-7" - - "'\u0002' @ 1:1-7" - - "'\u0003' @ 1:1-7" - - "'\u0004' @ 1:1-7" - - "'\u0005' @ 1:1-7" - - "'\u0006' @ 1:1-7" - - "'\u0007' @ 1:1-7" - - "'\u0010' @ 1:1-7" - - "'\u0011' @ 1:1-7" - - "'\u0012' @ 1:1-7" - - "'\u0013' @ 1:1-7" - - "'\u0014' @ 1:1-7" - - "'\u0015' @ 1:1-7" - - "'\u0016' @ 1:1-7" - - "'\u0017' @ 1:1-7" - - "'' @ 1:1-7" - - "'!' 
@ 1:1-7" - - "'\"' @ 1:1-7" - - "'#' @ 1:1-7" - - "'$' @ 1:1-7" - - "'%' @ 1:1-7" - - "'&' @ 1:1-7" - - "''' @ 1:1-7" - - "'0' @ 1:1-7" - - "'1' @ 1:1-7" - - "'2' @ 1:1-7" - - "'3' @ 1:1-7" - - "'4' @ 1:1-7" - - "'5' @ 1:1-7" - - "'6' @ 1:1-7" - - "'7' @ 1:1-7" + - "''a'' @ 1:1-4" + - "''Z'' @ 1:1-4" + - "''\"'' @ 1:1-5" + - "''\t'' @ 1:1-5" + - "''\r'' @ 1:1-5" + - "''\u0000'' @ 1:1-5" + - "''\u000f'' @ 1:1-8" + - "'''' @ 1:1-6" + - "''å'' @ 1:1-9" + - "''å'' @ 1:1-5" + - "''Ӡ'' @ 1:1-10" + - "''Ӡ'' @ 1:1-5" + - "''❤'' @ 1:1-11" + - "''❤'' @ 1:1-6" + - "''😢'' @ 1:1-12" + - "''😭'' @ 1:1-7" + - "''􀀟'' @ 1:1-13" + - "''*'' @ 1:1-7" + - "''\u007f'' @ 1:1-7" + - "''\u0000'' @ 1:1-7" + - "''\u0001'' @ 1:1-7" + - "''\u0002'' @ 1:1-7" + - "''\u0003'' @ 1:1-7" + - "''\u0004'' @ 1:1-7" + - "''\u0005'' @ 1:1-7" + - "''\u0006'' @ 1:1-7" + - "''\u0007'' @ 1:1-7" + - "''\u0010'' @ 1:1-7" + - "''\u0011'' @ 1:1-7" + - "''\u0012'' @ 1:1-7" + - "''\u0013'' @ 1:1-7" + - "''\u0014'' @ 1:1-7" + - "''\u0015'' @ 1:1-7" + - "''\u0016'' @ 1:1-7" + - "''\u0017'' @ 1:1-7" + - "'' '' @ 1:1-7" + - "''!'' @ 1:1-7" + - "''\"'' @ 1:1-7" + - "''#'' @ 1:1-7" + - "''$'' @ 1:1-7" + - "''%'' @ 1:1-7" + - "''&'' @ 1:1-7" + - "''''' @ 1:1-7" + - "''0'' @ 1:1-7" + - "''1'' @ 1:1-7" + - "''2'' @ 1:1-7" + - "''3'' @ 1:1-7" + - "''4'' @ 1:1-7" + - "''5'' @ 1:1-7" + - "''6'' @ 1:1-7" + - "''7'' @ 1:1-7" diff --git a/tests/expectations/parser/parser/expression/literal/comment_fail.leo.out b/tests/expectations/parser/parser/expression/literal/comment_fail.leo.out index 394146f98a..7507dd35ed 100644 --- a/tests/expectations/parser/parser/expression/literal/comment_fail.leo.out +++ b/tests/expectations/parser/parser/expression/literal/comment_fail.leo.out @@ -2,14 +2,14 @@ namespace: ParseExpression expectation: Fail outputs: - - "Error [EPAR0370031]: Block comment does not close with content: `/*`." - - "Error [EPAR0370031]: Block comment does not close with content: `/* test`." + - "Error [EPAR0370028]: Empty block comment." + - "Error [EPAR0370029]: Block comment does not close with content: `/* test`." - "Error [EPAR0370009]: unexpected string: expected 'expression', got '/'\n --> test:1:1\n |\n 1 | / /\n | ^" - - "Error [EPAR0370031]: Block comment does not close with content: `/*/`." + - "Error [EPAR0370029]: Block comment does not close with content: `/*/`." - "Error [EPAR0370009]: unexpected string: expected 'expression', got '*'\n --> test:1:1\n |\n 1 | */\n | ^" - - "Error [EPAR0370032]: Could not lex the following content: `🦀**/`." - - "Error [EPAR0370032]: Could not lex the following content: `🦀*/`." - - "Error [EPAR0370031]: Block comment does not close with content: `/*🦀/`." - - "Error [EPAR0370031]: Block comment does not close with content: `/**🦀`." - - "Error [EPAR0370031]: Block comment does not close with content: `/*🦀`." - - "Error [EPAR0370031]: Block comment does not close with content: `/*/*`." + - "Error [EPAR0370030]: Could not lex the following content: `🦀**/`." + - "Error [EPAR0370030]: Could not lex the following content: `🦀*/`." + - "Error [EPAR0370029]: Block comment does not close with content: `/*🦀/`." + - "Error [EPAR0370029]: Block comment does not close with content: `/**🦀`." + - "Error [EPAR0370029]: Block comment does not close with content: `/*🦀`." + - "Error [EPAR0370029]: Block comment does not close with content: `/*/*`." 
diff --git a/tests/expectations/parser/parser/expression/literal/int_fail.leo.out b/tests/expectations/parser/parser/expression/literal/int_fail.leo.out index 3f1a3281cd..3620d1ec62 100644 --- a/tests/expectations/parser/parser/expression/literal/int_fail.leo.out +++ b/tests/expectations/parser/parser/expression/literal/int_fail.leo.out @@ -2,6 +2,6 @@ namespace: Token expectation: Fail outputs: - - "Error [EPAR0370040]: A hex number `0x..` was provided but hex is not allowed." - - "Error [EPAR0370040]: A hex number `0x..` was provided but hex is not allowed." - - "Error [EPAR0370040]: A hex number `0x..` was provided but hex is not allowed." + - "Error [EPAR0370035]: A hex number `0x..` was provided but hex is not allowed." + - "Error [EPAR0370035]: A hex number `0x..` was provided but hex is not allowed." + - "Error [EPAR0370035]: A hex number `0x..` was provided but hex is not allowed." diff --git a/tests/expectations/parser/parser/expression/literal/string.leo.out b/tests/expectations/parser/parser/expression/literal/string.leo.out index 11e9e9e3d6..83980319ab 100644 --- a/tests/expectations/parser/parser/expression/literal/string.leo.out +++ b/tests/expectations/parser/parser/expression/literal/string.leo.out @@ -14,18 +14,21 @@ outputs: - "'\"\n\"' @ 1:1-7" - "'\"\u007f\"' @ 1:1-7" - "'\"aa \\ \" ' \n aa \t \r \u0000\"' @ 1:1-30" - - "'\"test \"' @ 1:1-15" - - "'\"\"' @ 1:1-15" - - "'\"\"' @ 1:1-10" - - "'\"\"' @ 1:1-7" - - "'\"\"' @ 1:1-6" - - "'\"\"' @ 1:1-12" - - "'\"(3\"' @ 1:1-17" - - "'\"ヽಠ\"' @ 1:1-26" - - "'\"(╯\"' @ 1:1-33" - - "'\"┬ノ ゜゜\"' @ 1:1-29" - - "'\"( ͜͡͡\"' @ 1:1-20" - - "'\"b\"' @ 1:1-4,'// TODO reenabe once #1682 is closed \"ᕙ(▀̿ĺ̯▀̿ ̿)ᕗ\"' @ 1:5-69" - - "'\"♥-_-]\"' @ 1:1-20" - - "'\"b\"' @ 1:1-4,'// TODO reenabe once #1682 is closed \"(⑅∫°ਊ°)∫\"' @ 1:5-62" - - "'\"b\"' @ 1:1-4,'// TODO reenabe once #1682 is closed \"🦀°1\"' @ 1:5-51" + - "'\"test 😒€\"' @ 1:1-15" + - "'\"😭😂😘\"' @ 1:1-15" + - "'\"✋🏿\"' @ 1:1-10" + - "'\"🦀\"' @ 1:1-7" + - "'\"￿\"' @ 1:1-6" + - "'\"���\"' @ 1:1-12" + - "'\"(>3<)三\"' @ 1:1-17" + - "'\"ヽ༼ ಠ益ಠ ༽ノ\"' @ 1:1-26" + - "'\"(╯°□°)╯︵ ┻━┻\"' @ 1:1-33" + - "'\"┬─┬ ノ( ゜-゜ノ)\"' @ 1:1-29" + - "'\"( ͡° ͜ʖ ͡°)\"' @ 1:1-20" + - "'\"b\"' @ 1:1-4" + - "'\"ᕙ(▀̿ĺ̯▀̿ ̿)ᕗ\"' @ 1:1-28" + - "'\"♥╣[-_-]╠♥\"' @ 1:1-20" + - "'\"b\"' @ 1:1-4" + - "'\"(⑅∫°ਊ°)∫\"' @ 1:1-21" + - "'\"b\"' @ 1:1-4" + - "'\"🦀°1\"' @ 1:1-10" diff --git a/tests/expectations/parser/parser/expression/literal/string_fail.leo.out b/tests/expectations/parser/parser/expression/literal/string_fail.leo.out index ac6dda0540..bac30c2c73 100644 --- a/tests/expectations/parser/parser/expression/literal/string_fail.leo.out +++ b/tests/expectations/parser/parser/expression/literal/string_fail.leo.out @@ -2,15 +2,16 @@ namespace: Token expectation: Fail outputs: - - "Error [EPAR0370027]: Expected a closed string but found `\"Hello world!`." - - "Error [EPAR0370027]: Expected a closed string but found `\"\\\"`." - - "Error [EPAR0370027]: Expected a closed string but found `\"\\`." - - "Error [EPAR0370026]: Expected a valid escape character but found `l`." - - "Error [EPAR0370027]: Expected a closed string but found `\"\\uaaa\"`." - - "Error [EPAR0370027]: Expected a closed string but found `\"\\u\"`." - - "Error [EPAR0370036]: Expected a valid hex character but found `255`." - - "Error [EPAR0370027]: Expected a closed string but found `\"\\x\"`." - - "Error [EPAR0370042]: There was no opening `{` after starting an escaped unicode `\\u}`." 
- - "Error [EPAR0370043]: There was an emoji found in the escaped unicode character: `\"\\u6🦀`." - - "Error [EPAR0370043]: There was an emoji found in the escaped unicode character: `\"\\u{af🦀`." - - "Error [EPAR0370032]: Could not lex the following content: `⭇😍;`." + - "Error [EPAR0370025]: Expected a closed string but found `[Scalar('H'), Scalar('e'), Scalar('l'), Scalar('l'), Scalar('o'), Scalar(' '), Scalar('w'), Scalar('o'), Scalar('r'), Scalar('l'), Scalar('d'), Scalar('!')]`." + - "Error [EPAR0370025]: Expected a closed string but found `[Scalar('\"')]`." + - "Error [EPAR0370023]: Expected more characters to lex but found none." + - "Error [EPAR0370024]: Expected a valid escape character but found `l`." + - "Error [EPAR0370037]: There was no opening `{` after starting an escaped unicode `a`." + - "Error [EPAR0370037]: There was no opening `{` after starting an escaped unicode `\"`." + - "Error [EPAR0370031]: Expected a valid hex character but found `FF`." + - "Error [EPAR0370023]: Expected more characters to lex but found none." + - "Error [EPAR0370037]: There was no opening `{` after starting an escaped unicode `}`." + - "Error [EPAR0370037]: There was no opening `{` after starting an escaped unicode `6`." + - "Error [EPAR0370032]: There was no closing `}` after a escaped unicode `af🦀\"`." + - "Error [EPAR0370025]: Expected a closed string but found `[Scalar('\"')]`." + - "Error [EPAR0370025]: Expected a closed string but found `[Scalar('⭇'), Scalar('😍'), Scalar(';')]`." diff --git a/tests/expectations/parser/parser/expression/literal/string_parse.leo.out b/tests/expectations/parser/parser/expression/literal/string_parse.leo.out index 2f4bf81ada..cd98aa67b8 100644 --- a/tests/expectations/parser/parser/expression/literal/string_parse.leo.out +++ b/tests/expectations/parser/parser/expression/literal/string_parse.leo.out @@ -177,6 +177,8 @@ outputs: - Scalar: 115 - Scalar: 116 - Scalar: 32 + - Scalar: 128530 + - Scalar: 8364 - span: line_start: 1 line_stop: 1 @@ -186,7 +188,9 @@ outputs: content: "\"test 😒€\"" - Value: String: - - [] + - - Scalar: 128557 + - Scalar: 128514 + - Scalar: 128536 - span: line_start: 1 line_stop: 1 diff --git a/tests/expectations/parser/parser/functions/annotated_arg_not_ident.leo.out b/tests/expectations/parser/parser/functions/annotated_arg_not_ident.leo.out index 35a7e35ca2..cffae59c8f 100644 --- a/tests/expectations/parser/parser/functions/annotated_arg_not_ident.leo.out +++ b/tests/expectations/parser/parser/functions/annotated_arg_not_ident.leo.out @@ -2,4 +2,4 @@ namespace: Parse expectation: Fail outputs: - - "Error [EPAR0370009]: unexpected string: expected 'ident', got '?'\n --> test:3:6\n |\n 3 | @foo(?, bar, ?)\n | ^\nError [EPAR0370009]: unexpected string: expected 'ident', got '?'\n --> test:3:14\n |\n 3 | @foo(?, bar, ?)\n | ^\nError [EPAR0370009]: unexpected string: expected 'ident', got '123'\n --> test:8:6\n |\n 8 | @bar(123) // ints not vali\n | ^^^\nError [EPAR0370017]: \"@context(...)\" is deprecated. Did you mean @test annotation?\n --> test:14:2\n |\n 14 | @context // recovery witness\n | ^^^^^^^" + - "Error [EPAR0370009]: unexpected string: expected 'ident', got '?'\n --> test:3:6\n |\n 3 | @foo(?, bar, ?)\n | ^\nError [EPAR0370009]: unexpected string: expected 'ident', got '?'\n --> test:3:14\n |\n 3 | @foo(?, bar, ?)\n | ^\nError [EPAR0370009]: unexpected string: expected 'ident', got '123'\n --> test:8:6\n |\n 8 | @bar(123) // ints not vali\n | ^^^\nError [EPAR0370016]: \"@context(...)\" is deprecated. 
Did you mean @test annotation?\n --> test:14:2\n |\n 14 | @context // recovery witness\n | ^^^^^^^" diff --git a/tests/expectations/parser/parser/functions/annotated_context_fail.leo.out b/tests/expectations/parser/parser/functions/annotated_context_fail.leo.out index 15e47e1265..6366839d98 100644 --- a/tests/expectations/parser/parser/functions/annotated_context_fail.leo.out +++ b/tests/expectations/parser/parser/functions/annotated_context_fail.leo.out @@ -2,4 +2,4 @@ namespace: Parse expectation: Fail outputs: - - "Error [EPAR0370017]: \"@context(...)\" is deprecated. Did you mean @test annotation?\n --> test:3:2\n |\n 3 | @context\n | ^^^^^^^\nError [EPAR0370017]: \"@context(...)\" is deprecated. Did you mean @test annotation?\n --> test:8:2\n |\n 8 | @context // recovery witness\n | ^^^^^^^" + - "Error [EPAR0370016]: \"@context(...)\" is deprecated. Did you mean @test annotation?\n --> test:3:2\n |\n 3 | @context\n | ^^^^^^^\nError [EPAR0370016]: \"@context(...)\" is deprecated. Did you mean @test annotation?\n --> test:8:2\n |\n 8 | @context // recovery witness\n | ^^^^^^^" diff --git a/tests/expectations/parser/parser/functions/escape_fail.leo.out b/tests/expectations/parser/parser/functions/escape_fail.leo.out index d679c4f11b..9fe177028c 100644 --- a/tests/expectations/parser/parser/functions/escape_fail.leo.out +++ b/tests/expectations/parser/parser/functions/escape_fail.leo.out @@ -2,4 +2,4 @@ namespace: Parse expectation: Fail outputs: - - "Error [EPAR0370032]: Could not lex the following content: `\\`." + - "Error [EPAR0370030]: Could not lex the following content: `\\`." diff --git a/tests/expectations/parser/parser/program/backslash_eof.leo.out b/tests/expectations/parser/parser/program/backslash_eof.leo.out index d679c4f11b..9fe177028c 100644 --- a/tests/expectations/parser/parser/program/backslash_eof.leo.out +++ b/tests/expectations/parser/parser/program/backslash_eof.leo.out @@ -2,4 +2,4 @@ namespace: Parse expectation: Fail outputs: - - "Error [EPAR0370032]: Could not lex the following content: `\\`." + - "Error [EPAR0370030]: Could not lex the following content: `\\`." diff --git a/tests/expectations/parser/parser/program/dollar_eof.leo.out b/tests/expectations/parser/parser/program/dollar_eof.leo.out index 0389966496..1d418a4b25 100644 --- a/tests/expectations/parser/parser/program/dollar_eof.leo.out +++ b/tests/expectations/parser/parser/program/dollar_eof.leo.out @@ -2,4 +2,4 @@ namespace: Parse expectation: Fail outputs: - - "Error [EPAR0370032]: Could not lex the following content: `$`." + - "Error [EPAR0370030]: Could not lex the following content: `$`." diff --git a/tests/expectations/parser/parser/program/escape_u8_eof.leo.out b/tests/expectations/parser/parser/program/escape_u8_eof.leo.out index 82f56889ac..2973be8cc8 100644 --- a/tests/expectations/parser/parser/program/escape_u8_eof.leo.out +++ b/tests/expectations/parser/parser/program/escape_u8_eof.leo.out @@ -2,4 +2,4 @@ namespace: Parse expectation: Fail outputs: - - "Error [EPAR0370032]: Could not lex the following content: `\\1u8`." + - "Error [EPAR0370030]: Could not lex the following content: `\\1u8`." 
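The `[Scalar('H'), Scalar('e'), ...]` strings that now appear in string_fail.leo.out trace back to the `impl Display` to `impl Debug` switch in `lexer_string_not_closed` at the top of this diff: the unterminated string is reported by `{:?}`-formatting the vector of characters collected so far. A self-contained sketch of that mechanism follows; the `Char` enum mirrors the `Scalar(...)` shape in the output but is illustrative, not Leo's exact type.

```rust
use std::fmt::Debug;

// Stand-in for the tokenizer's character type, whose derived Debug impl
// produces the `Scalar('H')` rendering seen in the expectations.
#[derive(Debug)]
enum Char {
    Scalar(char),
}

// Mirrors the new lexer_string_not_closed shape: `impl Debug` plus "{:?}".
fn string_not_closed(found: impl Debug) -> String {
    format!("Expected a closed string but found `{:?}`.", found)
}

fn main() {
    let partial = vec![Char::Scalar('H'), Char::Scalar('i')];
    assert_eq!(
        string_not_closed(&partial),
        "Expected a closed string but found `[Scalar('H'), Scalar('i')]`."
    );
}
```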
diff --git a/tests/expectations/parser/parser/program/hex_eof.leo.out b/tests/expectations/parser/parser/program/hex_eof.leo.out index 37aea2c88e..a7fcef2469 100644 --- a/tests/expectations/parser/parser/program/hex_eof.leo.out +++ b/tests/expectations/parser/parser/program/hex_eof.leo.out @@ -2,4 +2,4 @@ namespace: Parse expectation: Fail outputs: - - "Error [EPAR0370040]: A hex number `0x..` was provided but hex is not allowed." + - "Error [EPAR0370035]: A hex number `0x..` was provided but hex is not allowed." diff --git a/tests/expectations/parser/parser/program/pipe_eof.leo.out b/tests/expectations/parser/parser/program/pipe_eof.leo.out index 2103e7b671..5c1018440c 100644 --- a/tests/expectations/parser/parser/program/pipe_eof.leo.out +++ b/tests/expectations/parser/parser/program/pipe_eof.leo.out @@ -2,4 +2,4 @@ namespace: Parse expectation: Fail outputs: - - "Error [EPAR0370032]: Could not lex the following content: `|`." + - "Error [EPAR0370023]: Expected more characters to lex but found none." diff --git a/tests/expectations/parser/parser/program/q_eof.leo.out b/tests/expectations/parser/parser/program/q_eof.leo.out index 68f7c3b559..d7f4823873 100644 --- a/tests/expectations/parser/parser/program/q_eof.leo.out +++ b/tests/expectations/parser/parser/program/q_eof.leo.out @@ -2,4 +2,4 @@ namespace: Parse expectation: Fail outputs: - - "Error [EPAR0370027]: Expected a closed string but found `\"`." + - "Error [EPAR0370025]: Expected a closed string but found `[]`." diff --git a/tests/expectations/parser/parser/program/sq_eof.leo.out b/tests/expectations/parser/parser/program/sq_eof.leo.out index 1bab5ffabb..5c1018440c 100644 --- a/tests/expectations/parser/parser/program/sq_eof.leo.out +++ b/tests/expectations/parser/parser/program/sq_eof.leo.out @@ -2,4 +2,4 @@ namespace: Parse expectation: Fail outputs: - - "Error [EPAR0370028]: Expected a closed char but found `'`." + - "Error [EPAR0370023]: Expected more characters to lex but found none." diff --git a/tests/expectations/parser/parser/program/tilde_eof.leo.out b/tests/expectations/parser/parser/program/tilde_eof.leo.out index c44c1a4d3f..02b455d02b 100644 --- a/tests/expectations/parser/parser/program/tilde_eof.leo.out +++ b/tests/expectations/parser/parser/program/tilde_eof.leo.out @@ -2,4 +2,4 @@ namespace: Parse expectation: Fail outputs: - - "Error [EPAR0370032]: Could not lex the following content: `~`." + - "Error [EPAR0370030]: Could not lex the following content: `~`." diff --git a/tests/expectations/parser/parser/program/unclosed_unicode_eof_fail.leo.out b/tests/expectations/parser/parser/program/unclosed_unicode_eof_fail.leo.out index ad932d117a..5c1018440c 100644 --- a/tests/expectations/parser/parser/program/unclosed_unicode_eof_fail.leo.out +++ b/tests/expectations/parser/parser/program/unclosed_unicode_eof_fail.leo.out @@ -2,4 +2,4 @@ namespace: Parse expectation: Fail outputs: - - "Error [EPAR0370026]: Expected a valid escape character but found `117`." + - "Error [EPAR0370023]: Expected more characters to lex but found none." 
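Several of the end-of-file tests above (pipe_eof, sq_eof, unclosed_unicode_eof_fail) now collapse onto the single message "Expected more characters to lex but found none" (EPAR0370023). That suggests the rewritten tokenizer funnels all look-ahead through one cursor with a single out-of-input path, rather than each token rule carrying its own EOF message. A hedged sketch of that pattern; `Cursor` and `expect_more` are illustrative names, not Leo's API.

```rust
use std::iter::Peekable;
use std::str::Chars;

struct Cursor<'a> {
    chars: Peekable<Chars<'a>>,
}

impl<'a> Cursor<'a> {
    fn new(src: &'a str) -> Self {
        Self { chars: src.chars().peekable() }
    }

    /// Every token rule pulls characters through here, so a `'`, `"`,
    /// or `\u` cut off by end-of-input yields the same uniform error.
    fn expect_more(&mut self) -> Result<char, String> {
        self.chars
            .next()
            .ok_or_else(|| "Expected more characters to lex but found none.".to_string())
    }
}

fn main() {
    let mut cursor = Cursor::new("'"); // the sq_eof test input
    assert_eq!(cursor.expect_more(), Ok('\'')); // the opening quote lexes
    assert!(cursor.expect_more().is_err()); // then input runs out
}
```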
diff --git a/tests/expectations/parser/parser/serialize/palindrome.leo.out b/tests/expectations/parser/parser/serialize/palindrome.leo.out index df0ea483e8..6ecc69d502 100644 --- a/tests/expectations/parser/parser/serialize/palindrome.leo.out +++ b/tests/expectations/parser/parser/serialize/palindrome.leo.out @@ -416,6 +416,10 @@ outputs: arguments: - Value: String: + - Scalar: 128512 + - Scalar: 128512 + - Scalar: 128512 + - Scalar: 128512 - Scalar: 128512 - Scalar: 32 - Scalar: 32 @@ -426,6 +430,12 @@ outputs: - Scalar: 32 - Scalar: 32 - Scalar: 32 + - Scalar: 32 + - Scalar: 128512 + - Scalar: 128512 + - Scalar: 128512 + - Scalar: 128512 + - Scalar: 128512 - Console: function: Assert: diff --git a/tests/expectations/parser/parser/statement/assign_fail.leo.out b/tests/expectations/parser/parser/statement/assign_fail.leo.out index 4bf56a3048..9f5508847a 100644 --- a/tests/expectations/parser/parser/statement/assign_fail.leo.out +++ b/tests/expectations/parser/parser/statement/assign_fail.leo.out @@ -15,4 +15,4 @@ outputs: - "Error [EPAR0370011]: invalid assignment target\n --> test:1:1\n |\n 1 | x {x: y, y: z} = y;\n | ^^^^^^^^^^^^^^" - "Error [EPAR0370011]: invalid assignment target\n --> test:1:1\n |\n 1 | x() = y;\n | ^^^" - "Error [EPAR0370011]: invalid assignment target\n --> test:1:1\n |\n 1 | x.y() = y;\n | ^^^^^" - - "Error [EPAR0370032]: Could not lex the following content: `🦀 = y;`." + - "Error [EPAR0370030]: Could not lex the following content: `🦀 = y;`." diff --git a/tests/expectations/parser/parser/statement/definition_fail.leo.out b/tests/expectations/parser/parser/statement/definition_fail.leo.out index 7dbde10949..869c0238aa 100644 --- a/tests/expectations/parser/parser/statement/definition_fail.leo.out +++ b/tests/expectations/parser/parser/statement/definition_fail.leo.out @@ -2,30 +2,30 @@ namespace: ParseStatement expectation: Fail outputs: - - "Error [EPAR0370015]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | let mut x = expr;\n | ^^^^^^^" - - "Error [EPAR0370015]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | let mut x = ();\n | ^^^^^^^" - - "Error [EPAR0370015]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | let mut x = x+y;\n | ^^^^^^^" - - "Error [EPAR0370015]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | let mut x = (x,y);\n | ^^^^^^^" - - "Error [EPAR0370015]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | let mut x = x();\n | ^^^^^^^" - - "Error [EPAR0370015]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | const mut x = expr;\n | ^^^^^^^^^" - - "Error [EPAR0370015]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | const mut x = ();\n | ^^^^^^^^^" - - "Error [EPAR0370015]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | const mut x = x+y;\n | ^^^^^^^^^" - - "Error [EPAR0370015]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | const mut x = (x,y);\n | ^^^^^^^^^" - - "Error [EPAR0370015]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | const mut x = x();\n | ^^^^^^^^^" - - "Error [EPAR0370015]: let mut = ... is deprecated. 
`let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | let mut x: u32 = expr;\n | ^^^^^^^" - - "Error [EPAR0370015]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | let mut x: u32 = ();\n | ^^^^^^^" - - "Error [EPAR0370015]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | let mut x: u32 = x+y;\n | ^^^^^^^" - - "Error [EPAR0370015]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | let mut x: u32 = (x,y);\n | ^^^^^^^" - - "Error [EPAR0370015]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | let mut x: u32 = x();\n | ^^^^^^^" - - "Error [EPAR0370015]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | const mut x: u32 = expr;\n | ^^^^^^^^^" - - "Error [EPAR0370015]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | const mut x: u32 = ();\n | ^^^^^^^^^" - - "Error [EPAR0370015]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | const mut x: u32 = x+y;\n | ^^^^^^^^^" - - "Error [EPAR0370015]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | const mut x: u32 = (x,y);\n | ^^^^^^^^^" - - "Error [EPAR0370015]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | const mut x: u32 = x();\n | ^^^^^^^^^" + - "Error [EPAR0370014]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | let mut x = expr;\n | ^^^^^^^" + - "Error [EPAR0370014]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | let mut x = ();\n | ^^^^^^^" + - "Error [EPAR0370014]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | let mut x = x+y;\n | ^^^^^^^" + - "Error [EPAR0370014]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | let mut x = (x,y);\n | ^^^^^^^" + - "Error [EPAR0370014]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | let mut x = x();\n | ^^^^^^^" + - "Error [EPAR0370014]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | const mut x = expr;\n | ^^^^^^^^^" + - "Error [EPAR0370014]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | const mut x = ();\n | ^^^^^^^^^" + - "Error [EPAR0370014]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | const mut x = x+y;\n | ^^^^^^^^^" + - "Error [EPAR0370014]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | const mut x = (x,y);\n | ^^^^^^^^^" + - "Error [EPAR0370014]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | const mut x = x();\n | ^^^^^^^^^" + - "Error [EPAR0370014]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | let mut x: u32 = expr;\n | ^^^^^^^" + - "Error [EPAR0370014]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | let mut x: u32 = ();\n | ^^^^^^^" + - "Error [EPAR0370014]: let mut = ... is deprecated. 
`let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | let mut x: u32 = x+y;\n | ^^^^^^^" + - "Error [EPAR0370014]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | let mut x: u32 = (x,y);\n | ^^^^^^^" + - "Error [EPAR0370014]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | let mut x: u32 = x();\n | ^^^^^^^" + - "Error [EPAR0370014]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | const mut x: u32 = expr;\n | ^^^^^^^^^" + - "Error [EPAR0370014]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | const mut x: u32 = ();\n | ^^^^^^^^^" + - "Error [EPAR0370014]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | const mut x: u32 = x+y;\n | ^^^^^^^^^" + - "Error [EPAR0370014]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | const mut x: u32 = (x,y);\n | ^^^^^^^^^" + - "Error [EPAR0370014]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:1:1\n |\n 1 | const mut x: u32 = x();\n | ^^^^^^^^^" - "Error [EPAR0370009]: unexpected string: expected 'ident', got ','\n --> test:1:10\n |\n 1 | let (x,y,,) = ();\n | ^" - "Error [EPAR0370009]: unexpected string: expected 'ident', got ','\n --> test:1:6\n |\n 1 | let (,x,y) = ();\n | ^" - "Error [EPAR0370009]: unexpected string: expected 'ident', got ','\n --> test:1:8\n |\n 1 | let (x,,y) = ();\n | ^" - - "Error [EPAR0370009]: unexpected string: expected 'int', got ','\n --> test:1:16\n |\n 1 | let x: [u8; (2,,)] = [[0,0], [0,0]];\n | ^\nError [EPAR0370045]: do not put parens around single dimension array size\n --> test:1:13\n |\n 1 | let x: [u8; (2,,)] = [[0,0], [0,0]];\n | ^^^^^" + - "Error [EPAR0370009]: unexpected string: expected 'int', got ','\n --> test:1:16\n |\n 1 | let x: [u8; (2,,)] = [[0,0], [0,0]];\n | ^\nError [EPAR0370039]: do not put parens around single dimension array size\n --> test:1:13\n |\n 1 | let x: [u8; (2,,)] = [[0,0], [0,0]];\n | ^^^^^" - "Error [EPAR0370005]: expected 'i8', 'i16', 'i32', 'i64', 'i128', 'u8', 'u16', 'u32', 'u64', 'u128', 'field', 'group', 'address', 'bool', 'char' -- got 'const'\n --> test:1:8\n |\n 1 | let x: const = expr;\n | ^^^^^" - "Error [EPAR0370005]: expected 'i8', 'i16', 'i32', 'i64', 'i128', 'u8', 'u16', 'u32', 'u64', 'u128', 'field', 'group', 'address', 'bool', 'char' -- got 'let'\n --> test:1:10\n |\n 1 | const x: let = expr;\n | ^^^" - "Error [EPAR0370005]: expected 'i8', 'i16', 'i32', 'i64', 'i128', 'u8', 'u16', 'u32', 'u64', 'u128', 'field', 'group', 'address', 'bool', 'char' -- got 'mut'\n --> test:1:8\n |\n 1 | let x: mut = expr;\n | ^^^" @@ -43,6 +43,6 @@ outputs: - "Error [EPAR0370003]: unexpected EOF\n --> test:1:11\n |\n 1 | let x: [u8;\n | ^" - "Error [EPAR0370005]: expected ] -- got 'u8'\n --> test:1:14\n |\n 1 | let x: [u8; 1u8] = [1,\n | ^^" - "Error [EPAR0370009]: unexpected string: expected 'expression', got ']'\n --> test:1:15\n |\n 1 | let dbg: u8 = ];\n | ^" - - "Error [EPAR0370032]: Could not lex the following content: `🦀: u8 = 0;`." 
- - "Error [EPAR0370044]: do not put parens around single variable names\n --> test:1:6\n |\n 1 | let (x) = ...;\n | ^\nError [EPAR0370009]: unexpected string: expected 'expression', got '...'\n --> test:1:11\n |\n 1 | let (x) = ...;\n | ^^^" - - "Error [EPAR0370044]: do not put parens around single variable names\n --> test:1:6\n |\n 1 | let (x,) = ...;\n | ^\nError [EPAR0370009]: unexpected string: expected 'expression', got '...'\n --> test:1:12\n |\n 1 | let (x,) = ...;\n | ^^^" + - "Error [EPAR0370030]: Could not lex the following content: `🦀: u8 = 0;`." + - "Error [EPAR0370038]: do not put parens around single variable names\n --> test:1:6\n |\n 1 | let (x) = ...;\n | ^\nError [EPAR0370009]: unexpected string: expected 'expression', got '...'\n --> test:1:11\n |\n 1 | let (x) = ...;\n | ^^^" + - "Error [EPAR0370038]: do not put parens around single variable names\n --> test:1:6\n |\n 1 | let (x,) = ...;\n | ^\nError [EPAR0370009]: unexpected string: expected 'expression', got '...'\n --> test:1:12\n |\n 1 | let (x,) = ...;\n | ^^^" diff --git a/tests/expectations/parser/parser/statement/expression_fail.leo.out b/tests/expectations/parser/parser/statement/expression_fail.leo.out index 2f62e6bd17..51218ebaa7 100644 --- a/tests/expectations/parser/parser/statement/expression_fail.leo.out +++ b/tests/expectations/parser/parser/statement/expression_fail.leo.out @@ -4,7 +4,7 @@ expectation: Fail outputs: - "Error [EPAR0370009]: unexpected string: expected 'expression', got ']'\n --> test:1:2\n |\n 1 | (];\n | ^" - "Error [EPAR0370009]: unexpected string: expected 'expression', got ')'\n --> test:1:2\n |\n 1 | [);\n | ^" - - "Error [EPAR0370032]: Could not lex the following content: `\\y;`." - - "Error [EPAR0370032]: Could not lex the following content: `|;`." + - "Error [EPAR0370030]: Could not lex the following content: `\\y;`." + - "Error [EPAR0370040]: Found the char `;`, but expected `|`" - "Error [EPAR0370009]: unexpected string: expected 'expression', got '}'\n --> test:1:3\n |\n 1 | x[};\n | ^" - "Error [EPAR0370005]: expected ) -- got ']'\n --> test:1:6\n |\n 1 | (x, y];\n | ^" diff --git a/tests/expectations/parser/parser/statement/hex_int_fail.leo.out b/tests/expectations/parser/parser/statement/hex_int_fail.leo.out index d16c3c203a..e585d2222d 100644 --- a/tests/expectations/parser/parser/statement/hex_int_fail.leo.out +++ b/tests/expectations/parser/parser/statement/hex_int_fail.leo.out @@ -2,6 +2,6 @@ namespace: ParseStatement expectation: Fail outputs: - - "Error [EPAR0370040]: A hex number `0x..` was provided but hex is not allowed." - - "Error [EPAR0370040]: A hex number `0x..` was provided but hex is not allowed." - - "Error [EPAR0370040]: A hex number `0x..` was provided but hex is not allowed." + - "Error [EPAR0370035]: A hex number `0x..` was provided but hex is not allowed." + - "Error [EPAR0370035]: A hex number `0x..` was provided but hex is not allowed." + - "Error [EPAR0370035]: A hex number `0x..` was provided but hex is not allowed." diff --git a/tests/expectations/parser/parser/statement/let_mut_recover.leo.out b/tests/expectations/parser/parser/statement/let_mut_recover.leo.out index 6e6b2f1418..a2bf13ea6b 100644 --- a/tests/expectations/parser/parser/statement/let_mut_recover.leo.out +++ b/tests/expectations/parser/parser/statement/let_mut_recover.leo.out @@ -2,4 +2,4 @@ namespace: Parse expectation: Fail outputs: - - "Error [EPAR0370015]: let mut = ... is deprecated. 
`let` keyword implies mutabality by default.\n --> test:4:5\n |\n 4 | let mut x = 0;\n | ^^^^^^^\nError [EPAR0370015]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:5:5\n |\n 5 | let mut y = 0; // recovery witness\n | ^^^^^^^" + - "Error [EPAR0370014]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:4:5\n |\n 4 | let mut x = 0;\n | ^^^^^^^\nError [EPAR0370014]: let mut = ... is deprecated. `let` keyword implies mutabality by default.\n --> test:5:5\n |\n 5 | let mut y = 0; // recovery witness\n | ^^^^^^^" diff --git a/tests/parser/expression/literal/string.leo b/tests/parser/expression/literal/string.leo index fc9711e444..c09e6c4585 100644 --- a/tests/parser/expression/literal/string.leo +++ b/tests/parser/expression/literal/string.leo @@ -36,7 +36,10 @@ expectation: Pass "(╯°□°)╯︵ ┻━┻" "┬─┬ ノ( ゜-゜ノ)" "( ͡° ͜ʖ ͡°)" -"b" // TODO reenabe once #1682 is closed "ᕙ(▀̿ĺ̯▀̿ ̿)ᕗ" +"b" +"ᕙ(▀̿ĺ̯▀̿ ̿)ᕗ" "♥╣[-_-]╠♥" -"b" // TODO reenabe once #1682 is closed "(⑅∫°ਊ°)∫" -"b" // TODO reenabe once #1682 is closed "🦀°1" \ No newline at end of file +"b" +"(⑅∫°ਊ°)∫" +"b" +"🦀°1" \ No newline at end of file diff --git a/tests/parser/expression/literal/string_fail.leo b/tests/parser/expression/literal/string_fail.leo index 04dabbc499..dad1c2448c 100644 --- a/tests/parser/expression/literal/string_fail.leo +++ b/tests/parser/expression/literal/string_fail.leo @@ -25,4 +25,6 @@ expectation: Fail "\u{af🦀" -"\" // TODO reenable once #1683 is closed "⭇😍; \ No newline at end of file +"\" +
+"⭇😍; \ No newline at end of file diff --git a/tests/test-framework/src/bin/errcov.rs b/tests/test-framework/src/bin/errcov.rs index 2d18415e71..a17a586874 100644 --- a/tests/test-framework/src/bin/errcov.rs +++ b/tests/test-framework/src/bin/errcov.rs @@ -49,7 +49,7 @@ fn run_with_args(opt: Opt) -> Result<(), Box<dyn Error>> { // Variable that stores all the tests. let mut tests = Vec::new(); let mut test_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); - test_dir.push("../tests/"); + test_dir.push("../"); let mut expectation_dir = test_dir.clone(); expectation_dir.push("expectations");
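The final hunk corrects where errcov resolves the expectations directory. Assuming the crate sits at `<repo>/tests/test-framework`, as its path in this diff indicates, the old `test_dir.push("../tests/")` denoted `<repo>/tests/tests/`, which does not match the layout in which the `.leo.out` files above live under `<repo>/tests/expectations/`. A quick sketch with a literal stand-in for `env!("CARGO_MANIFEST_DIR")`:

```rust
use std::path::PathBuf;

fn main() {
    // Stand-in for env!("CARGO_MANIFEST_DIR") of the test-framework crate.
    let mut test_dir = PathBuf::from("leo/tests/test-framework");
    test_dir.push("../"); // now denotes leo/tests/
    let mut expectation_dir = test_dir.clone();
    expectation_dir.push("expectations"); // leo/tests/expectations
    // The old "../tests/" would have denoted leo/tests/tests/expectations.
    println!("{}", expectation_dir.display());
}
```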