Merge branch 'master' into feature/leo-path-cli

damirka 2021-04-08 17:47:20 +03:00
commit 166be0874c
90 changed files with 4797 additions and 1104 deletions

View File

@ -1,2 +1,2 @@
[target.'cfg(not(target_arch = "wasm32"))']
[target.'cfg(any(not(target_arch = "wasm32"), feature = "noconfig"))']
rustflags = ["-C", "target-cpu=native"]

View File

@ -44,7 +44,7 @@ jobs:
rust-stable:
docker:
- image: cimg/rust:1.50.0
- image: cimg/rust:1.51.0
resource_class: xlarge
steps:
- checkout
@ -62,7 +62,7 @@ jobs:
rust-nightly:
docker:
- image: howardwu/snarkos-ci:2021-01-31
- image: howardwu/snarkos-ci:2021-03-25
resource_class: xlarge
steps:
- checkout
@ -77,7 +77,7 @@ jobs:
leo-executable:
docker:
- image: cimg/rust:1.50.0
- image: cimg/rust:1.51.0
resource_class: xlarge
steps:
- checkout
@ -86,7 +86,7 @@ jobs:
- run:
name: Build and install Leo
no_output_timeout: 30m
command: cargo install --path . --root .
command: cargo install --path . --root . --locked
- persist_to_workspace:
root: ~/
paths: project/
@ -95,7 +95,7 @@ jobs:
leo-new:
docker:
- image: cimg/rust:1.50.0
- image: cimg/rust:1.51.0
resource_class: xlarge
steps:
- attach_workspace:
@ -108,7 +108,7 @@ jobs:
leo-init:
docker:
- image: cimg/rust:1.50.0
- image: cimg/rust:1.51.0
resource_class: xlarge
steps:
- attach_workspace:
@ -121,7 +121,7 @@ jobs:
leo-clean:
docker:
- image: cimg/rust:1.50.0
- image: cimg/rust:1.51.0
resource_class: xlarge
steps:
- attach_workspace:
@ -134,7 +134,7 @@ jobs:
leo-setup:
docker:
- image: cimg/rust:1.50.0
- image: cimg/rust:1.51.0
resource_class: xlarge
steps:
- attach_workspace:
@ -147,7 +147,7 @@ jobs:
leo-add-remove:
docker:
- image: cimg/rust:1.50.0
- image: cimg/rust:1.51.0
resource_class: xlarge
steps:
- attach_workspace:
@ -160,7 +160,7 @@ jobs:
leo-login-logout:
docker:
- image: cimg/rust:1.50.0
- image: cimg/rust:1.51.0
resource_class: xlarge
steps:
- attach_workspace:
@ -173,7 +173,7 @@ jobs:
leo-clone:
docker:
- image: cimg/rust:1.50.0
- image: cimg/rust:1.51.0
resource_class: xlarge
steps:
- attach_workspace:
@ -186,7 +186,7 @@ jobs:
leo-publish:
docker:
- image: cimg/rust:1.50.0
- image: cimg/rust:1.51.0
resource_class: xlarge
steps:
- attach_workspace:

View File

@ -25,7 +25,7 @@ jobs:
- name: Build Leo
run: |
cargo build --all --release && strip target/release/leo
cargo build --all --release --features noconfig && strip target/release/leo
env:
CARGO_NET_GIT_FETCH_WITH_CLI: true
@ -67,7 +67,7 @@ jobs:
- name: Build Leo
run: |
cargo build --all --release && strip target/release/leo
cargo build --all --release --features noconfig && strip target/release/leo
env:
CARGO_NET_GIT_FETCH_WITH_CLI: true
@ -95,7 +95,6 @@ jobs:
windows:
name: Windows
runs-on: windows-latest
continue-on-error: true
steps:
- name: Checkout
uses: actions/checkout@v1
@ -111,12 +110,15 @@ jobs:
- name: Install LLVM and Clang
uses: KyleMayes/install-llvm-action@v1
with:
version: "10.0"
directory: ~/.clang
version: "11"
directory: ${{ runner.temp }}/llvm
- name: Set LIBCLANG_PATH
run: echo "LIBCLANG_PATH=$((gcm clang).source -replace "clang.exe")" >> $env:GITHUB_ENV
- name: Build Leo
run: |
cargo build --all --release
cargo build --all --release --features noconfig
env:
CARGO_NET_GIT_FETCH_WITH_CLI: true
@ -125,18 +127,13 @@ jobs:
- name: Zip
run: |
mkdir tempdir
mv target/release/leo tempdir
cd tempdir
Compress-Archive leo-${{ steps.get_version.outputs.version }}-x86_64-pc-windows-gnu leo
cd ..
mv leo-${{ steps.get_version.outputs.version }}-x86_64-pc-windows-gnu .
Compress-Archive target/release/leo.exe leo-${{ steps.get_version.outputs.version }}-x86_64-pc-windows-msvc.zip
- name: Release
uses: softprops/action-gh-release@v1
if: startsWith(github.ref, 'refs/tags/')
with:
files: |
leo-${{ steps.get_version.outputs.version }}-x86_64-pc-windows-gnu.zip
leo-${{ steps.get_version.outputs.version }}-x86_64-pc-windows-msvc.zip
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

Cargo.lock (generated, 150 changed lines)
View File

@ -2,6 +2,25 @@
# It is not intended for manual editing.
version = 3
[[package]]
name = "abnf"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd8863e7db43447ad50376e19b0549343b72ad45cbd394b3fc8fe3ede961facc"
dependencies = [
"abnf-core",
"nom 6.1.2",
]
[[package]]
name = "abnf-core"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b514944cb7199c4201f54406bc58676a3e4f37d40bf8e3dbe30652ca82e3ddb4"
dependencies = [
"nom 6.1.2",
]
[[package]]
name = "addr2line"
version = "0.14.1"
@ -52,9 +71,9 @@ dependencies = [
[[package]]
name = "anyhow"
version = "1.0.39"
version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "81cddc5f91628367664cc7c69714ff08deee8a3efc54623011c772544d7b2767"
checksum = "28b2cd92db5cbd74e8e5028f7e27dd7aa3090e89e4f2a197cc7c8dfb69c7063b"
[[package]]
name = "arrayref"
@ -157,6 +176,18 @@ version = "1.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf1de2fe8c75bc145a2f577add951f8134889b4795d47466a54a5c846d691693"
[[package]]
name = "bitvec"
version = "0.19.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8942c8d352ae1838c9dda0b0ca2ab657696ef2232a20147cf1b30ae1a9cb4321"
dependencies = [
"funty",
"radium",
"tap",
"wyz",
]
[[package]]
name = "blake2"
version = "0.9.1"
@ -290,7 +321,7 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4aedb84272dbe89af497cf81375129abda4fc0a9e7c5d317498c15cc30c0d27"
dependencies = [
"nom",
"nom 5.1.2",
]
[[package]]
@ -787,6 +818,12 @@ version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
[[package]]
name = "funty"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7"
[[package]]
name = "futf"
version = "0.1.4"
@ -1202,6 +1239,14 @@ version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
[[package]]
name = "leo-abnf"
version = "1.2.3"
dependencies = [
"abnf",
"anyhow",
]
[[package]]
name = "leo-asg"
version = "1.2.3"
@ -1269,6 +1314,7 @@ dependencies = [
"snarkvm-gadgets",
"snarkvm-r1cs",
"snarkvm-utilities",
"tempfile",
"thiserror",
"tracing",
]
@ -1399,6 +1445,19 @@ dependencies = [
"snarkvm-r1cs",
]
[[package]]
name = "lexical-core"
version = "0.7.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21f866863575d0e1d654fbeeabdc927292fdf862873dc3c96c6f753357e13374"
dependencies = [
"arrayvec",
"bitflags",
"cfg-if 1.0.0",
"ryu",
"static_assertions",
]
[[package]]
name = "libc"
version = "0.2.89"
@ -1651,6 +1710,19 @@ dependencies = [
"version_check",
]
[[package]]
name = "nom"
version = "6.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7413f999671bd4745a7b624bd370a569fb6bc574b23c83a3c5ed2e453f3d5e2"
dependencies = [
"bitvec",
"funty",
"lexical-core",
"memchr",
"version_check",
]
[[package]]
name = "notify"
version = "4.0.15"
@ -2036,6 +2108,12 @@ dependencies = [
"proc-macro2 1.0.24",
]
[[package]]
name = "radium"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "941ba9d78d8e2f7ce474c015eea4d9c6d25b6a3327f9832ee29a4de27f91bbb8"
[[package]]
name = "rand"
version = "0.8.3"
@ -2493,9 +2571,9 @@ checksum = "fe0f37c9e8f3c5a4a66ad655a93c74daac4ad00c441533bf5c6e7990bb42604e"
[[package]]
name = "snarkvm-algorithms"
version = "0.2.1"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "472ed062cdd1f54076312dd34e5fb56bd585c80c12209045f4b5bbbd368e9000"
checksum = "bdf8ca73d429824090b96f751846e37e539f24c527f1f1ce0254984ade6d17b2"
dependencies = [
"blake2",
"derivative",
@ -2516,9 +2594,9 @@ dependencies = [
[[package]]
name = "snarkvm-curves"
version = "0.2.1"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cdfdfa3aa137f64a7f49df03393e5d0269f133ca8c8c79e569cb3bb13181aeb2"
checksum = "64610b135b8b1152439d5dfa4f745515933366082f08651961344aa0bb5abfca"
dependencies = [
"derivative",
"rand",
@ -2532,9 +2610,9 @@ dependencies = [
[[package]]
name = "snarkvm-derives"
version = "0.2.1"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a2ba967601ff2534adbc6a71a691be4285e61c83d23d54a59824f8fa80f6038"
checksum = "46c9829b6e2023b4c7c4d6c55e88fe755dd997171a6c9c063b75c28161d04326"
dependencies = [
"proc-macro-crate",
"proc-macro-error",
@ -2545,9 +2623,9 @@ dependencies = [
[[package]]
name = "snarkvm-dpc"
version = "0.2.1"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff4cb55898089843ba44b9f96448dcb2badcc1ce12daa8d7365d4e41513e37bc"
checksum = "491ae936e24e17c358d112ff8638b260500b5a982ecefc804861e28b5279f552"
dependencies = [
"anyhow",
"base58",
@ -2571,9 +2649,9 @@ dependencies = [
[[package]]
name = "snarkvm-fields"
version = "0.2.1"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca9ea954196e76fe8968fb99eced7ccf08f901ab22747c4c489bda6674a7cb39"
checksum = "8c49c69d02df11be58e07f626c9d6f5804c6dd4ccf42e425f2be8d79fe6e5bb7"
dependencies = [
"bincode",
"derivative",
@ -2586,9 +2664,9 @@ dependencies = [
[[package]]
name = "snarkvm-gadgets"
version = "0.2.1"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fdda42a0a6484d9f008801a8a4d494a69a4db3f7b317057a8cc3c6e4b3ef6884"
checksum = "bd6f9ac2a166d926e1755a06fdae21ce40ce6164c75c89120401b8d78f3b7ba4"
dependencies = [
"derivative",
"digest 0.9.0",
@ -2603,9 +2681,9 @@ dependencies = [
[[package]]
name = "snarkvm-objects"
version = "0.2.1"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e20d13db49cedc147df06c4a6f2dd727ea25640bdf50b876f40005331767a68f"
checksum = "9bd9779ec6ab9211f34a6ba25566feb575a396f4c41cc0e002ec2d48d7560a2a"
dependencies = [
"anyhow",
"bincode",
@ -2624,9 +2702,9 @@ dependencies = [
[[package]]
name = "snarkvm-parameters"
version = "0.2.1"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d35fa1819d803e45b4e99fe822e6981f177716f5384eef27245b5f6ed59a8305"
checksum = "98378f612206fc7dd44a26f4e345bd1f3ba51bd325acad1e5cc3785d14750ec5"
dependencies = [
"curl",
"hex",
@ -2637,15 +2715,15 @@ dependencies = [
[[package]]
name = "snarkvm-profiler"
version = "0.2.1"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7834d57af37a31f2f280f08b61d07a04a9a4b7720819b06ca325da32a5a925f5"
checksum = "b2460ac01c25f79f5ea306e4de82a1d4105e811f868206b4fd31c0c9b62a3d7b"
[[package]]
name = "snarkvm-r1cs"
version = "0.2.1"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0838118f276e7bb673cbf3741f4966c56861aaff399a46d343fc98c12851d9eb"
checksum = "a3a0d54b15802976aff7522765dd29d5733f338612449629cc57c5a4a4d51f05"
dependencies = [
"cfg-if 1.0.0",
"fxhash",
@ -2658,9 +2736,9 @@ dependencies = [
[[package]]
name = "snarkvm-storage"
version = "0.2.1"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a42d92a817502878f315cc264704fa2a3d563755f16186316d8177ea685769af"
checksum = "1d76881939f008d7bba4c8cc4118d29567b5c71908ad66bef9880f8aa7c52881"
dependencies = [
"anyhow",
"bincode",
@ -2679,9 +2757,9 @@ dependencies = [
[[package]]
name = "snarkvm-utilities"
version = "0.2.1"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5598f7f71c8aaf4fc267b5b420b2440a4d86c9243cecd57ff0af5c366217e5cc"
checksum = "c763843fa67a3aa4ce68173c8cd96b4f04aaa135a5792bc051c36eec0fe1cd73"
dependencies = [
"bincode",
"rand",
@ -2700,6 +2778,12 @@ dependencies = [
"winapi 0.3.9",
]
[[package]]
name = "static_assertions"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
[[package]]
name = "strsim"
version = "0.8.0"
@ -2770,6 +2854,12 @@ dependencies = [
"unicode-xid 0.2.1",
]
[[package]]
name = "tap"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369"
[[package]]
name = "tempfile"
version = "3.2.0"
@ -3325,6 +3415,12 @@ dependencies = [
"winapi-build",
]
[[package]]
name = "wyz"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214"
[[package]]
name = "zip"
version = "0.5.10"

View File

@ -29,6 +29,7 @@ members = [
"asg",
"ast",
"compiler",
"grammar",
"imports",
"input",
"linter",
@ -68,23 +69,23 @@ path = "./synthesizer"
version = "1.2.3"
[dependencies.snarkvm-algorithms]
version = "0.2.1"
version = "0.2.2"
#default-features = false
[dependencies.snarkvm-curves]
version = "0.2.1"
version = "0.2.2"
default-features = false
[dependencies.snarkvm-gadgets]
version = "0.2.1"
version = "0.2.2"
default-features = false
[dependencies.snarkvm-r1cs]
version = "0.2.1"
version = "0.2.2"
default-features = false
[dependencies.snarkvm-utilities]
version = "0.2.1"
version = "0.2.2"
[dependencies.anyhow]
version = "1.0"
@ -154,8 +155,10 @@ version = "0.5"
version = "0.11.2"
[features]
default = [ ]
default = []
ci_skip = [ "leo-compiler/ci_skip" ]
# This feature flag is used to disable `target-cpu=native` in `.cargo/config`.
noconfig = []
[profile.release]
opt-level = 3

View File

@ -172,6 +172,13 @@ impl AsgConvertError {
)
}
pub fn duplicate_function_definition(name: &str, span: &Span) -> Self {
Self::new_from_span(
format!("a function named \"{}\" already exists in this scope", name),
span,
)
}
pub fn index_into_non_tuple(name: &str, span: &Span) -> Self {
Self::new_from_span(format!("failed to index into non-tuple '{}'", name), span)
}

View File

@ -67,7 +67,7 @@ impl<'a> ExpressionNode<'a> for CircuitInitExpression<'a> {
}
fn is_mut_ref(&self) -> bool {
false
true
}
fn const_value(&self) -> Option<ConstValue> {

View File

@ -266,7 +266,13 @@ impl<'a> Program<'a> {
asg_function.fill_from_ast(function)?;
functions.insert(name.name.to_string(), asg_function);
let name = name.name.to_string();
if functions.contains_key(&name) {
return Err(AsgConvertError::duplicate_function_definition(&name, &function.span));
}
functions.insert(name, asg_function);
}
let mut circuits = IndexMap::new();

View File

@ -26,18 +26,6 @@ use std::fmt;
pub struct ArrayDimensions(pub Vec<PositiveNumber>);
impl ArrayDimensions {
///
/// Creates a new `PositiveNumber` from the given `usize` and `Span`.
/// Appends the new `PositiveNumber` to the array dimensions.
///
pub fn push_usize(&mut self, number: usize) {
let positive_number = PositiveNumber {
value: number.to_string().into(),
};
self.0.push(positive_number)
}
///
/// Appends a vector of array dimensions to the self array dimensions.
///

View File

@ -470,7 +470,35 @@ impl ReconstructingReducer for Canonicalizer {
) -> Result<AssignStatement, CanonicalizeError> {
match value {
Expression::Value(value_expr) if assign.operation != AssignOperation::Assign => {
let left = Box::new(Expression::Identifier(assignee.identifier.clone()));
let mut left = Box::new(Expression::Identifier(assignee.identifier.clone()));
for access in assignee.accesses.iter().rev() {
match self.canonicalize_assignee_access(&access) {
AssigneeAccess::ArrayIndex(index) => {
left = Box::new(Expression::ArrayAccess(ArrayAccessExpression {
array: left,
index: Box::new(index),
span: assign.span.clone(),
}));
}
AssigneeAccess::Tuple(positive_number, _) => {
left = Box::new(Expression::TupleAccess(TupleAccessExpression {
tuple: left,
index: positive_number,
span: assign.span.clone(),
}));
}
AssigneeAccess::Member(identifier) => {
left = Box::new(Expression::CircuitMemberAccess(CircuitMemberAccessExpression {
circuit: left,
name: identifier,
span: assign.span.clone(),
}));
}
_ => unimplemented!(), // No reason for someone to compute ArrayRanges.
}
}
let right = Box::new(Expression::Value(value_expr));
let op = match assign.operation {

View File

@ -50,27 +50,27 @@ path = "../asg-passes"
version = "1.2.3"
[dependencies.snarkvm-curves]
version = "0.2.1"
version = "0.2.2"
default-features = false
[dependencies.snarkvm-fields]
version = "0.2.1"
version = "0.2.2"
default-features = false
[dependencies.snarkvm-dpc]
version = "0.2.1"
version = "0.2.2"
default-features = false
[dependencies.snarkvm-gadgets]
version = "0.2.1"
version = "0.2.2"
default-features = false
[dependencies.snarkvm-r1cs]
version = "0.2.1"
version = "0.2.2"
default-features = false
[dependencies.snarkvm-utilities]
version = "0.2.1"
version = "0.2.2"
[dependencies.bincode]
version = "1.3"
@ -111,9 +111,12 @@ version = "0.3"
default-features = false
[dev-dependencies.snarkvm-algorithms]
version = "0.2.1"
version = "0.2.2"
default-features = false
[dev-dependencies.tempfile]
version = "3.0.4"
[features]
default = [ ]
ci_skip = [ "leo-ast/ci_skip" ]

View File

@ -14,16 +14,14 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use crate::errors::{FunctionError, ImportError, OutputBytesError, OutputFileError};
use crate::errors::FunctionError;
use leo_asg::{AsgConvertError, FormattedError};
use leo_ast::{CanonicalizeError, LeoError};
use leo_imports::ImportParserError;
use leo_input::InputParserError;
use leo_parser::SyntaxError;
use leo_state::LocalDataVerificationError;
use bincode::Error as SerdeError;
use std::{ffi::OsString, path::PathBuf};
use std::path::PathBuf;
#[derive(Debug, Error)]
pub enum CompilerError {
@ -33,12 +31,6 @@ pub enum CompilerError {
#[error("{}", _0)]
AsgPassError(FormattedError),
#[error("{}", _0)]
ImportError(#[from] ImportError),
#[error("{}", _0)]
ImportParserError(#[from] ImportParserError),
#[error("{}", _0)]
InputParserError(#[from] InputParserError),
@ -51,30 +43,15 @@ pub enum CompilerError {
#[error("Cannot read from the provided file path '{:?}': {}", _0, _1)]
FileReadError(PathBuf, std::io::Error),
#[error("Cannot parse file string `{:?}`", _0)]
FileStringError(OsString),
#[error("{}", _0)]
LocalDataVerificationError(#[from] LocalDataVerificationError),
#[error("`main` function not found")]
NoMain,
#[error("`main` must be a function")]
NoMainFunction,
#[error("Failed to find input files for the current test")]
NoTestInput,
#[error("{}", _0)]
OutputError(#[from] OutputFileError),
#[error("{}", _0)]
OutputStringError(#[from] OutputBytesError),
#[error("{}", _0)]
SerdeError(#[from] SerdeError),
#[error("{}", _0)]
AsgConvertError(#[from] AsgConvertError),

View File

@ -15,7 +15,7 @@
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use crate::errors::{AddressError, BooleanError, FieldError, FunctionError, GroupError, IntegerError, ValueError};
use leo_ast::{ArrayDimensions, FormattedError, Identifier, LeoError, PositiveNumber, Span};
use leo_ast::{FormattedError, Identifier, LeoError, Span};
use snarkvm_r1cs::SynthesisError;
#[derive(Debug, Error)]
@ -115,24 +115,6 @@ impl ExpressionError {
Self::new_from_span(message, span)
}
pub fn invalid_dimensions(expected: &ArrayDimensions, actual: &ArrayDimensions, span: &Span) -> Self {
let message = format!(
"expected array dimensions {}, found array dimensions {}",
expected, actual
);
Self::new_from_span(message, span)
}
pub fn invalid_first_dimension(expected: &PositiveNumber, actual: &PositiveNumber, span: &Span) -> Self {
let message = format!(
"expected array dimension {}, found array dimension {}",
expected, actual
);
Self::new_from_span(message, span)
}
pub fn invalid_index(actual: String, span: &Span) -> Self {
let message = format!("index must resolve to an integer, found `{}`", actual);
@ -145,48 +127,18 @@ impl ExpressionError {
Self::new_from_span(message, span)
}
pub fn invalid_spread(actual: String, span: &Span) -> Self {
let message = format!("spread should contain an array, found `{}`", actual);
Self::new_from_span(message, span)
}
pub fn invalid_member_access(member: String, span: &Span) -> Self {
let message = format!("non-static member `{}` must be accessed using `.` syntax", member);
Self::new_from_span(message, span)
}
pub fn invalid_static_access(member: String, span: &Span) -> Self {
let message = format!("static member `{}` must be accessed using `::` syntax", member);
Self::new_from_span(message, span)
}
pub fn function_no_return(function: String, span: &Span) -> Self {
let message = format!("inline function call to `{}` did not return", function);
Self::new_from_span(message, span)
}
pub fn self_keyword(span: &Span) -> Self {
let message = "cannot call keyword `Self` outside of a circuit function".to_string();
Self::new_from_span(message, span)
}
pub fn undefined_array(actual: String, span: &Span) -> Self {
let message = format!("array `{}` must be declared before it is used in an expression", actual);
Self::new_from_span(message, span)
}
pub fn undefined_tuple(actual: String, span: &Span) -> Self {
let message = format!("tuple `{}` must be declared before it is used in an expression", actual);
Self::new_from_span(message, span)
}
pub fn undefined_circuit(actual: String, span: &Span) -> Self {
let message = format!(
"circuit `{}` must be declared before it is used in an expression",
@ -196,21 +148,6 @@ impl ExpressionError {
Self::new_from_span(message, span)
}
pub fn undefined_first_dimension(span: &Span) -> Self {
let message = "the first dimension of the array must be a number".to_string();
Self::new_from_span(message, span)
}
pub fn undefined_function(function: String, span: &Span) -> Self {
let message = format!(
"function `{}` must be declared before it is used in an inline expression",
function
);
Self::new_from_span(message, span)
}
pub fn undefined_identifier(identifier: Identifier) -> Self {
let message = format!("Cannot find value `{}` in this scope", identifier.name);
@ -222,22 +159,4 @@ impl ExpressionError {
Self::new_from_span(message, span)
}
pub fn undefined_static_access(circuit: String, member: String, span: &Span) -> Self {
let message = format!("Circuit `{}` has no static member `{}`", circuit, member);
Self::new_from_span(message, span)
}
pub fn unexpected_array(expected: String, span: &Span) -> Self {
let message = format!("expected type `{}`, found array with elements", expected);
Self::new_from_span(message, span)
}
pub fn unexpected_tuple(expected: String, actual: String, span: &Span) -> Self {
let message = format!("expected type `{}`, found tuple with values `{}`", expected, actual);
Self::new_from_span(message, span)
}
}

View File

@ -128,18 +128,6 @@ impl FunctionError {
Self::new_from_span(message, span)
}
pub fn return_arguments_length(expected: usize, actual: usize, span: &Span) -> Self {
let message = format!("function expected {} returns, found {} returns", expected, actual);
Self::new_from_span(message, span)
}
pub fn return_argument_type(expected: String, actual: String, span: &Span) -> Self {
let message = format!("Expected function return type `{}`, found `{}`", expected, actual);
Self::new_from_span(message, span)
}
pub fn input_not_found(expected: String, span: &Span) -> Self {
let message = format!("main function input {} not found", expected);

View File

@ -14,7 +14,7 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use leo_ast::{FormattedError, Identifier, ImportSymbol, LeoError, Span};
use leo_ast::{FormattedError, LeoError};
#[derive(Debug, Error)]
pub enum ImportError {
@ -23,25 +23,3 @@ pub enum ImportError {
}
impl LeoError for ImportError {}
impl ImportError {
fn new_from_span(message: String, span: &Span) -> Self {
ImportError::Error(FormattedError::new_from_span(message, span))
}
pub fn unknown_package(identifier: Identifier) -> Self {
let message = format!(
"cannot find imported package `{}` in source files or import directory",
identifier.name
);
Self::new_from_span(message, &identifier.span)
}
pub fn unknown_symbol(symbol: ImportSymbol, file: String) -> Self {
let message = format!("cannot find imported symbol `{}` in imported file `{}`", symbol, file);
let error = FormattedError::new_from_span(message, &symbol.span);
ImportError::Error(error)
}
}

View File

@ -21,9 +21,6 @@ pub enum OutputFileError {
#[error("{}: {}", _0, _1)]
Crate(&'static str, String),
#[error("creating: {}", _0)]
Creating(io::Error),
#[error("Cannot read from the provided file path - {:?}", _0)]
FileReadError(PathBuf),

View File

@ -49,12 +49,6 @@ impl StatementError {
StatementError::Error(FormattedError::new_from_span(message, span))
}
pub fn arguments_type(expected: &Type, actual: &Type, span: &Span) -> Self {
let message = format!("expected return argument type `{}`, found type `{}`", expected, actual);
Self::new_from_span(message, span)
}
pub fn array_assign_index(span: &Span) -> Self {
let message = "Cannot assign single index to array of values".to_string();
@ -103,24 +97,6 @@ impl StatementError {
Self::new_from_span(message, span)
}
pub fn immutable_assign(name: String, span: &Span) -> Self {
let message = format!("Cannot assign to immutable variable `{}`", name);
Self::new_from_span(message, span)
}
pub fn immutable_circuit_function(name: String, span: &Span) -> Self {
let message = format!("Cannot mutate circuit function, `{}`", name);
Self::new_from_span(message, span)
}
pub fn immutable_circuit_variable(name: String, span: &Span) -> Self {
let message = format!("Circuit member variable `{}` is immutable", name);
Self::new_from_span(message, span)
}
pub fn indicator_calculation(name: String, span: &Span) -> Self {
let message = format!(
"Constraint system failed to evaluate branch selection indicator `{}`",
@ -139,15 +115,6 @@ impl StatementError {
Self::new_from_span(message, span)
}
pub fn invalid_number_of_returns(expected: usize, actual: usize, span: &Span) -> Self {
let message = format!(
"Function return statement expected {} return values, found {} values",
expected, actual
);
Self::new_from_span(message, span)
}
pub fn multiple_definition(value: String, span: &Span) -> Self {
let message = format!("cannot assign multiple variables to a single value: {}", value,);
@ -194,12 +161,6 @@ impl StatementError {
Self::new_from_span(message, span)
}
pub fn tuple_type(type_: String, span: &Span) -> Self {
let message = format!("Expected tuple type, found type `{}`", type_);
Self::new_from_span(message, span)
}
pub fn unassigned(span: &Span) -> Self {
let message = "Expected assignment of return values for expression".to_string();

View File

@ -16,7 +16,6 @@
use leo_ast::{FormattedError, LeoError, Span};
use snarkvm_dpc::AccountError;
use snarkvm_r1cs::SynthesisError;
#[derive(Debug, Error)]
pub enum AddressError {
@ -37,21 +36,6 @@ impl AddressError {
Self::new_from_span(message, span)
}
pub fn cannot_enforce(operation: String, error: SynthesisError, span: &Span) -> Self {
let message = format!(
"the address operation `{:?}` failed due to the synthesis error `{}`",
operation, error,
);
Self::new_from_span(message, span)
}
pub fn cannot_evaluate(operation: String, span: &Span) -> Self {
let message = format!("no implementation found for `{}`", operation);
Self::new_from_span(message, span)
}
pub fn invalid_address(actual: String, span: &Span) -> Self {
let message = format!("expected address input type, found `{}`", actual);

View File

@ -62,10 +62,4 @@ impl FieldError {
Self::new_from_span(message, span)
}
pub fn synthesis_error(error: SynthesisError, span: &Span) -> Self {
let message = format!("compilation failed due to field synthesis error `{:?}`", error);
Self::new_from_span(message, span)
}
}

View File

@ -14,9 +14,9 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use leo_ast::{FormattedError, IntegerType, LeoError, Span, Type};
use leo_ast::{FormattedError, LeoError, Span};
use snarkvm_gadgets::errors::SignedIntegerError;
use snarkvm_gadgets::errors::{SignedIntegerError, UnsignedIntegerError};
use snarkvm_r1cs::SynthesisError;
#[derive(Debug, Error)]
@ -32,25 +32,16 @@ impl IntegerError {
IntegerError::Error(FormattedError::new_from_span(message, span))
}
pub fn cannot_enforce(operation: String, error: SynthesisError, span: &Span) -> Self {
let message = format!(
"the integer operation `{}` failed due to the synthesis error `{:?}`",
operation, error,
);
Self::new_from_span(message, span)
}
pub fn signed(error: SignedIntegerError, span: &Span) -> Self {
let message = format!("integer operation failed due to the signed integer error `{:?}`", error);
Self::new_from_span(message, span)
}
pub fn signed_error(operation: String, error: SignedIntegerError, span: &Span) -> Self {
pub fn unsigned(error: UnsignedIntegerError, span: &Span) -> Self {
let message = format!(
"the integer operation `{}` failed due to the signed integer error `{:?}`",
operation, error
"integer operation failed due to the unsigned integer error `{:?}`",
error
);
Self::new_from_span(message, span)
@ -77,32 +68,12 @@ impl IntegerError {
Self::new_from_span(message, span)
}
pub fn invalid_index(span: &Span) -> Self {
let message =
"index must be a constant value unsigned integer. allocated indices produce a circuit of unknown size"
.to_string();
Self::new_from_span(message, span)
}
pub fn invalid_integer(actual: String, span: &Span) -> Self {
let message = format!("failed to parse `{}` as expected integer type", actual);
Self::new_from_span(message, span)
}
pub fn invalid_integer_type(expected: &IntegerType, actual: &IntegerType, span: &Span) -> Self {
let message = format!("expected integer type {} found integer type {}", expected, actual);
Self::new_from_span(message, span)
}
pub fn invalid_type(actual: &Type, span: &Span) -> Self {
let message = format!("expected type {}, found type IntegerType", actual);
Self::new_from_span(message, span)
}
pub fn missing_integer(expected: String, span: &Span) -> Self {
let message = format!("expected integer input `{}` not found", expected);

View File

@ -15,7 +15,7 @@
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use crate::errors::{AddressError, BooleanError, FieldError, GroupError, IntegerError};
use leo_ast::{FormattedError, LeoError, Span};
use leo_ast::{FormattedError, LeoError};
#[derive(Debug, Error)]
pub enum ValueError {
@ -39,21 +39,3 @@ pub enum ValueError {
}
impl LeoError for ValueError {}
impl ValueError {
fn new_from_span(message: String, span: &Span) -> Self {
ValueError::Error(FormattedError::new_from_span(message, span))
}
pub fn implicit(value: String, span: &Span) -> Self {
let message = format!("explicit type needed for `{}`", value);
Self::new_from_span(message, span)
}
pub fn implicit_group(span: &Span) -> Self {
let message = "group coordinates should be in (x, y)group format".to_string();
Self::new_from_span(message, span)
}
}

View File

@ -42,19 +42,6 @@ impl OutputFile {
}
}
pub fn exists_at(&self, path: &Path) -> bool {
let path = self.setup_file_path(path);
path.exists()
}
/// Reads the output register variables from the given file path if it exists.
pub fn read_from(&self, path: &Path) -> Result<String, OutputFileError> {
let path = self.setup_file_path(path);
let output = fs::read_to_string(&path).map_err(|_| OutputFileError::FileReadError(path.into_owned()))?;
Ok(output)
}
/// Writes output to a file.
pub fn write(&self, path: &Path, bytes: &[u8]) -> Result<(), OutputFileError> {
// create output file
@ -88,3 +75,26 @@ impl OutputFile {
path
}
}
#[cfg(test)]
mod test_output_file {
use crate::{OutputFile, OUTPUTS_DIRECTORY_NAME};
use std::{error::Error, fs};
#[test]
fn test_all() -> Result<(), Box<dyn Error>> {
let dir = tempfile::tempdir()?;
let file = OutputFile::new("test");
let path = dir.path();
assert!(file.write(path, Default::default()).is_err());
assert!(!(file.remove(path)?));
fs::create_dir(dir.path().join(OUTPUTS_DIRECTORY_NAME))?;
assert!(file.write(path, Default::default()).is_ok());
assert!(file.remove(path)?);
Ok(())
}
}

View File

@ -23,6 +23,7 @@ use crate::{
GroupType,
IndicatorAndConstrainedValue,
Integer,
IntegerTrait,
StatementResult,
};
use leo_asg::IterationStatement;

View File

@ -14,7 +14,7 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use crate::{errors::AddressError, ConstrainedValue, GroupType};
use crate::{errors::AddressError, ConstrainedValue, GroupType, IntegerTrait};
use leo_ast::{InputValue, Span};
use snarkvm_dpc::{account::AccountAddress, base_dpc::instantiated::Components};
@ -24,7 +24,7 @@ use snarkvm_gadgets::traits::utilities::{
boolean::Boolean,
eq::{ConditionalEqGadget, EqGadget, EvaluateEqGadget},
select::CondSelectGadget,
uint::{UInt, UInt8},
uint::UInt8,
};
use snarkvm_r1cs::{Assignment, ConstraintSystem, SynthesisError};
use snarkvm_utilities::ToBytes;

View File

@ -28,10 +28,10 @@ use snarkvm_gadgets::traits::utilities::{
eq::{ConditionalEqGadget, EqGadget, EvaluateEqGadget},
int::{Int128, Int16, Int32, Int64, Int8},
select::CondSelectGadget,
uint::*,
uint::{Sub as UIntSub, *},
};
use snarkvm_r1cs::{ConstraintSystem, SynthesisError};
use std::fmt;
use std::{convert::TryInto, fmt};
/// An integer type enum wrapping the integer value.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd)]
@ -83,7 +83,7 @@ impl Integer {
pub fn get_bits(&self) -> Vec<Boolean> {
let integer = self;
match_integer!(integer => integer.get_bits())
match_integer!(integer => integer.to_bits_le())
}
// pub fn get_bits_typed(&self) -> (Vec<Boolean>, IntegerType) {
@ -113,7 +113,7 @@ impl Integer {
pub fn to_usize(&self) -> Option<usize> {
let unsigned_integer = self;
match_unsigned_integer!(unsigned_integer => unsigned_integer.get_index())
match_unsigned_integer!(unsigned_integer => unsigned_integer.value.map(|num| num.try_into().ok()).flatten())
}
pub fn get_type(&self) -> IntegerType {

View File

@ -14,41 +14,7 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use snarkvm_gadgets::traits::utilities::{
boolean::Boolean,
int::{Int128, Int16, Int32, Int64, Int8},
uint::{UInt128, UInt16, UInt32, UInt64, UInt8},
};
use std::{convert::TryInto, fmt::Debug};
pub trait IntegerTrait: Sized + Clone + Debug {
fn get_value(&self) -> Option<String>;
fn get_index(&self) -> Option<usize>;
fn get_bits(&self) -> Vec<Boolean>;
}
macro_rules! integer_trait_impl {
($($gadget: ident)*) => ($(
impl IntegerTrait for $gadget {
fn get_value(&self) -> Option<String> {
self.value.map(|num| num.to_string())
}
fn get_index(&self) -> Option<usize> {
self.value.map(|num| num.try_into().ok()).flatten()
}
fn get_bits(&self) -> Vec<Boolean> {
self.bits.clone()
}
}
)*)
}
integer_trait_impl!(UInt8 UInt16 UInt32 UInt64 UInt128 Int8 Int16 Int32 Int64 Int128);
pub use snarkvm_gadgets::traits::utilities::integer::Integer as IntegerTrait;
/// Useful macros to avoid duplicating `match` constructions.
#[macro_export]
@ -125,19 +91,19 @@ macro_rules! match_integers_span {
(($a: ident, $b: ident), $span: ident => $expression:expr) => {
match ($a, $b) {
(Integer::U8($a), Integer::U8($b)) => {
Some(Integer::U8($expression.map_err(|e| IntegerError::synthesis(e, $span))?))
Some(Integer::U8($expression.map_err(|e| IntegerError::unsigned(e, $span))?))
}
(Integer::U16($a), Integer::U16($b)) => {
Some(Integer::U16($expression.map_err(|e| IntegerError::unsigned(e, $span))?))
}
(Integer::U32($a), Integer::U32($b)) => {
Some(Integer::U32($expression.map_err(|e| IntegerError::unsigned(e, $span))?))
}
(Integer::U64($a), Integer::U64($b)) => {
Some(Integer::U64($expression.map_err(|e| IntegerError::unsigned(e, $span))?))
}
(Integer::U16($a), Integer::U16($b)) => Some(Integer::U16(
$expression.map_err(|e| IntegerError::synthesis(e, $span))?,
)),
(Integer::U32($a), Integer::U32($b)) => Some(Integer::U32(
$expression.map_err(|e| IntegerError::synthesis(e, $span))?,
)),
(Integer::U64($a), Integer::U64($b)) => Some(Integer::U64(
$expression.map_err(|e| IntegerError::synthesis(e, $span))?,
)),
(Integer::U128($a), Integer::U128($b)) => Some(Integer::U128(
$expression.map_err(|e| IntegerError::synthesis(e, $span))?,
$expression.map_err(|e| IntegerError::unsigned(e, $span))?,
)),
(Integer::I8($a), Integer::I8($b)) => {

View File

@ -2,11 +2,25 @@
"name": "",
"expected_input": [],
"imports": [],
"circuits": {},
"circuits": {
"{\"name\":\"Foo\",\"span\":\"{\\\"line_start\\\":1,\\\"line_stop\\\":1,\\\"col_start\\\":9,\\\"col_stop\\\":12,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\"circuit Foo {\\\"}\"}": {
"circuit_name": "{\"name\":\"Foo\",\"span\":\"{\\\"line_start\\\":1,\\\"line_stop\\\":1,\\\"col_start\\\":9,\\\"col_stop\\\":12,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\"circuit Foo {\\\"}\"}",
"members": [
{
"CircuitVariable": [
"{\"name\":\"f\",\"span\":\"{\\\"line_start\\\":2,\\\"line_stop\\\":2,\\\"col_start\\\":3,\\\"col_stop\\\":4,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" f: u8,\\\"}\"}",
{
"IntegerType": "U8"
}
]
}
]
}
},
"functions": {
"{\"name\":\"main\",\"span\":\"{\\\"line_start\\\":1,\\\"line_stop\\\":1,\\\"col_start\\\":10,\\\"col_stop\\\":14,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\"function main() {\\\"}\"}": {
"{\"name\":\"main\",\"span\":\"{\\\"line_start\\\":4,\\\"line_stop\\\":4,\\\"col_start\\\":10,\\\"col_stop\\\":14,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\"function main() {\\\"}\"}": {
"annotations": [],
"identifier": "{\"name\":\"main\",\"span\":\"{\\\"line_start\\\":1,\\\"line_stop\\\":1,\\\"col_start\\\":10,\\\"col_stop\\\":14,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\"function main() {\\\"}\"}",
"identifier": "{\"name\":\"main\",\"span\":\"{\\\"line_start\\\":4,\\\"line_stop\\\":4,\\\"col_start\\\":10,\\\"col_stop\\\":14,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\"function main() {\\\"}\"}",
"input": [],
"output": {
"Tuple": []
@ -19,10 +33,10 @@
"variable_names": [
{
"mutable": true,
"identifier": "{\"name\":\"x\",\"span\":\"{\\\"line_start\\\":2,\\\"line_stop\\\":2,\\\"col_start\\\":7,\\\"col_stop\\\":8,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" let x = 10u32;\\\"}\"}",
"identifier": "{\"name\":\"x\",\"span\":\"{\\\"line_start\\\":5,\\\"line_stop\\\":5,\\\"col_start\\\":7,\\\"col_stop\\\":8,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" let x = 10u32;\\\"}\"}",
"span": {
"line_start": 2,
"line_stop": 2,
"line_start": 5,
"line_stop": 5,
"col_start": 7,
"col_stop": 8,
"path": "",
@ -37,8 +51,8 @@
"U32",
"10",
{
"line_start": 2,
"line_stop": 2,
"line_start": 5,
"line_stop": 5,
"col_start": 11,
"col_stop": 16,
"path": "",
@ -48,8 +62,8 @@
}
},
"span": {
"line_start": 2,
"line_stop": 2,
"line_start": 5,
"line_stop": 5,
"col_start": 3,
"col_stop": 16,
"path": "",
@ -61,11 +75,11 @@
"Assign": {
"operation": "Assign",
"assignee": {
"identifier": "{\"name\":\"x\",\"span\":\"{\\\"line_start\\\":3,\\\"line_stop\\\":3,\\\"col_start\\\":3,\\\"col_stop\\\":4,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" x += 20;\\\"}\"}",
"identifier": "{\"name\":\"x\",\"span\":\"{\\\"line_start\\\":6,\\\"line_stop\\\":6,\\\"col_start\\\":3,\\\"col_stop\\\":4,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" x += 20;\\\"}\"}",
"accesses": [],
"span": {
"line_start": 3,
"line_stop": 3,
"line_start": 6,
"line_stop": 6,
"col_start": 3,
"col_stop": 4,
"path": "",
@ -75,15 +89,15 @@
"value": {
"Binary": {
"left": {
"Identifier": "{\"name\":\"x\",\"span\":\"{\\\"line_start\\\":3,\\\"line_stop\\\":3,\\\"col_start\\\":3,\\\"col_stop\\\":4,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" x += 20;\\\"}\"}"
"Identifier": "{\"name\":\"x\",\"span\":\"{\\\"line_start\\\":6,\\\"line_stop\\\":6,\\\"col_start\\\":3,\\\"col_stop\\\":4,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" x += 20;\\\"}\"}"
},
"right": {
"Value": {
"Implicit": [
"20",
{
"line_start": 3,
"line_stop": 3,
"line_start": 6,
"line_stop": 6,
"col_start": 8,
"col_stop": 10,
"path": "",
@ -94,8 +108,8 @@
},
"op": "Add",
"span": {
"line_start": 3,
"line_stop": 3,
"line_start": 6,
"line_stop": 6,
"col_start": 3,
"col_stop": 10,
"path": "",
@ -104,33 +118,750 @@
}
},
"span": {
"line_start": 3,
"line_stop": 3,
"line_start": 6,
"line_stop": 6,
"col_start": 3,
"col_stop": 10,
"path": "",
"content": " x += 20;"
}
}
},
{
"Console": {
"function": {
"Assert": {
"Binary": {
"left": {
"Identifier": "{\"name\":\"x\",\"span\":\"{\\\"line_start\\\":7,\\\"line_stop\\\":7,\\\"col_start\\\":18,\\\"col_stop\\\":19,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" console.assert(x == 30u32);\\\"}\"}"
},
"right": {
"Value": {
"Integer": [
"U32",
"30",
{
"line_start": 7,
"line_stop": 7,
"col_start": 23,
"col_stop": 28,
"path": "",
"content": " console.assert(x == 30u32);"
}
]
}
},
"op": "Eq",
"span": {
"line_start": 7,
"line_stop": 7,
"col_start": 18,
"col_stop": 28,
"path": "",
"content": " console.assert(x == 30u32);"
}
}
}
},
"span": {
"line_start": 7,
"line_stop": 7,
"col_start": 3,
"col_stop": 28,
"path": "",
"content": " console.assert(x == 30u32);"
}
}
},
{
"Definition": {
"declaration_type": "Let",
"variable_names": [
{
"mutable": true,
"identifier": "{\"name\":\"y\",\"span\":\"{\\\"line_start\\\":9,\\\"line_stop\\\":9,\\\"col_start\\\":7,\\\"col_stop\\\":8,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" let y = [1u8, 2u8];\\\"}\"}",
"span": {
"line_start": 9,
"line_stop": 9,
"col_start": 7,
"col_stop": 8,
"path": "",
"content": " let y = [1u8, 2u8];"
}
}
],
"type_": null,
"value": {
"ArrayInline": {
"elements": [
{
"Expression": {
"Value": {
"Integer": [
"U8",
"1",
{
"line_start": 9,
"line_stop": 9,
"col_start": 12,
"col_stop": 15,
"path": "",
"content": " let y = [1u8, 2u8];"
}
]
}
}
},
{
"Expression": {
"Value": {
"Integer": [
"U8",
"2",
{
"line_start": 9,
"line_stop": 9,
"col_start": 17,
"col_stop": 20,
"path": "",
"content": " let y = [1u8, 2u8];"
}
]
}
}
}
],
"span": {
"line_start": 9,
"line_stop": 9,
"col_start": 11,
"col_stop": 21,
"path": "",
"content": " let y = [1u8, 2u8];"
}
}
},
"span": {
"line_start": 9,
"line_stop": 9,
"col_start": 3,
"col_stop": 21,
"path": "",
"content": " let y = [1u8, 2u8];"
}
}
},
{
"Assign": {
"operation": "Assign",
"assignee": {
"identifier": "{\"name\":\"y\",\"span\":\"{\\\"line_start\\\":10,\\\"line_stop\\\":10,\\\"col_start\\\":3,\\\"col_stop\\\":4,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" y[0] += 3u8;\\\"}\"}",
"accesses": [
{
"ArrayIndex": {
"Value": {
"Implicit": [
"0",
{
"line_start": 10,
"line_stop": 10,
"col_start": 5,
"col_stop": 6,
"path": "",
"content": " y[0] += 3u8;"
}
]
}
}
}
],
"span": {
"line_start": 10,
"line_stop": 10,
"col_start": 3,
"col_stop": 7,
"path": "",
"content": " y[0] += 3u8;"
}
},
"value": {
"Binary": {
"left": {
"ArrayAccess": {
"array": {
"Identifier": "{\"name\":\"y\",\"span\":\"{\\\"line_start\\\":10,\\\"line_stop\\\":10,\\\"col_start\\\":3,\\\"col_stop\\\":4,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" y[0] += 3u8;\\\"}\"}"
},
"index": {
"Value": {
"Implicit": [
"0",
{
"line_start": 10,
"line_stop": 10,
"col_start": 5,
"col_stop": 6,
"path": "",
"content": " y[0] += 3u8;"
}
]
}
},
"span": {
"line_start": 10,
"line_stop": 10,
"col_start": 3,
"col_stop": 14,
"path": "",
"content": " y[0] += 3u8;"
}
}
},
"right": {
"Value": {
"Integer": [
"U8",
"3",
{
"line_start": 10,
"line_stop": 10,
"col_start": 11,
"col_stop": 14,
"path": "",
"content": " y[0] += 3u8;"
}
]
}
},
"op": "Add",
"span": {
"line_start": 10,
"line_stop": 10,
"col_start": 3,
"col_stop": 14,
"path": "",
"content": " y[0] += 3u8;"
}
}
},
"span": {
"line_start": 10,
"line_stop": 10,
"col_start": 3,
"col_stop": 14,
"path": "",
"content": " y[0] += 3u8;"
}
}
},
{
"Console": {
"function": {
"Assert": {
"Binary": {
"left": {
"ArrayAccess": {
"array": {
"Identifier": "{\"name\":\"y\",\"span\":\"{\\\"line_start\\\":11,\\\"line_stop\\\":11,\\\"col_start\\\":18,\\\"col_stop\\\":19,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" console.assert(y[0] == 4u8);\\\"}\"}"
},
"index": {
"Value": {
"Implicit": [
"0",
{
"line_start": 11,
"line_stop": 11,
"col_start": 20,
"col_stop": 21,
"path": "",
"content": " console.assert(y[0] == 4u8);"
}
]
}
},
"span": {
"line_start": 11,
"line_stop": 11,
"col_start": 18,
"col_stop": 22,
"path": "",
"content": " console.assert(y[0] == 4u8);"
}
}
},
"right": {
"Value": {
"Integer": [
"U8",
"4",
{
"line_start": 11,
"line_stop": 11,
"col_start": 26,
"col_stop": 29,
"path": "",
"content": " console.assert(y[0] == 4u8);"
}
]
}
},
"op": "Eq",
"span": {
"line_start": 11,
"line_stop": 11,
"col_start": 18,
"col_stop": 29,
"path": "",
"content": " console.assert(y[0] == 4u8);"
}
}
}
},
"span": {
"line_start": 11,
"line_stop": 11,
"col_start": 3,
"col_stop": 29,
"path": "",
"content": " console.assert(y[0] == 4u8);"
}
}
},
{
"Definition": {
"declaration_type": "Let",
"variable_names": [
{
"mutable": true,
"identifier": "{\"name\":\"z\",\"span\":\"{\\\"line_start\\\":13,\\\"line_stop\\\":13,\\\"col_start\\\":7,\\\"col_stop\\\":8,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" let z = (1u8, 2u8);\\\"}\"}",
"span": {
"line_start": 13,
"line_stop": 13,
"col_start": 7,
"col_stop": 8,
"path": "",
"content": " let z = (1u8, 2u8);"
}
}
],
"type_": null,
"value": {
"TupleInit": {
"elements": [
{
"Value": {
"Integer": [
"U8",
"1",
{
"line_start": 13,
"line_stop": 13,
"col_start": 12,
"col_stop": 15,
"path": "",
"content": " let z = (1u8, 2u8);"
}
]
}
},
{
"Value": {
"Integer": [
"U8",
"2",
{
"line_start": 13,
"line_stop": 13,
"col_start": 17,
"col_stop": 20,
"path": "",
"content": " let z = (1u8, 2u8);"
}
]
}
}
],
"span": {
"line_start": 13,
"line_stop": 13,
"col_start": 11,
"col_stop": 21,
"path": "",
"content": " let z = (1u8, 2u8);"
}
}
},
"span": {
"line_start": 13,
"line_stop": 13,
"col_start": 3,
"col_stop": 21,
"path": "",
"content": " let z = (1u8, 2u8);"
}
}
},
{
"Assign": {
"operation": "Assign",
"assignee": {
"identifier": "{\"name\":\"z\",\"span\":\"{\\\"line_start\\\":14,\\\"line_stop\\\":14,\\\"col_start\\\":3,\\\"col_stop\\\":4,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" z.1 += 3u8;\\\"}\"}",
"accesses": [
{
"Tuple": [
{
"value": "1"
},
{
"line_start": 14,
"line_stop": 14,
"col_start": 3,
"col_stop": 6,
"path": "",
"content": " z.1 += 3u8;"
}
]
}
],
"span": {
"line_start": 14,
"line_stop": 14,
"col_start": 3,
"col_stop": 6,
"path": "",
"content": " z.1 += 3u8;"
}
},
"value": {
"Binary": {
"left": {
"TupleAccess": {
"tuple": {
"Identifier": "{\"name\":\"z\",\"span\":\"{\\\"line_start\\\":14,\\\"line_stop\\\":14,\\\"col_start\\\":3,\\\"col_stop\\\":4,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" z.1 += 3u8;\\\"}\"}"
},
"index": {
"value": "1"
},
"span": {
"line_start": 14,
"line_stop": 14,
"col_start": 3,
"col_stop": 13,
"path": "",
"content": " z.1 += 3u8;"
}
}
},
"right": {
"Value": {
"Integer": [
"U8",
"3",
{
"line_start": 14,
"line_stop": 14,
"col_start": 10,
"col_stop": 13,
"path": "",
"content": " z.1 += 3u8;"
}
]
}
},
"op": "Add",
"span": {
"line_start": 14,
"line_stop": 14,
"col_start": 3,
"col_stop": 13,
"path": "",
"content": " z.1 += 3u8;"
}
}
},
"span": {
"line_start": 14,
"line_stop": 14,
"col_start": 3,
"col_stop": 13,
"path": "",
"content": " z.1 += 3u8;"
}
}
},
{
"Console": {
"function": {
"Assert": {
"Binary": {
"left": {
"TupleAccess": {
"tuple": {
"Identifier": "{\"name\":\"z\",\"span\":\"{\\\"line_start\\\":15,\\\"line_stop\\\":15,\\\"col_start\\\":18,\\\"col_stop\\\":19,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" console.assert(z.1 == 5u8);\\\"}\"}"
},
"index": {
"value": "1"
},
"span": {
"line_start": 15,
"line_stop": 15,
"col_start": 18,
"col_stop": 21,
"path": "",
"content": " console.assert(z.1 == 5u8);"
}
}
},
"right": {
"Value": {
"Integer": [
"U8",
"5",
{
"line_start": 15,
"line_stop": 15,
"col_start": 25,
"col_stop": 28,
"path": "",
"content": " console.assert(z.1 == 5u8);"
}
]
}
},
"op": "Eq",
"span": {
"line_start": 15,
"line_stop": 15,
"col_start": 18,
"col_stop": 28,
"path": "",
"content": " console.assert(z.1 == 5u8);"
}
}
}
},
"span": {
"line_start": 15,
"line_stop": 15,
"col_start": 3,
"col_stop": 28,
"path": "",
"content": " console.assert(z.1 == 5u8);"
}
}
},
{
"Definition": {
"declaration_type": "Let",
"variable_names": [
{
"mutable": true,
"identifier": "{\"name\":\"foo\",\"span\":\"{\\\"line_start\\\":17,\\\"line_stop\\\":17,\\\"col_start\\\":7,\\\"col_stop\\\":10,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" let foo = Foo { f: 6u8 };\\\"}\"}",
"span": {
"line_start": 17,
"line_stop": 17,
"col_start": 7,
"col_stop": 10,
"path": "",
"content": " let foo = Foo { f: 6u8 };"
}
}
],
"type_": null,
"value": {
"CircuitInit": {
"name": "{\"name\":\"Foo\",\"span\":\"{\\\"line_start\\\":17,\\\"line_stop\\\":17,\\\"col_start\\\":13,\\\"col_stop\\\":16,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" let foo = Foo { f: 6u8 };\\\"}\"}",
"members": [
{
"identifier": "{\"name\":\"f\",\"span\":\"{\\\"line_start\\\":17,\\\"line_stop\\\":17,\\\"col_start\\\":19,\\\"col_stop\\\":20,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" let foo = Foo { f: 6u8 };\\\"}\"}",
"expression": {
"Value": {
"Integer": [
"U8",
"6",
{
"line_start": 17,
"line_stop": 17,
"col_start": 22,
"col_stop": 25,
"path": "",
"content": " let foo = Foo { f: 6u8 };"
}
]
}
}
}
],
"span": {
"line_start": 17,
"line_stop": 17,
"col_start": 13,
"col_stop": 27,
"path": "",
"content": " let foo = Foo { f: 6u8 };"
}
}
},
"span": {
"line_start": 17,
"line_stop": 17,
"col_start": 3,
"col_stop": 27,
"path": "",
"content": " let foo = Foo { f: 6u8 };"
}
}
},
{
"Assign": {
"operation": "Assign",
"assignee": {
"identifier": "{\"name\":\"foo\",\"span\":\"{\\\"line_start\\\":18,\\\"line_stop\\\":18,\\\"col_start\\\":3,\\\"col_stop\\\":6,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" foo.f += 2u8;\\\"}\"}",
"accesses": [
{
"Member": "{\"name\":\"f\",\"span\":\"{\\\"line_start\\\":18,\\\"line_stop\\\":18,\\\"col_start\\\":7,\\\"col_stop\\\":8,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" foo.f += 2u8;\\\"}\"}"
}
],
"span": {
"line_start": 18,
"line_stop": 18,
"col_start": 3,
"col_stop": 8,
"path": "",
"content": " foo.f += 2u8;"
}
},
"value": {
"Binary": {
"left": {
"CircuitMemberAccess": {
"circuit": {
"Identifier": "{\"name\":\"foo\",\"span\":\"{\\\"line_start\\\":18,\\\"line_stop\\\":18,\\\"col_start\\\":3,\\\"col_stop\\\":6,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" foo.f += 2u8;\\\"}\"}"
},
"name": "{\"name\":\"f\",\"span\":\"{\\\"line_start\\\":18,\\\"line_stop\\\":18,\\\"col_start\\\":7,\\\"col_stop\\\":8,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" foo.f += 2u8;\\\"}\"}",
"span": {
"line_start": 18,
"line_stop": 18,
"col_start": 3,
"col_stop": 15,
"path": "",
"content": " foo.f += 2u8;"
}
}
},
"right": {
"Value": {
"Integer": [
"U8",
"2",
{
"line_start": 18,
"line_stop": 18,
"col_start": 12,
"col_stop": 15,
"path": "",
"content": " foo.f += 2u8;"
}
]
}
},
"op": "Add",
"span": {
"line_start": 18,
"line_stop": 18,
"col_start": 3,
"col_stop": 15,
"path": "",
"content": " foo.f += 2u8;"
}
}
},
"span": {
"line_start": 18,
"line_stop": 18,
"col_start": 3,
"col_stop": 15,
"path": "",
"content": " foo.f += 2u8;"
}
}
},
{
"Console": {
"function": {
"Assert": {
"Binary": {
"left": {
"CircuitMemberAccess": {
"circuit": {
"Identifier": "{\"name\":\"foo\",\"span\":\"{\\\"line_start\\\":19,\\\"line_stop\\\":19,\\\"col_start\\\":18,\\\"col_stop\\\":21,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" console.assert(foo.f == 8u8);\\\"}\"}"
},
"name": "{\"name\":\"f\",\"span\":\"{\\\"line_start\\\":19,\\\"line_stop\\\":19,\\\"col_start\\\":22,\\\"col_stop\\\":23,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" console.assert(foo.f == 8u8);\\\"}\"}",
"span": {
"line_start": 19,
"line_stop": 19,
"col_start": 18,
"col_stop": 23,
"path": "",
"content": " console.assert(foo.f == 8u8);"
}
}
},
"right": {
"Value": {
"Integer": [
"U8",
"8",
{
"line_start": 19,
"line_stop": 19,
"col_start": 27,
"col_stop": 30,
"path": "",
"content": " console.assert(foo.f == 8u8);"
}
]
}
},
"op": "Eq",
"span": {
"line_start": 19,
"line_stop": 19,
"col_start": 18,
"col_stop": 30,
"path": "",
"content": " console.assert(foo.f == 8u8);"
}
}
}
},
"span": {
"line_start": 19,
"line_stop": 19,
"col_start": 3,
"col_stop": 30,
"path": "",
"content": " console.assert(foo.f == 8u8);"
}
}
}
],
"span": {
"line_start": 1,
"line_stop": 4,
"line_start": 4,
"line_stop": 20,
"col_start": 17,
"col_stop": 2,
"path": "",
"content": "function main() {\n...\n}"
"content": "function main() {\n...\n} "
}
},
"span": {
"line_start": 1,
"line_stop": 4,
"line_start": 4,
"line_stop": 20,
"col_start": 1,
"col_stop": 2,
"path": "",
"content": "function main() {\n...\n}\n"
"content": "function main() {\n...\n} \n\n\n\n\n\n\n\n\n\n\n\n\n\n"
}
}
}
}
}

View File

@ -1,4 +1,20 @@
circuit Foo {
f: u8,
}
function main() {
let x = 10u32;
x += 20;
console.assert(x == 30u32);
let y = [1u8, 2u8];
y[0] += 3u8;
console.assert(y[0] == 4u8);
let z = (1u8, 2u8);
z.1 += 3u8;
console.assert(z.1 == 5u8);
let foo = Foo { f: 6u8 };
foo.f += 2u8;
console.assert(foo.f == 8u8);
}

View File

@ -308,3 +308,11 @@ fn test_duplicate_name_context() {
assert_satisfied(program);
}
#[test]
fn test_mutable_call_immutable_context() {
let program_string = include_str!("mutable_call_immutable_context.leo");
let program = parse_program(program_string).unwrap();
assert_satisfied(program);
}

View File

@ -0,0 +1,12 @@
circuit TestMe {
x: u8,
function test_me(mut self) -> u8 {
self.x += 1;
return self.x
}
}
function main () {
const t = TestMe {x: 6u8}.test_me();
}

View File

@ -74,6 +74,14 @@ fn test_field() {
assert_satisfied(program)
}
#[test]
fn test_no_space_between_literal() {
let program_string = include_str!("no_space_between_literal.leo");
let mut program = parse_program(program_string).unwrap();
expect_compiler_error(program)
}
#[test]
fn test_add() {
use std::ops::Add;

View File

@ -0,0 +1,3 @@
function main() {
const f = 1 field;
}

View File

@ -0,0 +1,7 @@
function main() {
console.log("{}", 1u8);
}
function main() {
console.log("{}", 2u8);
}

View File

@ -211,3 +211,11 @@ fn test_array_params_direct_call() {
assert_satisfied(program);
}
#[test]
fn test_duplicate_function_definition() {
let program_string = include_str!("duplicate_definition.leo");
let error = parse_program(program_string).err().unwrap();
expect_asg_error(error);
}

View File

@ -45,6 +45,14 @@ pub fn group_element_to_input_value(g: EdwardsAffine) -> GroupValue {
})
}
#[test]
fn test_no_space_between_literal() {
let program_string = include_str!("no_space_between_literal.leo");
let mut program = parse_program(program_string).unwrap();
expect_compiler_error(program)
}
#[test]
fn test_one() {
let program_string = include_str!("one.leo");

View File

@ -0,0 +1,3 @@
function main() {
const g = (0,1) group;
}

View File

@ -132,3 +132,11 @@ fn test_i128_assert_eq() {
fn test_i128_ternary() {
TestI128::test_ternary();
}
#[test]
fn test_no_space_between_literal() {
let program_string = include_str!("no_space_between_literal.leo");
let program = parse_program(program_string);
assert!(program.is_err());
}

View File

@ -0,0 +1,3 @@
function main() {
const i = 1 i128;
}

View File

@ -131,3 +131,11 @@ fn test_i16_console_assert() {
fn test_i16_ternary() {
TestI16::test_ternary();
}
#[test]
fn test_no_space_between_literal() {
let program_string = include_str!("no_space_between_literal.leo");
let program = parse_program(program_string);
assert!(program.is_err());
}

View File

@ -0,0 +1,3 @@
function main() {
const i = 1 i16;
}

View File

@ -131,3 +131,11 @@ fn test_i32_console_assert() {
fn test_i32_ternary() {
TestI32::test_ternary();
}
#[test]
fn test_no_space_between_literal() {
let program_string = include_str!("no_space_between_literal.leo");
let program = parse_program(program_string);
assert!(program.is_err());
}

View File

@ -0,0 +1,3 @@
function main() {
const i = 1 i32;
}

View File

@ -132,3 +132,11 @@ fn test_i64_console_assert() {
fn test_i64_ternary() {
TestI64::test_ternary();
}
#[test]
fn test_no_space_between_literal() {
let program_string = include_str!("no_space_between_literal.leo");
let program = parse_program(program_string);
assert!(program.is_err());
}

View File

@ -0,0 +1,3 @@
function main() {
const i = 1 i64;
}

View File

@ -131,3 +131,11 @@ fn test_i8_console_assert() {
fn test_i8_ternary() {
TestI8::test_ternary();
}
#[test]
fn test_no_space_between_literal() {
let program_string = include_str!("no_space_between_literal.leo");
let program = parse_program(program_string);
assert!(program.is_err());
}

View File

@ -0,0 +1,3 @@
function main() {
const i = 1 i8;
}

View File

@ -116,3 +116,11 @@ fn test_u128_console_assert() {
fn test_u128_ternary() {
TestU128::test_ternary();
}
#[test]
fn test_no_space_between_literal() {
let program_string = include_str!("no_space_between_literal.leo");
let program = parse_program(program_string);
assert!(program.is_err());
}

View File

@ -0,0 +1,3 @@
function main() {
const i = 1 u128;
}

View File

@ -116,3 +116,11 @@ fn test_u16_console_assert() {
fn test_u16_ternary() {
TestU16::test_ternary();
}
#[test]
fn test_no_space_between_literal() {
let program_string = include_str!("no_space_between_literal.leo");
let program = parse_program(program_string);
assert!(program.is_err());
}

View File

@ -0,0 +1,3 @@
function main() {
const i = 1 u16;
}

View File

@ -116,3 +116,11 @@ fn test_u32_console_assert() {
fn test_u32_ternary() {
TestU32::test_ternary();
}
#[test]
fn test_no_space_between_literal() {
let program_string = include_str!("no_space_between_literal.leo");
let program = parse_program(program_string);
assert!(program.is_err());
}

View File

@ -0,0 +1,3 @@
function main() {
const i = 1 u32;
}

View File

@ -116,3 +116,11 @@ fn test_u64_console_assert() {
fn test_u64_ternary() {
TestU64::test_ternary();
}
#[test]
fn test_no_space_between_literal() {
let program_string = include_str!("no_space_between_literal.leo");
let program = parse_program(program_string);
assert!(program.is_err());
}

View File

@ -0,0 +1,3 @@
function main() {
const i = 1 u64;
}

View File

@ -116,3 +116,11 @@ fn test_u8_console_assert() {
fn test_u8_ternary() {
TestU8::test_ternary();
}
#[test]
fn test_no_space_between_literal() {
let program_string = include_str!("no_space_between_literal.leo");
let program = parse_program(program_string);
assert!(program.is_err());
}

View File

@ -0,0 +1,3 @@
function main() {
const i = 1 u8;
}

grammar/.gitattributes vendored Normal file (1 line)
View File

@ -0,0 +1 @@
abnf-grammar.txt text eol=crlf

grammar/Cargo.toml Normal file (18 lines)
View File

@ -0,0 +1,18 @@
[package]
name = "leo-abnf"
version = "1.2.3"
authors = [ "The Aleo Team <hello@aleo.org>" ]
description = "ABNF to Markdown converter"
edition = "2018"
keywords = [
"aleo",
"cryptography",
"leo",
"programming-language",
"zero-knowledge",
"leo-abnf"
]
[dependencies]
abnf = "0.10.2"
anyhow = "1.0"

grammar/abnf-grammar.md Normal file (1731 lines)

File diff suppressed because it is too large

grammar/abnf-grammar.txt Normal file (1028 lines)

File diff suppressed because it is too large

grammar/src/main.rs Normal file (220 lines)
View File

@ -0,0 +1,220 @@
// Copyright (C) 2019-2021 Aleo Systems Inc.
// This file is part of the Leo library.
// The Leo library is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// The Leo library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
// ABNF PARSING RULES
//
// Header:
// ```abnf
// ; Introduction
// ; -------------
// ```
//
// Code block in docs (note double whitespace after colon):
// ```abnf
// ;  code
// ;  code
// ```
//
// Rule:
// ```abnf
// address = "address"
// ```
//
// Line:
// ``` abnf
// ;;;;;;;;;
// ```
//
use abnf::types::{Node, Rule};
use anyhow::Result;
use std::collections::{HashMap, HashSet};
/// Processor's scope. Used when code block or definition starts or ends.
#[derive(Debug, Clone)]
enum Scope {
Free,
Code,
Definition(Rule),
}
/// Transforms abnf file into Markdown.
#[derive(Debug, Clone)]
struct Processor<'a> {
rules: HashMap<String, Rule>,
grammar: &'a str,
scope: Scope,
line: u32,
out: String,
}
impl<'a> Processor<'a> {
fn new(grammar: &'a str, abnf: Vec<Rule>) -> Processor<'a> {
// we need a hashmap to pull rules easily
let rules: HashMap<String, Rule> = abnf.into_iter().map(|rule| (rule.name().to_string(), rule)).collect();
Processor {
grammar,
line: 0,
out: String::new(),
rules,
scope: Scope::Free,
}
}
/// Main function for this struct.
/// Goes through each line and transforms it into proper markdown.
fn process(&mut self) {
let lines = self.grammar.lines();
let mut prev = "";
for line in lines {
self.line += 1;
// code block in comment (not highlighted as abnf)
if let Some(code) = line.strip_prefix(";  ") {
self.enter_scope(Scope::Code);
self.append_str(code);
// just comment. end of code block
} else if let Some(code) = line.strip_prefix("; ") {
self.enter_scope(Scope::Free);
self.append_str(code);
// horizontal rule - section separator
} else if line.starts_with(";;;;;;;;;;") {
self.enter_scope(Scope::Free);
self.append_str("\n--------\n");
// empty line in comment. end of code block
} else if line.starts_with(';') {
self.enter_scope(Scope::Free);
self.append_str("\n\n");
// just empty line. end of doc, start of definition
} else if line.is_empty() {
self.enter_scope(Scope::Free);
self.append_str("");
// definition (may be multiline)
} else {
// if there's an equality sign and previous line was empty
if line.contains('=') && prev.is_empty() {
let (def, _) = line.split_at(line.find('=').unwrap());
let def = def.trim();
// try to find rule matching definition or fail
let rule = self.rules.get(&def.to_string()).cloned().unwrap();
self.enter_scope(Scope::Definition(rule));
}
self.append_str(line);
}
prev = line;
}
}
/// Append new line into output, add newline character.
fn append_str(&mut self, line: &str) {
self.out.push_str(line);
self.out.push('\n');
}
/// Enter new scope (definition or code block). Allows customizing
/// pre and post lines for each scope entered or exited.
fn enter_scope(&mut self, new_scope: Scope) {
match (&self.scope, &new_scope) {
// exchange scopes between Free and Code
(Scope::Free, Scope::Code) => self.append_str("```"),
(Scope::Code, Scope::Free) => self.append_str("```"),
// exchange scopes between Free and Definition
(Scope::Free, Scope::Definition(rule)) => {
self.append_str(&format!("<a name=\"{}\"></a>", rule.name()));
self.append_str("```abnf");
}
(Scope::Definition(rule), Scope::Free) => {
let mut rules: Vec<String> = Vec::new();
parse_abnf_node(rule.node(), &mut rules);
// 1. leave only unique keys
// 2. map each rule into a link
// 3. join results as a list
// Note: GitHub only allows custom tags with 'user-content-' prefix
let keys = rules
.into_iter()
.collect::<HashSet<_>>()
.into_iter()
.map(|tag| format!("[{}](#user-content-{})", &tag, tag))
.collect::<Vec<String>>()
.join(", ");
self.append_str("```");
if !keys.is_empty() {
self.append_str(&format!("\nGo to: _{}_;\n", keys));
}
}
(_, _) => (),
};
self.scope = new_scope;
}
}
/// Recursively parse ABNF Node and fill sum vec with found rule names.
fn parse_abnf_node(node: &Node, sum: &mut Vec<String>) {
match node {
// these two are just vectors of rules
Node::Alternation(vec) | Node::Concatenation(vec) => {
for node in vec {
parse_abnf_node(node, sum);
}
}
Node::Group(node) | Node::Optional(node) => parse_abnf_node(node.as_ref(), sum),
// push rulename if it is known
Node::Rulename(name) => sum.push(name.clone()),
// do nothing for other nodes
_ => (),
}
}
fn main() -> Result<()> {
// Take Leo ABNF grammar file.
let grammar = include_str!("../abnf-grammar.txt");
// A. Coglio's proposal for %s syntax for case-sensitive statements has not been implemented
// in this library, so we need to remove all occurrences of %s in the grammar file.
// Link to this proposal: https://www.kestrel.edu/people/coglio/vstte18.pdf
let grammar = &str::replace(grammar, "%s", "");
// Parse ABNF to get list of all definitions.
let parsed = abnf::rulelist(grammar).map_err(|e| {
eprintln!("{}", &e);
anyhow::anyhow!(e)
})?;
// Init parser and run it. That's it.
let mut parser = Processor::new(grammar, parsed);
parser.process();
// Print result of conversion to STDOUT.
println!("{}", parser.out);
Ok(())
}

View File

@ -15,15 +15,12 @@
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use leo_asg::AsgConvertError;
use leo_ast::{FormattedError, Identifier, LeoError, Span};
use leo_parser::{DeprecatedError, SyntaxError};
use leo_parser::SyntaxError;
use std::{io, path::Path};
#[derive(Debug, Error)]
pub enum ImportParserError {
#[error("{}", _0)]
DeprecatedError(#[from] DeprecatedError),
#[error("{}", _0)]
Error(#[from] FormattedError),
@ -40,7 +37,6 @@ impl Into<AsgConvertError> for ImportParserError {
match self {
ImportParserError::Error(x) => AsgConvertError::ImportError(x),
ImportParserError::SyntaxError(x) => x.into(),
ImportParserError::DeprecatedError(x) => AsgConvertError::SyntaxError(SyntaxError::DeprecatedError(x)),
ImportParserError::AsgConvertError(x) => x,
}
}
@ -66,15 +62,6 @@ impl ImportParserError {
Self::new_from_span(message, span)
}
///
/// A core package name has been imported twice.
///
pub fn duplicate_core_package(identifier: Identifier) -> Self {
let message = format!("Duplicate core_package import `{}`.", identifier.name);
Self::new_from_span(message, &identifier.span)
}
///
/// Failed to convert a file path into an os string.
///
@ -106,15 +93,6 @@ impl ImportParserError {
Self::new_from_span(message, span)
}
///
/// Failed to import all symbols at a package path.
///
pub fn star(path: &Path, span: &Span) -> Self {
let message = format!("Cannot import `*` from path `{:?}`.", path);
Self::new_from_span(message, span)
}
///
/// Failed to find a library file for the current package.
///

View File

@ -14,34 +14,16 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use std::{ffi::OsString, fs::FileType, io};
use std::{ffi::OsString, io};
#[derive(Debug, Error)]
pub enum ImportsDirectoryError {
#[error("creating: {}", _0)]
Creating(io::Error),
#[error("file entry getting: {}", _0)]
GettingFileEntry(io::Error),
#[error("file {:?} extension getting", _0)]
GettingFileExtension(OsString),
#[error("file {:?} type getting: {}", _0, _1)]
GettingFileType(OsString, io::Error),
#[error("package {:?} does not exist as an import", _0)]
ImportDoesNotExist(OsString),
#[error("invalid file {:?} extension: {:?}", _0, _1)]
InvalidFileExtension(OsString, OsString),
#[error("invalid file {:?} type: {:?}", _0, _1)]
InvalidFileType(OsString, FileType),
#[error("reading: {}", _0)]
Reading(io::Error),
#[error("removing: {}", _0)]
Removing(io::Error),
}

View File

@ -26,9 +26,6 @@ pub enum InputsDirectoryError {
#[error("file entry getting: {}", _0)]
GettingFileEntry(io::Error),
#[error("file {:?} extension getting", _0)]
GettingFileExtension(OsString),
#[error("file {:?} name getting", _0)]
GettingFileName(OsString),
@ -38,9 +35,6 @@ pub enum InputsDirectoryError {
#[error("{}", _0)]
InputFileError(#[from] InputFileError),
#[error("invalid file {:?} extension: {:?}", _0, _1)]
InvalidFileExtension(String, OsString),
#[error("invalid file {:?} type: {:?}", _0, _1)]
InvalidFileType(OsString, FileType),

View File

@ -14,21 +14,15 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use std::{io, path::PathBuf};
use std::path::PathBuf;
#[derive(Debug, Error)]
pub enum InputFileError {
#[error("{}: {}", _0, _1)]
Crate(&'static str, String),
#[error("creating: {}", _0)]
Creating(io::Error),
#[error("Cannot read from the provided file path - {:?}", _0)]
FileReadError(PathBuf),
#[error("writing: {}", _0)]
Writing(io::Error),
}
impl From<std::io::Error> for InputFileError {

View File

@ -14,21 +14,15 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use std::{io, path::PathBuf};
use std::path::PathBuf;
#[derive(Debug, Error)]
pub enum StateFileError {
#[error("{}: {}", _0, _1)]
Crate(&'static str, String),
#[error("creating: {}", _0)]
Creating(io::Error),
#[error("Cannot read from the provided file path - {:?}", _0)]
FileReadError(PathBuf),
#[error("writing: {}", _0)]
Writing(io::Error),
}
impl From<std::io::Error> for StateFileError {

View File

@ -14,24 +14,18 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use std::{io, path::PathBuf};
use std::path::PathBuf;
#[derive(Debug, Error)]
pub enum ChecksumFileError {
#[error("{}: {}", _0, _1)]
Crate(&'static str, String),
#[error("creating: {}", _0)]
Creating(io::Error),
#[error("Cannot read from the provided file path - {:?}", _0)]
FileReadError(PathBuf),
#[error("Cannot remove the provided file - {:?}", _0)]
FileRemovalError(PathBuf),
#[error("writing: {}", _0)]
Writing(io::Error),
}
impl From<std::io::Error> for ChecksumFileError {

View File

@ -14,24 +14,18 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use std::{io, path::PathBuf};
use std::path::PathBuf;
#[derive(Debug, Error)]
pub enum CircuitFileError {
#[error("{}: {}", _0, _1)]
Crate(&'static str, String),
#[error("creating: {}", _0)]
Creating(io::Error),
#[error("Cannot read from the provided file path - {:?}", _0)]
FileReadError(PathBuf),
#[error("Cannot remove the provided file - {:?}", _0)]
FileRemovalError(PathBuf),
#[error("writing: {}", _0)]
Writing(io::Error),
}
impl From<std::io::Error> for CircuitFileError {

View File

@ -14,31 +14,13 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use std::{ffi::OsString, fs::FileType, io};
use std::io;
#[derive(Debug, Error)]
pub enum OutputsDirectoryError {
#[error("creating: {}", _0)]
Creating(io::Error),
#[error("file entry getting: {}", _0)]
GettingFileEntry(io::Error),
#[error("file {:?} extension getting", _0)]
GettingFileExtension(OsString),
#[error("file {:?} type getting: {}", _0, _1)]
GettingFileType(OsString, io::Error),
#[error("invalid file {:?} extension: {:?}", _0, _1)]
InvalidFileExtension(OsString, OsString),
#[error("invalid file {:?} type: {:?}", _0, _1)]
InvalidFileType(OsString, FileType),
#[error("reading: {}", _0)]
Reading(io::Error),
#[error("removing: {}", _0)]
Removing(io::Error),
}

View File

@ -14,24 +14,18 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use std::{io, path::PathBuf};
use std::path::PathBuf;
#[derive(Debug, Error)]
pub enum ProofFileError {
#[error("{}: {}", _0, _1)]
Crate(&'static str, String),
#[error("creating: {}", _0)]
Creating(io::Error),
#[error("Cannot read from the provided file path - {:?}", _0)]
FileReadError(PathBuf),
#[error("Cannot remove the provided file - {:?}", _0)]
FileRemovalError(PathBuf),
#[error("writing: {}", _0)]
Writing(io::Error),
}
impl From<std::io::Error> for ProofFileError {

View File

@ -14,24 +14,18 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use std::{io, path::PathBuf};
use std::path::PathBuf;
#[derive(Debug, Error)]
pub enum ProvingKeyFileError {
#[error("{}: {}", _0, _1)]
Crate(&'static str, String),
#[error("creating: {}", _0)]
Creating(io::Error),
#[error("Cannot read from the provided file path - {:?}", _0)]
FileReadError(PathBuf),
#[error("Cannot remove the provided file - {:?}", _0)]
FileRemovalError(PathBuf),
#[error("writing: {}", _0)]
Writing(io::Error),
}
impl From<std::io::Error> for ProvingKeyFileError {

View File

@ -14,27 +14,18 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use std::{io, path::PathBuf};
use std::path::PathBuf;
#[derive(Debug, Error)]
pub enum VerificationKeyFileError {
#[error("{}: {}", _0, _1)]
Crate(&'static str, String),
#[error("creating: {}", _0)]
Creating(io::Error),
#[error("Cannot read from the provided file path - {:?}", _0)]
FileReadError(PathBuf),
#[error("Cannot remove the provided file - {:?}", _0)]
FileRemovalError(PathBuf),
#[error("Verification key file was corrupted")]
IncorrectVerificationKey,
#[error("writing: {}", _0)]
Writing(io::Error),
}
impl From<std::io::Error> for VerificationKeyFileError {

View File

@ -13,24 +13,18 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use std::{ffi::OsString, io};
use std::ffi::OsString;
#[derive(Debug, Error)]
pub enum PackageError {
#[error("{}: {}", _0, _1)]
Crate(&'static str, String),
#[error("`{}` creating: {}", _0, _1)]
Creating(&'static str, io::Error),
#[error("Failed to initialize package {:?} ({:?})", _0, _1)]
FailedToInitialize(String, OsString),
#[error("Invalid project name: {:?}", _0)]
InvalidPackageName(String),
#[error("`{}` metadata: {}", _0, _1)]
Removing(&'static str, io::Error),
}
impl From<std::io::Error> for PackageError {

View File

@ -14,18 +14,10 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use std::io;
#[derive(Debug, Error)]
pub enum GitignoreError {
#[error("{}: {}", _0, _1)]
Crate(&'static str, String),
#[error("creating: {}", _0)]
Creating(io::Error),
#[error("writing: {}", _0)]
Writing(io::Error),
}
impl From<std::io::Error> for GitignoreError {

View File

@ -14,19 +14,11 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use std::io;
#[allow(clippy::upper_case_acronyms)]
#[derive(Debug, Error)]
pub enum READMEError {
#[error("{}: {}", _0, _1)]
Crate(&'static str, String),
#[error("creating: {}", _0)]
Creating(io::Error),
#[error("writing: {}", _0)]
Writing(io::Error),
}
impl From<std::io::Error> for READMEError {

View File

@ -14,8 +14,7 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use std::{io, path::PathBuf};
use walkdir::Error as WalkDirError;
use std::path::PathBuf;
use zip::result::ZipError;
#[derive(Debug, Error)]
@ -23,21 +22,12 @@ pub enum ZipFileError {
#[error("{}: {}", _0, _1)]
Crate(&'static str, String),
#[error("creating: {}", _0)]
Creating(io::Error),
#[error("Cannot read from the provided file path - {:?}", _0)]
FileReadError(PathBuf),
#[error("Cannot remove the provided file - {:?}", _0)]
FileRemovalError(PathBuf),
#[error("writing: {}", _0)]
Writing(io::Error),
#[error("{}", _0)]
WalkDirError(#[from] WalkDirError),
#[error("{}", _0)]
ZipError(#[from] ZipError),
}

View File

@ -14,18 +14,10 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use std::io;
#[derive(Debug, Error)]
pub enum LibraryFileError {
#[error("{}: {}", _0, _1)]
Crate(&'static str, String),
#[error("creating: {}", _0)]
Creating(io::Error),
#[error("writing: {}", _0)]
Writing(io::Error),
}
impl From<std::io::Error> for LibraryFileError {

View File

@ -14,18 +14,10 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use std::io;
#[derive(Debug, Error)]
pub enum MainFileError {
#[error("{}: {}", _0, _1)]
Crate(&'static str, String),
#[error("creating: {}", _0)]
Creating(io::Error),
#[error("writing: {}", _0)]
Writing(io::Error),
}
impl From<std::io::Error> for MainFileError {

View File

@ -33,20 +33,6 @@ impl ImportsDirectory {
fs::create_dir_all(&path).map_err(ImportsDirectoryError::Creating)
}
/// Removes the directory at the provided path.
pub fn remove(path: &Path) -> Result<(), ImportsDirectoryError> {
let mut path = Cow::from(path);
if path.is_dir() && !path.ends_with(IMPORTS_DIRECTORY_NAME) {
path.to_mut().push(IMPORTS_DIRECTORY_NAME);
}
if path.exists() {
fs::remove_dir_all(&path).map_err(ImportsDirectoryError::Removing)?;
}
Ok(())
}
/// Removes an imported package in the imports directory at the provided path.
pub fn remove_import(path: &Path, package_name: &str) -> Result<(), ImportsDirectoryError> {
let mut path = Cow::from(path);

View File

@ -41,6 +41,13 @@ impl SyntaxError {
Self::new_from_span("unexpected EOF".to_string(), span)
}
pub fn unexpected_whitespace(left: &str, right: &str, span: &Span) -> Self {
Self::new_from_span(
format!("Unexpected white space between terms {} and {}", left, right),
span,
)
}
pub fn unexpected(got: &Token, expected: &[Token], span: &Span) -> Self {
Self::new_from_span(
format!(

View File

@ -16,7 +16,7 @@
use std::unimplemented;
use crate::{tokenizer::*, SyntaxError, SyntaxResult, Token, KEYWORD_TOKENS};
use crate::{tokenizer::*, unexpected_whitespace, SyntaxError, SyntaxResult, Token, KEYWORD_TOKENS};
use leo_ast::*;
use tendril::format_tendril;
@ -154,7 +154,7 @@ impl ParserContext {
}
_ => GroupCoordinate::SignLow,
},
Token::Ident(x) if x.as_ref() == "_" => GroupCoordinate::Inferred,
Token::Underscore => GroupCoordinate::Inferred,
Token::Int(value) => GroupCoordinate::Number(value.clone(), token.span.clone()),
_ => return None,
})
@ -164,10 +164,16 @@ impl ParserContext {
/// Removes the next two tokens if they are a pair of [`GroupCoordinate`] and returns them,
/// or [None] if the next token is not a [`GroupCoordinate`].
///
pub fn eat_group_partial(&mut self) -> Option<(GroupCoordinate, GroupCoordinate, Span)> {
pub fn eat_group_partial(&mut self) -> SyntaxResult<Option<(GroupCoordinate, GroupCoordinate, Span)>> {
let mut i = self.tokens.len() - 1;
let start_span = self.tokens.get(i)?.span.clone();
let first = self.peek_group_coordinate(&mut i)?;
let start_span = match self.tokens.get(i) {
Some(span) => span.span.clone(),
None => return Ok(None),
};
let first = match self.peek_group_coordinate(&mut i) {
Some(coord) => coord,
None => return Ok(None),
};
match self.tokens.get(i) {
Some(SpannedToken {
token: Token::Comma, ..
@ -175,19 +181,24 @@ impl ParserContext {
i -= 1;
}
_ => {
return None;
return Ok(None);
}
}
let second = self.peek_group_coordinate(&mut i)?;
let second = match self.peek_group_coordinate(&mut i) {
Some(coord) => coord,
None => return Ok(None),
};
let right_paren_span;
match self.tokens.get(i) {
Some(SpannedToken {
token: Token::RightParen,
..
span,
}) => {
right_paren_span = span.clone();
i -= 1;
}
_ => {
return None;
return Ok(None);
}
}
let end_span;
@ -200,12 +211,18 @@ impl ParserContext {
i -= 1;
}
_ => {
return None;
return Ok(None);
}
}
self.tokens.drain((i + 1)..);
Some((first, second, start_span + end_span))
unexpected_whitespace(
&right_paren_span,
&end_span,
&format!("({},{})", first, second),
"group",
)?;
Ok(Some((first, second, start_span + end_span)))
}
///

View File

@ -46,7 +46,7 @@ impl ParserContext {
self.fuzzy_struct_state = false;
// Parse expression.
let result = self.parse_expression_fuzzy();
let result = self.parse_conditional_expression();
// Restore prior parser state.
self.fuzzy_struct_state = prior_fuzzy_state;
@ -58,17 +58,17 @@ impl ParserContext {
/// Returns an [`Expression`] AST node if the next tokens represent
/// a ternary expression. May or may not include circuit init expressions.
///
/// Otherwise, tries to parse the next token using [`parse_or_expression`].
/// Otherwise, tries to parse the next token using [`parse_disjunctive_expression`].
///
pub fn parse_expression_fuzzy(&mut self) -> SyntaxResult<Expression> {
pub fn parse_conditional_expression(&mut self) -> SyntaxResult<Expression> {
// Try to parse the next expression. Try BinaryOperation::Or.
let mut expr = self.parse_or_expression()?;
let mut expr = self.parse_disjunctive_expression()?;
// Parse the rest of the ternary expression.
if self.eat(Token::Question).is_some() {
let if_true = self.parse_expression()?;
self.expect(Token::Colon)?;
let if_false = self.parse_expression_fuzzy()?;
let if_false = self.parse_conditional_expression()?;
expr = Expression::Ternary(TernaryExpression {
span: expr.span() + if_false.span(),
condition: Box::new(expr),
@ -83,12 +83,12 @@ impl ParserContext {
/// Returns an [`Expression`] AST node if the next tokens represent
/// a binary or expression.
///
/// Otherwise, tries to parse the next token using [`parse_and_expression`].
/// Otherwise, tries to parse the next token using [`parse_conjunctive_expression`].
///
pub fn parse_or_expression(&mut self) -> SyntaxResult<Expression> {
let mut expr = self.parse_and_expression()?;
pub fn parse_disjunctive_expression(&mut self) -> SyntaxResult<Expression> {
let mut expr = self.parse_conjunctive_expression()?;
while self.eat(Token::Or).is_some() {
let right = self.parse_and_expression()?;
let right = self.parse_conjunctive_expression()?;
expr = Expression::Binary(BinaryExpression {
span: expr.span() + right.span(),
op: BinaryOperation::Or,
@ -105,10 +105,10 @@ impl ParserContext {
///
/// Otherwise, tries to parse the next token using [`parse_bit_or_expression`].
///
pub fn parse_and_expression(&mut self) -> SyntaxResult<Expression> {
let mut expr = self.parse_bit_or_expression()?;
pub fn parse_conjunctive_expression(&mut self) -> SyntaxResult<Expression> {
let mut expr = self.parse_equality_expression()?;
while self.eat(Token::And).is_some() {
let right = self.parse_bit_or_expression()?;
let right = self.parse_equality_expression()?;
expr = Expression::Binary(BinaryExpression {
span: expr.span() + right.span(),
op: BinaryOperation::And,
@ -125,19 +125,19 @@ impl ParserContext {
///
/// Otherwise, tries to parse the next token using [`parse_bit_xor_expression`].
///
pub fn parse_bit_or_expression(&mut self) -> SyntaxResult<Expression> {
let mut expr = self.parse_bit_xor_expression()?;
while self.eat(Token::BitOr).is_some() {
let right = self.parse_bit_xor_expression()?;
expr = Expression::Binary(BinaryExpression {
span: expr.span() + right.span(),
op: BinaryOperation::BitOr,
left: Box::new(expr),
right: Box::new(right),
})
}
Ok(expr)
}
// pub fn parse_bit_or_expression(&mut self) -> SyntaxResult<Expression> {
// let mut expr = self.parse_bit_xor_expression()?;
// while self.eat(Token::BitOr).is_some() {
// let right = self.parse_bit_xor_expression()?;
// expr = Expression::Binary(BinaryExpression {
// span: expr.span() + right.span(),
// op: BinaryOperation::BitOr,
// left: Box::new(expr),
// right: Box::new(right),
// })
// }
// Ok(expr)
// }
///
/// Returns an [`Expression`] AST node if the next tokens represent a
@ -145,50 +145,50 @@ impl ParserContext {
///
/// Otherwise, tries to parse the next token using [`parse_bit_and_expression`].
///
pub fn parse_bit_xor_expression(&mut self) -> SyntaxResult<Expression> {
let mut expr = self.parse_bit_and_expression()?;
while self.eat(Token::BitXor).is_some() {
let right = self.parse_bit_and_expression()?;
expr = Expression::Binary(BinaryExpression {
span: expr.span() + right.span(),
op: BinaryOperation::BitXor,
left: Box::new(expr),
right: Box::new(right),
})
}
Ok(expr)
}
// pub fn parse_bit_xor_expression(&mut self) -> SyntaxResult<Expression> {
// let mut expr = self.parse_bit_and_expression()?;
// while self.eat(Token::BitXor).is_some() {
// let right = self.parse_bit_and_expression()?;
// expr = Expression::Binary(BinaryExpression {
// span: expr.span() + right.span(),
// op: BinaryOperation::BitXor,
// left: Box::new(expr),
// right: Box::new(right),
// })
// }
// Ok(expr)
// }
///
/// Returns an [`Expression`] AST node if the next tokens represent a
/// binary bitwise and expression.
///
/// Otherwise, tries to parse the next token using [`parse_eq_expression`].
/// Otherwise, tries to parse the next token using [`parse_equality_expression`].
///
pub fn parse_bit_and_expression(&mut self) -> SyntaxResult<Expression> {
let mut expr = self.parse_eq_expression()?;
while self.eat(Token::BitAnd).is_some() {
let right = self.parse_eq_expression()?;
expr = Expression::Binary(BinaryExpression {
span: expr.span() + right.span(),
op: BinaryOperation::BitAnd,
left: Box::new(expr),
right: Box::new(right),
})
}
Ok(expr)
}
// pub fn parse_bit_and_expression(&mut self) -> SyntaxResult<Expression> {
// let mut expr = self.parse_equality_expression()?;
// while self.eat(Token::BitAnd).is_some() {
// let right = self.parse_equality_expression()?;
// expr = Expression::Binary(BinaryExpression {
// span: expr.span() + right.span(),
// op: BinaryOperation::BitAnd,
// left: Box::new(expr),
// right: Box::new(right),
// })
// }
// Ok(expr)
// }
///
/// Returns an [`Expression`] AST node if the next tokens represent a
/// binary equals or not equals expression.
///
/// Otherwise, tries to parse the next token using [`parse_rel_expression`].
/// Otherwise, tries to parse the next token using [`parse_ordering_expression`].
///
pub fn parse_eq_expression(&mut self) -> SyntaxResult<Expression> {
let mut expr = self.parse_rel_expression()?;
pub fn parse_equality_expression(&mut self) -> SyntaxResult<Expression> {
let mut expr = self.parse_ordering_expression()?;
while let Some(SpannedToken { token: op, .. }) = self.eat_any(&[Token::Eq, Token::NotEq]) {
let right = self.parse_rel_expression()?;
let right = self.parse_ordering_expression()?;
expr = Expression::Binary(BinaryExpression {
span: expr.span() + right.span(),
op: match op {
@ -209,11 +209,11 @@ impl ParserContext {
///
/// Otherwise, tries to parse the next token using [`parse_shift_expression`].
///
pub fn parse_rel_expression(&mut self) -> SyntaxResult<Expression> {
let mut expr = self.parse_shift_expression()?;
pub fn parse_ordering_expression(&mut self) -> SyntaxResult<Expression> {
let mut expr = self.parse_additive_expression()?;
while let Some(SpannedToken { token: op, .. }) = self.eat_any(&[Token::Lt, Token::LtEq, Token::Gt, Token::GtEq])
{
let right = self.parse_shift_expression()?;
let right = self.parse_additive_expression()?;
expr = Expression::Binary(BinaryExpression {
span: expr.span() + right.span(),
op: match op {
@ -234,26 +234,26 @@ impl ParserContext {
/// Returns an [`Expression`] AST node if the next tokens represent a
/// binary shift expression.
///
/// Otherwise, tries to parse the next token using [`parse_add_sub_expression`].
/// Otherwise, tries to parse the next token using [`parse_additive_expression`].
///
pub fn parse_shift_expression(&mut self) -> SyntaxResult<Expression> {
let mut expr = self.parse_add_sub_expression()?;
while let Some(SpannedToken { token: op, .. }) = self.eat_any(&[Token::Shl, Token::Shr, Token::ShrSigned]) {
let right = self.parse_add_sub_expression()?;
expr = Expression::Binary(BinaryExpression {
span: expr.span() + right.span(),
op: match op {
Token::Shl => BinaryOperation::Shl,
Token::Shr => BinaryOperation::Shr,
Token::ShrSigned => BinaryOperation::ShrSigned,
_ => unimplemented!(),
},
left: Box::new(expr),
right: Box::new(right),
})
}
Ok(expr)
}
// pub fn parse_shift_expression(&mut self) -> SyntaxResult<Expression> {
// let mut expr = self.parse_additive_expression()?;
// while let Some(SpannedToken { token: op, .. }) = self.eat_any(&[Token::Shl, Token::Shr, Token::ShrSigned]) {
// let right = self.parse_additive_expression()?;
// expr = Expression::Binary(BinaryExpression {
// span: expr.span() + right.span(),
// op: match op {
// Token::Shl => BinaryOperation::Shl,
// Token::Shr => BinaryOperation::Shr,
// Token::ShrSigned => BinaryOperation::ShrSigned,
// _ => unimplemented!(),
// },
// left: Box::new(expr),
// right: Box::new(right),
// })
// }
// Ok(expr)
// }
///
/// Returns an [`Expression`] AST node if the next tokens represent a
@ -261,10 +261,10 @@ impl ParserContext {
///
/// Otherwise, tries to parse the next token using [`parse_mul_div_pow_expression`].
///
pub fn parse_add_sub_expression(&mut self) -> SyntaxResult<Expression> {
let mut expr = self.parse_mul_div_mod_expression()?;
pub fn parse_additive_expression(&mut self) -> SyntaxResult<Expression> {
let mut expr = self.parse_multiplicative_expression()?;
while let Some(SpannedToken { token: op, .. }) = self.eat_any(&[Token::Add, Token::Minus]) {
let right = self.parse_mul_div_mod_expression()?;
let right = self.parse_multiplicative_expression()?;
expr = Expression::Binary(BinaryExpression {
span: expr.span() + right.span(),
op: match op {
@ -283,18 +283,18 @@ impl ParserContext {
/// Returns an [`Expression`] AST node if the next tokens represent a
/// binary multiplication, division, or modulus expression.
///
/// Otherwise, tries to parse the next token using [`parse_exp_expression`].
/// Otherwise, tries to parse the next token using [`parse_exponential_expression`].
///
pub fn parse_mul_div_mod_expression(&mut self) -> SyntaxResult<Expression> {
let mut expr = self.parse_exp_expression()?;
while let Some(SpannedToken { token: op, .. }) = self.eat_any(&[Token::Mul, Token::Div, Token::Mod]) {
let right = self.parse_exp_expression()?;
pub fn parse_multiplicative_expression(&mut self) -> SyntaxResult<Expression> {
let mut expr = self.parse_exponential_expression()?;
while let Some(SpannedToken { token: op, .. }) = self.eat_any(&[Token::Mul, Token::Div]) {
let right = self.parse_exponential_expression()?;
expr = Expression::Binary(BinaryExpression {
span: expr.span() + right.span(),
op: match op {
Token::Mul => BinaryOperation::Mul,
Token::Div => BinaryOperation::Div,
Token::Mod => BinaryOperation::Mod,
// Token::Mod => BinaryOperation::Mod,
_ => unimplemented!(),
},
left: Box::new(expr),
@ -310,9 +310,8 @@ impl ParserContext {
///
/// Otherwise, tries to parse the next token using [`parse_cast_expression`].
///
pub fn parse_exp_expression(&mut self) -> SyntaxResult<Expression> {
pub fn parse_exponential_expression(&mut self) -> SyntaxResult<Expression> {
let mut exprs = vec![self.parse_cast_expression()?];
while self.eat(Token::Exp).is_some() {
exprs.push(self.parse_cast_expression()?);
}
@ -352,19 +351,19 @@ impl ParserContext {
/// Returns an [`Expression`] AST node if the next tokens represent a
/// unary not, negate, or bitwise not expression.
///
/// Otherwise, tries to parse the next token using [`parse_access_expression`].
/// Otherwise, tries to parse the next token using [`parse_postfix_expression`].
///
pub fn parse_unary_expression(&mut self) -> SyntaxResult<Expression> {
let mut ops = Vec::new();
while let Some(token) = self.eat_any(&[Token::Not, Token::Minus, Token::BitNot]) {
while let Some(token) = self.eat_any(&[Token::Not, Token::Minus]) {
ops.push(token);
}
let mut inner = self.parse_access_expression()?;
let mut inner = self.parse_postfix_expression()?;
for op in ops.into_iter().rev() {
let operation = match op.token {
Token::Not => UnaryOperation::Not,
Token::Minus => UnaryOperation::Negate,
Token::BitNot => UnaryOperation::BitNot,
// Token::BitNot => UnaryOperation::BitNot,
_ => unimplemented!(),
};
// hack for const signed integer overflow issues
@ -399,7 +398,7 @@ impl ParserContext {
///
/// Otherwise, tries to parse the next token using [`parse_primary_expression`].
///
pub fn parse_access_expression(&mut self) -> SyntaxResult<Expression> {
pub fn parse_postfix_expression(&mut self) -> SyntaxResult<Expression> {
let mut expr = self.parse_primary_expression()?;
while let Some(token) = self.eat_any(&[Token::LeftSquare, Token::Dot, Token::LeftParen, Token::DoubleColon]) {
match token.token {
@ -516,7 +515,7 @@ impl ParserContext {
/// Returns an [`Expression`] AST node if the next tokens represent a
/// circuit initialization expression.
///
pub fn parse_circuit_init(&mut self, identifier: Identifier) -> SyntaxResult<Expression> {
pub fn parse_circuit_expression(&mut self, identifier: Identifier) -> SyntaxResult<Expression> {
self.expect(Token::LeftCurly)?;
let mut members = Vec::new();
let end_span;
@ -550,11 +549,102 @@ impl ParserContext {
}))
}
///
/// Returns an [`Expression`] AST node if the next tokens represent a
/// tuple initialization expression.
///
pub fn parse_tuple_expression(&mut self, span: &Span) -> SyntaxResult<Expression> {
if let Some((left, right, span)) = self.eat_group_partial()? {
return Ok(Expression::Value(ValueExpression::Group(Box::new(GroupValue::Tuple(
GroupTuple {
span,
x: left,
y: right,
},
)))));
}
let mut args = Vec::new();
let end_span;
loop {
let end = self.eat(Token::RightParen);
if let Some(end) = end {
end_span = end.span;
break;
}
let expr = self.parse_expression()?;
args.push(expr);
if self.eat(Token::Comma).is_none() {
end_span = self.expect(Token::RightParen)?;
break;
}
}
if args.len() == 1 {
Ok(args.remove(0))
} else {
Ok(Expression::TupleInit(TupleInitExpression {
span: span + &end_span,
elements: args,
}))
}
}
///
/// Returns an [`Expression`] AST node if the next tokens represent an
/// array initialization expression.
///
pub fn parse_array_expression(&mut self, span: &Span) -> SyntaxResult<Expression> {
if let Some(end) = self.eat(Token::RightSquare) {
return Ok(Expression::ArrayInline(ArrayInlineExpression {
elements: Vec::new(),
span: span + &end.span,
}));
}
let first = self.parse_spread_or_expression()?;
if self.eat(Token::Semicolon).is_some() {
let dimensions = self.parse_array_dimensions()?;
let end = self.expect(Token::RightSquare)?;
let first = match first {
SpreadOrExpression::Spread(first) => {
let span = span + first.span();
return Err(SyntaxError::spread_in_array_init(&span));
}
SpreadOrExpression::Expression(x) => x,
};
Ok(Expression::ArrayInit(ArrayInitExpression {
span: span + &end,
element: Box::new(first),
dimensions,
}))
} else {
let end_span;
let mut elements = vec![first];
loop {
if let Some(token) = self.eat(Token::RightSquare) {
end_span = token.span;
break;
}
if elements.len() == 1 {
self.expect(Token::Comma)?;
}
elements.push(self.parse_spread_or_expression()?);
if self.eat(Token::Comma).is_none() {
end_span = self.expect(Token::RightSquare)?;
break;
}
}
Ok(Expression::ArrayInline(ArrayInlineExpression {
elements,
span: span + &end_span,
}))
}
}
///
/// Returns an [`Expression`] AST node if the next token is a primary expression:
/// - Scalar types: field, group, unsigned integer, signed integer, boolean, address
/// - Literals: field, group, unsigned integer, signed integer, boolean, address
/// - Aggregate types: array, tuple
/// - Identifiers: variables, keywords
/// - self
///
/// Returns an expression error if the token cannot be matched.
///
@ -567,19 +657,28 @@ impl ParserContext {
Some(SpannedToken {
token: Token::Field,
span: type_span,
}) => Expression::Value(ValueExpression::Field(value, span + type_span)),
}) => {
unexpected_whitespace(&span, &type_span, &value, "field")?;
Expression::Value(ValueExpression::Field(value, span + type_span))
}
Some(SpannedToken {
token: Token::Group,
span: type_span,
}) => Expression::Value(ValueExpression::Group(Box::new(GroupValue::Single(
value,
span + type_span,
)))),
Some(SpannedToken { token, span: type_span }) => Expression::Value(ValueExpression::Integer(
Self::token_to_int_type(token).expect("unknown int type token"),
value,
span + type_span,
)),
}) => {
unexpected_whitespace(&span, &type_span, &value, "group")?;
Expression::Value(ValueExpression::Group(Box::new(GroupValue::Single(
value,
span + type_span,
))))
}
Some(SpannedToken { token, span: type_span }) => {
unexpected_whitespace(&span, &type_span, &value, &token.to_string())?;
Expression::Value(ValueExpression::Integer(
Self::token_to_int_type(token).expect("unknown int type token"),
value,
span + type_span,
))
}
None => Expression::Value(ValueExpression::Implicit(value, span)),
}
}
@ -602,90 +701,12 @@ impl ParserContext {
let end = self.expect(Token::RightParen)?;
Expression::Value(ValueExpression::Address(value, span + end))
}
Token::LeftParen => {
if let Some((left, right, span)) = self.eat_group_partial() {
return Ok(Expression::Value(ValueExpression::Group(Box::new(GroupValue::Tuple(
GroupTuple {
span,
x: left,
y: right,
},
)))));
}
let mut args = Vec::new();
let end_span;
loop {
let end = self.eat(Token::RightParen);
if let Some(end) = end {
end_span = end.span;
break;
}
let expr = self.parse_expression()?;
args.push(expr);
if self.eat(Token::Comma).is_none() {
end_span = self.expect(Token::RightParen)?;
break;
}
}
if args.len() == 1 {
args.remove(0)
} else {
Expression::TupleInit(TupleInitExpression {
span: span + end_span,
elements: args,
})
}
}
Token::LeftSquare => {
if let Some(end) = self.eat(Token::RightSquare) {
return Ok(Expression::ArrayInline(ArrayInlineExpression {
elements: Vec::new(),
span: span + end.span,
}));
}
let first = self.parse_spread_or_expression()?;
if self.eat(Token::Semicolon).is_some() {
let dimensions = self.parse_array_dimensions()?;
let end = self.expect(Token::RightSquare)?;
let first = match first {
SpreadOrExpression::Spread(first) => {
let span = &span + first.span();
return Err(SyntaxError::spread_in_array_init(&span));
}
SpreadOrExpression::Expression(x) => x,
};
Expression::ArrayInit(ArrayInitExpression {
span: span + end,
element: Box::new(first),
dimensions,
})
} else {
let end_span;
let mut elements = vec![first];
loop {
if let Some(token) = self.eat(Token::RightSquare) {
end_span = token.span;
break;
}
if elements.len() == 1 {
self.expect(Token::Comma)?;
}
elements.push(self.parse_spread_or_expression()?);
if self.eat(Token::Comma).is_none() {
end_span = self.expect(Token::RightSquare)?;
break;
}
}
Expression::ArrayInline(ArrayInlineExpression {
elements,
span: span + end_span,
})
}
}
Token::LeftParen => self.parse_tuple_expression(&span)?,
Token::LeftSquare => self.parse_array_expression(&span)?,
Token::Ident(name) => {
let ident = Identifier { name, span };
if !self.fuzzy_struct_state && self.peek()?.token == Token::LeftCurly {
self.parse_circuit_init(ident)?
self.parse_circuit_expression(ident)?
} else {
Expression::Identifier(ident)
}
@ -696,7 +717,7 @@ impl ParserContext {
span,
};
if !self.fuzzy_struct_state && self.peek()?.token == Token::LeftCurly {
self.parse_circuit_init(ident)?
self.parse_circuit_expression(ident)?
} else {
Expression::Identifier(ident)
}

View File

@ -41,7 +41,7 @@ impl ParserContext {
circuits.insert(id, circuit);
}
Token::Function | Token::At => {
let (id, function) = self.parse_function()?;
let (id, function) = self.parse_function_declaration()?;
functions.insert(id, function);
}
Token::Ident(ident) if ident.as_ref() == "test" => {
@ -49,7 +49,7 @@ impl ParserContext {
&token.span,
)));
// self.expect(Token::Test)?;
// let (id, function) = self.parse_function()?;
// let (id, function) = self.parse_function_declaration()?;
// tests.insert(id, TestFunction {
// function,
// input_file: None,
@ -90,6 +90,9 @@ impl ParserContext {
&name.span,
)));
}
unexpected_whitespace(&start, &name.span, &name.name, "@")?;
let end_span;
let arguments = if self.eat(Token::LeftParen).is_some() {
let mut args = Vec::new();
@ -155,7 +158,7 @@ impl ParserContext {
token: Token::Ident(name.name),
span: name.span,
});
Ok(match self.parse_package_or_packages()? {
Ok(match self.parse_package_path()? {
PackageOrPackages::Package(p) => PackageAccess::SubPackage(Box::new(p)),
PackageOrPackages::Packages(p) => PackageAccess::Multiple(p),
})
@ -234,7 +237,7 @@ impl ParserContext {
/// Returns a [`PackageOrPackages`] AST node if the next tokens represent a valid package import
/// with accesses.
///
pub fn parse_package_or_packages(&mut self) -> SyntaxResult<PackageOrPackages> {
pub fn parse_package_path(&mut self) -> SyntaxResult<PackageOrPackages> {
let package_name = self.parse_package_name()?;
self.expect(Token::Dot)?;
if self.peek()?.token == Token::LeftParen {
@ -259,7 +262,7 @@ impl ParserContext {
///
pub fn parse_import(&mut self) -> SyntaxResult<ImportStatement> {
self.expect(Token::Import)?;
let package_or_packages = self.parse_package_or_packages()?;
let package_or_packages = self.parse_package_path()?;
self.expect(Token::Semicolon)?;
Ok(ImportStatement {
span: package_or_packages.span().clone(),
@ -274,7 +277,7 @@ impl ParserContext {
pub fn parse_circuit_member(&mut self) -> SyntaxResult<CircuitMember> {
let peeked = &self.peek()?.token;
if peeked == &Token::Function || peeked == &Token::At {
let function = self.parse_function()?;
let function = self.parse_function_declaration()?;
Ok(CircuitMember::CircuitFunction(function.1))
} else {
// circuit variable
@ -308,7 +311,7 @@ impl ParserContext {
///
/// Returns a [`FunctionInput`] AST node if the next tokens represent a function parameter.
///
pub fn parse_function_input(&mut self) -> SyntaxResult<FunctionInput> {
pub fn parse_function_parameters(&mut self) -> SyntaxResult<FunctionInput> {
if let Some(token) = self.eat(Token::Input) {
return Ok(FunctionInput::InputKeyword(InputKeyword {
identifier: Identifier {
@ -364,7 +367,7 @@ impl ParserContext {
/// Returns an [`(Identifier, Function)`] AST node if the next tokens represent a function name
/// and function definition.
///
pub fn parse_function(&mut self) -> SyntaxResult<(Identifier, Function)> {
pub fn parse_function_declaration(&mut self) -> SyntaxResult<(Identifier, Function)> {
let mut annotations = Vec::new();
while self.peek()?.token == Token::At {
annotations.push(self.parse_annotation()?);
@ -374,7 +377,7 @@ impl ParserContext {
self.expect(Token::LeftParen)?;
let mut inputs = Vec::new();
while self.eat(Token::RightParen).is_none() {
let input = self.parse_function_input()?;
let input = self.parse_function_parameters()?;
inputs.push(input);
if self.eat(Token::Comma).is_none() {
self.expect(Token::RightParen)?;

View File

@ -41,3 +41,14 @@ pub fn parse(path: &str, source: &str) -> SyntaxResult<Program> {
tokens.parse_program()
}
pub fn unexpected_whitespace(left_span: &Span, right_span: &Span, left: &str, right: &str) -> SyntaxResult<()> {
if left_span.col_stop != right_span.col_start {
let mut error_span = left_span + right_span;
error_span.col_start = left_span.col_stop - 1;
error_span.col_stop = right_span.col_start - 1;
return Err(SyntaxError::unexpected_whitespace(left, right, &error_span));
}
Ok(())
}
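
For context, the new unexpected_whitespace helper compares the ending column of one span with the starting column of the next and raises SyntaxError::unexpected_whitespace when they differ. Besides typed literals and group tuples, the parser applies it to annotations, so the annotation name presumably must follow `@` directly. A hedged Leo sketch of the spacing rule (the annotation and function names are illustrative):

@test                        // accepted form: the name follows `@` with no space
function with_annotation() {
    // `@ test`, written with a space after `@`, would presumably now be reported
    // as unexpected whitespace instead of being parsed.
}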

View File

@ -23,15 +23,15 @@ const ASSIGN_TOKENS: &[Token] = &[
Token::MulEq,
Token::DivEq,
Token::ExpEq,
Token::BitAndEq,
Token::BitOrEq,
Token::BitXorEq,
Token::ShlEq,
Token::ShrEq,
Token::ShrSignedEq,
Token::ModEq,
Token::OrEq,
Token::AndEq,
// Token::BitAndEq,
// Token::BitOrEq,
// Token::BitXorEq,
// Token::ShlEq,
// Token::ShrEq,
// Token::ShrSignedEq,
// Token::ModEq,
// Token::OrEq,
// Token::AndEq,
];
impl ParserContext {
@ -89,48 +89,53 @@ impl ParserContext {
match &self.peek()?.token {
Token::Return => Ok(Statement::Return(self.parse_return_statement()?)),
Token::If => Ok(Statement::Conditional(self.parse_conditional_statement()?)),
Token::For => Ok(Statement::Iteration(self.parse_for_statement()?)),
Token::For => Ok(Statement::Iteration(self.parse_loop_statement()?)),
Token::Console => Ok(Statement::Console(self.parse_console_statement()?)),
Token::Let | Token::Const => Ok(Statement::Definition(self.parse_definition_statement()?)),
Token::LeftCurly => Ok(Statement::Block(self.parse_block()?)),
_ => {
let expr = self.parse_expression()?;
_ => Ok(self.parse_assign_statement()?),
}
}
if let Some(operator) = self.eat_any(ASSIGN_TOKENS) {
let value = self.parse_expression()?;
let assignee = Self::construct_assignee(expr)?;
self.expect(Token::Semicolon)?;
Ok(Statement::Assign(AssignStatement {
span: &assignee.span + value.span(),
assignee,
operation: match operator.token {
Token::Assign => AssignOperation::Assign,
Token::AddEq => AssignOperation::Add,
Token::MinusEq => AssignOperation::Sub,
Token::MulEq => AssignOperation::Mul,
Token::DivEq => AssignOperation::Div,
Token::ExpEq => AssignOperation::Pow,
Token::OrEq => AssignOperation::Or,
Token::AndEq => AssignOperation::And,
Token::BitOrEq => AssignOperation::BitOr,
Token::BitAndEq => AssignOperation::BitAnd,
Token::BitXorEq => AssignOperation::BitXor,
Token::ShrEq => AssignOperation::Shr,
Token::ShrSignedEq => AssignOperation::ShrSigned,
Token::ShlEq => AssignOperation::Shl,
Token::ModEq => AssignOperation::Mod,
_ => unimplemented!(),
},
value,
}))
} else {
self.expect(Token::Semicolon)?;
Ok(Statement::Expression(ExpressionStatement {
span: expr.span().clone(),
expression: expr,
}))
}
}
///
/// Returns a [`Statement`] AST node if the next tokens represent an assign or expression statement.
///
pub fn parse_assign_statement(&mut self) -> SyntaxResult<Statement> {
let expr = self.parse_expression()?;
if let Some(operator) = self.eat_any(ASSIGN_TOKENS) {
let value = self.parse_expression()?;
let assignee = Self::construct_assignee(expr)?;
self.expect(Token::Semicolon)?;
Ok(Statement::Assign(AssignStatement {
span: &assignee.span + value.span(),
assignee,
operation: match operator.token {
Token::Assign => AssignOperation::Assign,
Token::AddEq => AssignOperation::Add,
Token::MinusEq => AssignOperation::Sub,
Token::MulEq => AssignOperation::Mul,
Token::DivEq => AssignOperation::Div,
Token::ExpEq => AssignOperation::Pow,
// Token::OrEq => AssignOperation::Or,
// Token::AndEq => AssignOperation::And,
// Token::BitOrEq => AssignOperation::BitOr,
// Token::BitAndEq => AssignOperation::BitAnd,
// Token::BitXorEq => AssignOperation::BitXor,
// Token::ShrEq => AssignOperation::Shr,
// Token::ShrSignedEq => AssignOperation::ShrSigned,
// Token::ShlEq => AssignOperation::Shl,
// Token::ModEq => AssignOperation::Mod,
_ => unimplemented!(),
},
value,
}))
} else {
self.expect(Token::Semicolon)?;
Ok(Statement::Expression(ExpressionStatement {
span: expr.span().clone(),
expression: expr,
}))
}
}
@ -176,7 +181,7 @@ impl ParserContext {
pub fn parse_conditional_statement(&mut self) -> SyntaxResult<ConditionalStatement> {
let start = self.expect(Token::If)?;
self.fuzzy_struct_state = true;
let expr = self.parse_expression_fuzzy()?;
let expr = self.parse_conditional_expression()?;
self.fuzzy_struct_state = false;
let body = self.parse_block()?;
let next = if self.eat(Token::Else).is_some() {
@ -196,14 +201,14 @@ impl ParserContext {
///
/// Returns an [`IterationStatement`] AST node if the next tokens represent an iteration statement.
///
pub fn parse_for_statement(&mut self) -> SyntaxResult<IterationStatement> {
pub fn parse_loop_statement(&mut self) -> SyntaxResult<IterationStatement> {
let start_span = self.expect(Token::For)?;
let ident = self.expect_ident()?;
self.expect(Token::In)?;
let start = self.parse_expression()?;
self.expect(Token::DotDot)?;
self.fuzzy_struct_state = true;
let stop = self.parse_expression_fuzzy()?;
let stop = self.parse_conditional_expression()?;
self.fuzzy_struct_state = false;
let block = self.parse_block()?;

View File

@ -45,9 +45,10 @@ fn eat_identifier(input_tendril: &StrTendril) -> Option<StrTendril> {
return None;
}
let input = input_tendril[..].as_bytes();
if !input[0].is_ascii_alphabetic() && input[0] != b'_' {
if !input[0].is_ascii_alphabetic() {
return None;
}
let mut i = 1usize;
while i < input.len() {
if !input[i].is_ascii_alphanumeric() && input[i] != b'_' {
@ -162,17 +163,19 @@ impl Token {
}
b'&' => {
if let Some(len) = eat(input, "&&") {
if let Some(inner_len) = eat(&input[len..], "=") {
return (len + inner_len, Some(Token::AndEq));
}
// if let Some(inner_len) = eat(&input[len..], "=") {
// return (len + inner_len, Some(Token::AndEq));
// }
return (len, Some(Token::And));
} else if let Some(len) = eat(input, "&=") {
return (len, Some(Token::BitAndEq));
}
return (1, Some(Token::BitAnd));
// else if let Some(len) = eat(input, "&=") {
// return (len, Some(Token::BitAndEq));
// }
// return (1, Some(Token::BitAnd));
}
b'(' => return (1, Some(Token::LeftParen)),
b')' => return (1, Some(Token::RightParen)),
b'_' => return (1, Some(Token::Underscore)),
b'*' => {
if let Some(len) = eat(input, "**") {
if let Some(inner_len) = eat(&input[len..], "=") {
@ -235,28 +238,30 @@ impl Token {
b'<' => {
if let Some(len) = eat(input, "<=") {
return (len, Some(Token::LtEq));
} else if let Some(len) = eat(input, "<<") {
if let Some(inner_len) = eat(&input[len..], "=") {
return (len + inner_len, Some(Token::ShlEq));
}
return (len, Some(Token::Shl));
}
// else if let Some(len) = eat(input, "<<") {
// if let Some(inner_len) = eat(&input[len..], "=") {
// return (len + inner_len, Some(Token::ShlEq));
// }
// return (len, Some(Token::Shl));
// }
return (1, Some(Token::Lt));
}
b'>' => {
if let Some(len) = eat(input, ">=") {
return (len, Some(Token::GtEq));
} else if let Some(len) = eat(input, ">>") {
if let Some(inner_len) = eat(&input[len..], "=") {
return (len + inner_len, Some(Token::ShrEq));
} else if let Some(inner_len) = eat(&input[len..], ">") {
if let Some(eq_len) = eat(&input[len + inner_len..], "=") {
return (len + inner_len + eq_len, Some(Token::ShrSignedEq));
}
return (len + inner_len, Some(Token::ShrSigned));
}
return (len, Some(Token::Shr));
}
// else if let Some(len) = eat(input, ">>") {
// if let Some(inner_len) = eat(&input[len..], "=") {
// return (len + inner_len, Some(Token::ShrEq));
// } else if let Some(inner_len) = eat(&input[len..], ">") {
// if let Some(eq_len) = eat(&input[len + inner_len..], "=") {
// return (len + inner_len + eq_len, Some(Token::ShrSignedEq));
// }
// return (len + inner_len, Some(Token::ShrSigned));
// }
// return (len, Some(Token::Shr));
// }
return (1, Some(Token::Gt));
}
b'=' => {
@ -272,28 +277,29 @@ impl Token {
b'}' => return (1, Some(Token::RightCurly)),
b'|' => {
if let Some(len) = eat(input, "||") {
if let Some(inner_len) = eat(&input[len..], "=") {
return (len + inner_len, Some(Token::OrEq));
}
// if let Some(inner_len) = eat(&input[len..], "=") {
// return (len + inner_len, Some(Token::OrEq));
// }
return (len, Some(Token::Or));
} else if let Some(len) = eat(input, "|=") {
return (len, Some(Token::BitOrEq));
}
return (1, Some(Token::BitOr));
}
b'^' => {
if let Some(len) = eat(input, "^=") {
return (len, Some(Token::BitXorEq));
}
return (1, Some(Token::BitXor));
}
b'~' => return (1, Some(Token::BitNot)),
b'%' => {
if let Some(len) = eat(input, "%=") {
return (len, Some(Token::ModEq));
}
return (1, Some(Token::Mod));
// else if let Some(len) = eat(input, "|=") {
// return (len, Some(Token::BitOrEq));
// }
// return (1, Some(Token::BitOr));
}
// b'^' => {
// if let Some(len) = eat(input, "^=") {
// return (len, Some(Token::BitXorEq));
// }
// return (1, Some(Token::BitXor));
// }
// b'~' => return (1, Some(Token::BitNot)),
// b'%' => {
// if let Some(len) = eat(input, "%=") {
// return (len, Some(Token::ModEq));
// }
// return (1, Some(Token::Mod));
// }
_ => (),
}
if let Some(ident) = eat_identifier(&input_tendril) {
@ -309,6 +315,7 @@ impl Token {
"as" => Token::As,
"bool" => Token::Bool,
"circuit" => Token::Circuit,
"console" => Token::Console,
"const" => Token::Const,
"else" => Token::Else,
"false" => Token::False,
@ -316,11 +323,11 @@ impl Token {
"for" => Token::For,
"function" => Token::Function,
"group" => Token::Group,
"i128" => Token::I128,
"i64" => Token::I64,
"i32" => Token::I32,
"i16" => Token::I16,
"i8" => Token::I8,
"i16" => Token::I16,
"i32" => Token::I32,
"i64" => Token::I64,
"i128" => Token::I128,
"if" => Token::If,
"import" => Token::Import,
"in" => Token::In,
@ -328,17 +335,16 @@ impl Token {
"let" => Token::Let,
"mut" => Token::Mut,
"return" => Token::Return,
"static" => Token::Static,
"string" => Token::Str,
"true" => Token::True,
"u128" => Token::U128,
"u64" => Token::U64,
"u32" => Token::U32,
"u16" => Token::U16,
"u8" => Token::U8,
"Self" => Token::BigSelf,
"self" => Token::LittleSelf,
"console" => Token::Console,
"static" => Token::Static,
"string" => Token::String,
"true" => Token::True,
"u8" => Token::U8,
"u16" => Token::U16,
"u32" => Token::U32,
"u64" => Token::U64,
"u128" => Token::U128,
_ => Token::Ident(ident),
}),
);
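
Taken together with the expression and statement changes above, the lexer now recognizes only `&&` and `||` from this operator family; the single-character bitwise operators, shifts, modulus, and their compound-assignment forms are commented out (the token test above lists them as disabled). A hedged Leo sketch of source that would presumably no longer tokenize, with the affected operators kept in comments:

function main() {
    // let c = 1u8 & 2u8;   // `&`, `|`, `^`, `~` are no longer produced by the lexer
    // let d = 1u8 % 2u8;   // `%` and `%=` are no longer produced
    // let e = 1u8 << 2u8;  // `<<`, `>>`, `>>>` and their `=` forms are no longer produced
    // flag &&= true;       // `&&=` and `||=` compound assignments are also disabled
}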

View File

@ -110,6 +110,24 @@ mod tests {
#[test]
fn test_tokenizer() {
// &
// &=
// |
// |=
// ^
// ^=
// ~
// <<
// <<=
// >>
// >>=
// >>>
// >>>=
// %
// %=
// ||=
// &&=
let tokens = tokenize(
"test_path",
r#"
@ -175,6 +193,7 @@ mod tests {
-
-=
->
_
.
..
...
@ -195,23 +214,6 @@ mod tests {
{{
}}
||
&
&=
|
|=
^
^=
~
<<
<<=
>>
>>=
>>>
>>>=
%
%=
||=
&&=
?
// test
/* test */
@ -223,9 +225,10 @@ mod tests {
for SpannedToken { token, .. } in tokens.iter() {
output += &format!("{} ", token.to_string());
}
// & &= | |= ^ ^= ~ << <<= >> >>= >>> >>>= % %= ||= &&=
assert_eq!(
output,
r#""test" "test{}test" "test{}" "{}test" "test{" "test}" "test{test" "test}test" "te{{}}" aleo1qnr4dkkvkgfqph0vzc3y6z2eu975wnpz2925ntjccd5cfqxtyu8sta57j8 test_ident 12345 address as bool circuit const else false field for function group i128 i64 i32 i16 i8 if import in input let mut return static string test true u128 u64 u32 u16 u8 self Self console ! != && ( ) * ** **= *= + += , - -= -> . .. ... / /= : :: ; < <= = == > >= @ [ ] { { } } || & &= | |= ^ ^= ~ << <<= >> >>= >>> >>>= % %= ||= &&= ? // test
r#""test" "test{}test" "test{}" "{}test" "test{" "test}" "test{test" "test}test" "te{{}}" aleo1qnr4dkkvkgfqph0vzc3y6z2eu975wnpz2925ntjccd5cfqxtyu8sta57j8 test_ident 12345 address as bool circuit const else false field for function group i128 i64 i32 i16 i8 if import in input let mut return static string test true u128 u64 u32 u16 u8 self Self console ! != && ( ) * ** **= *= + += , - -= -> _ . .. ... / /= : :: ; < <= = == > >= @ [ ] { { } } || ? // test
/* test */ // "#
);
}
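A hedged usage sketch mirroring the test above; the exact `tokenize` signature, its error type, and the conversion of the source string are assumptions for illustration:

// Hypothetical usage, assuming tokenize(path, source) -> Result<Vec<SpannedToken>, _>.
let tokens = tokenize("test_path", "let x = 1u32;".into()).expect("failed to tokenize");
for SpannedToken { token, .. } in tokens.iter() {
    // Prints each token via its Display impl, as the test does.
    print!("{} ", token);
}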

View File

@ -37,99 +37,115 @@ impl fmt::Display for FormattedStringPart {
/// Represents all valid Leo syntax tokens.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Token {
FormattedString(Vec<FormattedStringPart>),
AddressLit(#[serde(with = "leo_ast::common::tendril_json")] StrTendril),
Ident(#[serde(with = "leo_ast::common::tendril_json")] StrTendril),
Int(#[serde(with = "leo_ast::common::tendril_json")] StrTendril),
// Lexical Grammar
// Literals
CommentLine(#[serde(with = "leo_ast::common::tendril_json")] StrTendril),
CommentBlock(#[serde(with = "leo_ast::common::tendril_json")] StrTendril),
FormattedString(Vec<FormattedStringPart>),
Ident(#[serde(with = "leo_ast::common::tendril_json")] StrTendril),
Int(#[serde(with = "leo_ast::common::tendril_json")] StrTendril),
True,
False,
AddressLit(#[serde(with = "leo_ast::common::tendril_json")] StrTendril),
At,
// Symbols
Not,
NotEq,
And,
Or,
Eq,
NotEq,
Lt,
LtEq,
Gt,
GtEq,
Add,
Minus,
Mul,
Div,
Exp,
Assign,
AddEq,
MinusEq,
MulEq,
DivEq,
ExpEq,
LeftParen,
RightParen,
Mul,
Exp,
ExpEq,
MulEq,
Add,
AddEq,
LeftSquare,
RightSquare,
LeftCurly,
RightCurly,
Comma,
Minus,
MinusEq,
Arrow,
Dot,
DotDot,
DotDotDot,
Div,
DivEq,
Semicolon,
Colon,
DoubleColon,
Semicolon,
Lt,
LtEq,
Assign,
Eq,
Gt,
GtEq,
At,
LeftSquare,
RightSquare,
Address,
As,
Question,
Arrow,
Underscore,
// Syntactic Grammar
// Types
U8,
U16,
U32,
U64,
U128,
I8,
I16,
I32,
I64,
I128,
Field,
Group,
Bool,
Address,
BigSelf,
// primary expression
Input,
LittleSelf,
// Import
Import,
// Regular Keywords
As,
Circuit,
Console,
Const,
Else,
False,
Field,
For,
Function,
Group,
I128,
I64,
I32,
I16,
I8,
If,
Import,
In,
Input,
Let,
Mut,
Return,
Static,
Str,
True,
U128,
U64,
U32,
U16,
U8,
BigSelf,
LittleSelf,
Console,
LeftCurly,
RightCurly,
Or,
BitAnd,
BitAndEq,
BitOr,
BitOrEq,
BitXor,
BitXorEq,
BitNot,
Shl,
ShlEq,
Shr,
ShrEq,
ShrSigned,
ShrSignedEq,
Mod,
ModEq,
OrEq,
AndEq,
Question,
String,
// Not yet in ABNF
// BitAnd,
// BitAndEq,
// BitOr,
// BitOrEq,
// BitXor,
// BitXorEq,
// BitNot,
// Shl,
// ShlEq,
// Shr,
// ShrEq,
// ShrSigned,
// ShrSignedEq,
// Mod,
// ModEq,
// OrEq,
// AndEq,
}
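A hedged sketch of how downstream code might branch on these variants; the helper name here is made up for illustration and is not part of the diff:

fn is_integer_type(token: &Token) -> bool {
    // Groups the integer-type keyword tokens declared in the enum above.
    matches!(
        token,
        Token::U8 | Token::U16 | Token::U32 | Token::U64 | Token::U128
            | Token::I8 | Token::I16 | Token::I32 | Token::I64 | Token::I128
    )
}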
/// Represents all valid Leo keyword tokens.
@ -138,6 +154,7 @@ pub const KEYWORD_TOKENS: &[Token] = &[
Token::As,
Token::Bool,
Token::Circuit,
Token::Console,
Token::Const,
Token::Else,
Token::False,
@ -145,11 +162,11 @@ pub const KEYWORD_TOKENS: &[Token] = &[
Token::For,
Token::Function,
Token::Group,
Token::I128,
Token::I64,
Token::I32,
Token::I16,
Token::I8,
Token::I16,
Token::I32,
Token::I64,
Token::I128,
Token::If,
Token::Import,
Token::In,
@ -157,17 +174,16 @@ pub const KEYWORD_TOKENS: &[Token] = &[
Token::Let,
Token::Mut,
Token::Return,
Token::Static,
Token::Str,
Token::True,
Token::U128,
Token::U64,
Token::U32,
Token::U16,
Token::U8,
Token::BigSelf,
Token::LittleSelf,
Token::Console,
Token::Static,
Token::String,
Token::True,
Token::U8,
Token::U16,
Token::U32,
Token::U64,
Token::U128,
];
impl Token {
@ -183,6 +199,8 @@ impl fmt::Display for Token {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
use Token::*;
match self {
CommentLine(s) => write!(f, "{}", s),
CommentBlock(s) => write!(f, "{}", s),
FormattedString(parts) => {
// TODO: handle string escapes when displaying formatted strings
write!(f, "\"")?;
@ -191,98 +209,103 @@ impl fmt::Display for Token {
}
write!(f, "\"")
}
AddressLit(s) => write!(f, "{}", s),
Ident(s) => write!(f, "{}", s),
Int(s) => write!(f, "{}", s),
CommentLine(s) => write!(f, "{}", s),
CommentBlock(s) => write!(f, "{}", s),
True => write!(f, "true"),
False => write!(f, "false"),
AddressLit(s) => write!(f, "{}", s),
At => write!(f, "@"),
Not => write!(f, "!"),
NotEq => write!(f, "!="),
And => write!(f, "&&"),
Or => write!(f, "||"),
Eq => write!(f, "=="),
NotEq => write!(f, "!="),
Lt => write!(f, "<"),
LtEq => write!(f, "<="),
Gt => write!(f, ">"),
GtEq => write!(f, ">="),
Add => write!(f, "+"),
Minus => write!(f, "-"),
Mul => write!(f, "*"),
Div => write!(f, "/"),
Exp => write!(f, "**"),
Assign => write!(f, "="),
AddEq => write!(f, "+="),
MinusEq => write!(f, "-="),
MulEq => write!(f, "*="),
DivEq => write!(f, "/="),
ExpEq => write!(f, "**="),
LeftParen => write!(f, "("),
RightParen => write!(f, ")"),
Mul => write!(f, "*"),
Exp => write!(f, "**"),
ExpEq => write!(f, "**="),
MulEq => write!(f, "*="),
Add => write!(f, "+"),
AddEq => write!(f, "+="),
LeftSquare => write!(f, "["),
RightSquare => write!(f, "]"),
LeftCurly => write!(f, "{{"),
RightCurly => write!(f, "}}"),
Comma => write!(f, ","),
Minus => write!(f, "-"),
MinusEq => write!(f, "-="),
Arrow => write!(f, "->"),
Dot => write!(f, "."),
DotDot => write!(f, ".."),
DotDotDot => write!(f, "..."),
Div => write!(f, "/"),
DivEq => write!(f, "/="),
Semicolon => write!(f, ";"),
Colon => write!(f, ":"),
DoubleColon => write!(f, "::"),
Semicolon => write!(f, ";"),
Lt => write!(f, "<"),
LtEq => write!(f, "<="),
Assign => write!(f, "="),
Eq => write!(f, "=="),
Gt => write!(f, ">"),
GtEq => write!(f, ">="),
At => write!(f, "@"),
LeftSquare => write!(f, "["),
RightSquare => write!(f, "]"),
Address => write!(f, "address"),
As => write!(f, "as"),
Question => write!(f, "?"),
Arrow => write!(f, "->"),
Underscore => write!(f, "_"),
U8 => write!(f, "u8"),
U16 => write!(f, "u16"),
U32 => write!(f, "u32"),
U64 => write!(f, "u64"),
U128 => write!(f, "u128"),
I8 => write!(f, "i8"),
I16 => write!(f, "i16"),
I32 => write!(f, "i32"),
I64 => write!(f, "i64"),
I128 => write!(f, "i128"),
Field => write!(f, "field"),
Group => write!(f, "group"),
Bool => write!(f, "bool"),
Address => write!(f, "address"),
BigSelf => write!(f, "Self"),
Input => write!(f, "input"),
LittleSelf => write!(f, "self"),
Import => write!(f, "import"),
As => write!(f, "as"),
Circuit => write!(f, "circuit"),
Console => write!(f, "console"),
Const => write!(f, "const"),
Else => write!(f, "else"),
False => write!(f, "false"),
Field => write!(f, "field"),
For => write!(f, "for"),
Function => write!(f, "function"),
Group => write!(f, "group"),
I128 => write!(f, "i128"),
I64 => write!(f, "i64"),
I32 => write!(f, "i32"),
I16 => write!(f, "i16"),
I8 => write!(f, "i8"),
If => write!(f, "if"),
Import => write!(f, "import"),
In => write!(f, "in"),
Input => write!(f, "input"),
Let => write!(f, "let"),
Mut => write!(f, "mut"),
Return => write!(f, "return"),
Static => write!(f, "static"),
Str => write!(f, "string"),
True => write!(f, "true"),
U128 => write!(f, "u128"),
U64 => write!(f, "u64"),
U32 => write!(f, "u32"),
U16 => write!(f, "u16"),
U8 => write!(f, "u8"),
BigSelf => write!(f, "Self"),
LittleSelf => write!(f, "self"),
Console => write!(f, "console"),
LeftCurly => write!(f, "{{"),
RightCurly => write!(f, "}}"),
Or => write!(f, "||"),
BitAnd => write!(f, "&"),
BitAndEq => write!(f, "&="),
BitOr => write!(f, "|"),
BitOrEq => write!(f, "|="),
BitXor => write!(f, "^"),
BitXorEq => write!(f, "^="),
BitNot => write!(f, "~"),
Shl => write!(f, "<<"),
ShlEq => write!(f, "<<="),
Shr => write!(f, ">>"),
ShrEq => write!(f, ">>="),
ShrSigned => write!(f, ">>>"),
ShrSignedEq => write!(f, ">>>="),
Mod => write!(f, "%"),
ModEq => write!(f, "%="),
OrEq => write!(f, "||="),
AndEq => write!(f, "&&="),
Question => write!(f, "?"),
String => write!(f, "string"),
// BitAnd => write!(f, "&"),
// BitAndEq => write!(f, "&="),
// BitOr => write!(f, "|"),
// BitOrEq => write!(f, "|="),
// BitXor => write!(f, "^"),
// BitXorEq => write!(f, "^="),
// BitNot => write!(f, "~"),
// Shl => write!(f, "<<"),
// ShlEq => write!(f, "<<="),
// Shr => write!(f, ">>"),
// ShrEq => write!(f, ">>="),
// ShrSigned => write!(f, ">>>"),
// ShrSignedEq => write!(f, ">>>="),
// Mod => write!(f, "%"),
// ModEq => write!(f, "%="),
// OrEq => write!(f, "||="),
// AndEq => write!(f, "&&="),
}
}
}
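A few illustrative round-trip checks that follow directly from the Display arms above (test scaffolding omitted; these lines are not part of the diff):

assert_eq!(Token::Arrow.to_string(), "->");
assert_eq!(Token::Underscore.to_string(), "_");
assert_eq!(Token::Console.to_string(), "console");
assert_eq!(Token::String.to_string(), "string");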

View File

@ -26,19 +26,19 @@ path = "../ast"
version = "1.2.3"
[dependencies.snarkvm-algorithms]
version = "0.2.1"
version = "0.2.2"
#default-features = false
[dependencies.snarkvm-curves]
version = "0.2.1"
version = "0.2.2"
default-features = false
[dependencies.snarkvm-dpc]
version = "0.2.1"
version = "0.2.2"
default-features = false
[dependencies.snarkvm-utilities]
version = "0.2.1"
version = "0.2.2"
[dependencies.indexmap]
version = "1.6.2"
@ -54,7 +54,7 @@ version = "0.3"
version = "1.0"
[dev-dependencies.snarkvm-storage]
version = "0.2.1"
version = "0.2.2"
[dev-dependencies.rand_core]
version = "0.6.2"

View File

@ -18,19 +18,19 @@ license = "GPL-3.0"
edition = "2018"
[dependencies.snarkvm-curves]
version = "0.2.1"
version = "0.2.2"
default-features = false
[dependencies.snarkvm-fields]
version = "0.2.1"
version = "0.2.2"
default-features = false
[dependencies.snarkvm-gadgets]
version = "0.2.1"
version = "0.2.2"
default-features = false
[dependencies.snarkvm-r1cs]
version = "0.2.1"
version = "0.2.2"
default-features = false
[dependencies.num-bigint]