Mirror of https://github.com/AleoHQ/leo.git (synced 2024-12-25 10:32:13 +03:00)

Commit dd4d909998: Merge branch 'master' into compiler-tests
@@ -158,6 +158,19 @@ jobs:
            export LEO=/home/circleci/project/project/bin/leo
            ./project/.circleci/leo-add-remove.sh

  leo-check-constraints:
    docker:
      - image: cimg/rust:1.50.0
    resource_class: xlarge
    steps:
      - attach_workspace:
          at: /home/circleci/project/
      - run:
          name: leo check constraints for Pedersen Hash
          command: |
            export LEO=/home/circleci/project/project/bin/leo
            ./project/.circleci/leo-check-constraints.sh

  leo-login-logout:
    docker:
      - image: cimg/rust:1.51.0

@@ -171,18 +184,18 @@ jobs:
            export LEO=/home/circleci/project/project/bin/leo
            ./project/.circleci/leo-login-logout.sh

#  leo-clone:
#    docker:
#      - image: cimg/rust:1.51.0
#    resource_class: xlarge
#    steps:
#      - attach_workspace:
#          at: /home/circleci/project/
#      - run:
#          name: leo clone
#          command: |
#            export LEO=/home/circleci/project/project/bin/leo
#            ./project/.circleci/leo-clone.sh
  leo-clone:
    docker:
      - image: cimg/rust:1.51.0
    resource_class: xlarge
    steps:
      - attach_workspace:
          at: /home/circleci/project/
      - run:
          name: leo clone
          command: |
            export LEO=/home/circleci/project/project/bin/leo
            ./project/.circleci/leo-clone.sh

  leo-publish:
    docker:

@@ -219,12 +232,15 @@ workflows:
      - leo-add-remove:
          requires:
            - leo-executable
      - leo-check-constraints:
          requires:
            - leo-executable
      - leo-login-logout:
          requires:
            - leo-executable
#      - leo-clone:
#          requires:
#            - leo-executable
      - leo-clone:
          requires:
            - leo-executable
      - leo-publish:
          requires:
            - leo-executable
.circleci/leo-check-constraints.sh (new executable file, 16 lines)
@@ -0,0 +1,16 @@
# leo new hello-world

cd ./project/examples/pedersen-hash

export PEDERSEN_HASH_CONSTRAINTS=1539;

# The line we are searching for in the build output is:
# `Build Number of constraints - 1539`
export ACTUAL_CONSTRAINTS=$($LEO build | grep constraints | awk '{print $NF}')

# Fail the job if the actual constraint count does not match the expected value.
[[ $PEDERSEN_HASH_CONSTRAINTS -eq $ACTUAL_CONSTRAINTS ]] || {
    echo >&2 "Number of constraints for Pedersen Hash is not $PEDERSEN_HASH_CONSTRAINTS";
    echo >&2 "Real number of constraints is $ACTUAL_CONSTRAINTS";
    exit 1;
}
Cargo.lock (generated, 230 lines changed)
@@ -75,18 +75,26 @@ version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28b2cd92db5cbd74e8e5028f7e27dd7aa3090e89e4f2a197cc7c8dfb69c7063b"

[[package]]
name = "arrayref"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4c527152e37cf757a3f78aae5a06fbeefdb07ccc535c980a3208ee3060dd544"

[[package]]
name = "arrayvec"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b"

[[package]]
name = "assert_cmd"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2475b58cd94eb4f70159f4fd8844ba3b807532fe3131b3373fae060bbe30396"
dependencies = [
 "bstr",
 "doc-comment",
 "predicates",
 "predicates-core",
 "predicates-tree",
 "wait-timeout",
]

[[package]]
name = "atty"
version = "0.2.14"

@@ -198,17 +206,6 @@ dependencies = [
 "opaque-debug 0.3.0",
]

[[package]]
name = "blake2b_simd"
version = "0.5.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "afa748e348ad3be8263be728124b24a24f268266f6f5d58af9d75f6a40b5c587"
dependencies = [
 "arrayref",
 "arrayvec",
 "constant_time_eq",
]

[[package]]
name = "block-buffer"
version = "0.7.3"

@@ -410,12 +407,6 @@ dependencies = [
 "winapi 0.3.9",
]

[[package]]
name = "constant_time_eq"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc"

[[package]]
name = "core-foundation"
version = "0.9.1"

@@ -601,6 +592,12 @@ dependencies = [
 "syn 1.0.64",
]

[[package]]
name = "difference"
version = "2.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198"

[[package]]
name = "digest"
version = "0.8.1"

@@ -621,24 +618,30 @@ dependencies = [

[[package]]
name = "dirs"
version = "3.0.1"
version = "3.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "142995ed02755914747cc6ca76fc7e4583cd18578746716d0508ea6ed558b9ff"
checksum = "30baa043103c9d0c2a57cf537cc2f35623889dc0d405e6c3cccfadbc81c71309"
dependencies = [
 "dirs-sys",
]

[[package]]
name = "dirs-sys"
version = "0.3.5"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e93d7f5705de3e49895a2b5e0b8855a1c27f080192ae9c32a6432d50741a57a"
checksum = "03d86534ed367a67548dc68113a0f5db55432fdfbb6e6f9d77704397d95d5780"
dependencies = [
 "libc",
 "redox_users",
 "winapi 0.3.9",
]

[[package]]
name = "doc-comment"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10"

[[package]]
name = "dtoa"
version = "0.4.8"

@@ -725,7 +728,7 @@ checksum = "1d34cfa13a63ae058bfa601fe9e313bbdb3746427c1459185464ce0fcf62e1e8"
dependencies = [
 "cfg-if 1.0.0",
 "libc",
 "redox_syscall 0.2.5",
 "redox_syscall",
 "winapi 0.3.9",
]

@@ -1309,8 +1312,8 @@ dependencies = [
 "leo-test-framework",
 "num-bigint",
 "pest",
 "rand",
 "rand_core",
 "rand 0.8.3",
 "rand_core 0.6.2",
 "rand_xorshift",
 "serde",
 "serde_yaml",

@@ -1323,6 +1326,7 @@ dependencies = [
 "snarkvm-r1cs",
 "snarkvm-utilities",
 "tempfile",
 "tendril",
 "thiserror",
 "tracing",
]

@@ -1357,6 +1361,7 @@ version = "1.4.0"
dependencies = [
 "ansi_term 0.12.1",
 "anyhow",
 "assert_cmd",
 "clap",
 "colored",
 "console",

@@ -1371,8 +1376,8 @@ dependencies = [
 "leo-state",
 "leo-synthesizer",
 "notify",
 "rand",
 "rand_core",
 "rand 0.8.3",
 "rand_core 0.6.2",
 "reqwest",
 "rusty-hook",
 "self_update",

@@ -1384,6 +1389,7 @@ dependencies = [
 "snarkvm-r1cs",
 "snarkvm-utilities",
 "structopt",
 "test_dir",
 "thiserror",
 "toml",
 "tracing",

@@ -1432,8 +1438,8 @@ dependencies = [
 "indexmap",
 "leo-ast",
 "leo-input",
 "rand",
 "rand_core",
 "rand 0.8.3",
 "rand_core 0.6.2",
 "rand_xorshift",
 "snarkvm-algorithms",
 "snarkvm-curves",

@@ -1907,7 +1913,7 @@ dependencies = [
 "cfg-if 1.0.0",
 "instant",
 "libc",
 "redox_syscall 0.2.5",
 "redox_syscall",
 "smallvec",
 "winapi 0.3.9",
]

@@ -2052,6 +2058,32 @@ version = "0.2.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac74c624d6b2d21f425f752262f42188365d7b8ff1aff74c82e45136510a4857"

[[package]]
name = "predicates"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "eeb433456c1a57cc93554dea3ce40b4c19c4057e41c55d4a0f3d84ea71c325aa"
dependencies = [
 "difference",
 "predicates-core",
]

[[package]]
name = "predicates-core"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57e35a3326b75e49aa85f5dc6ec15b41108cf5aee58eabb1f274dd18b73c2451"

[[package]]
name = "predicates-tree"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "15f553275e5721409451eb85e15fd9a860a6e5ab4496eb215987502b5f5391f2"
dependencies = [
 "predicates-core",
 "treeline",
]

[[package]]
name = "proc-macro-crate"
version = "0.1.5"

@@ -2142,6 +2174,19 @@ version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "941ba9d78d8e2f7ce474c015eea4d9c6d25b6a3327f9832ee29a4de27f91bbb8"

[[package]]
name = "rand"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
dependencies = [
 "getrandom 0.1.16",
 "libc",
 "rand_chacha 0.2.2",
 "rand_core 0.5.1",
 "rand_hc 0.2.0",
]

[[package]]
name = "rand"
version = "0.8.3"

@@ -2149,9 +2194,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ef9e7e66b4468674bfcb0c81af8b7fa0bb154fa9f28eb840da5c447baeb8d7e"
dependencies = [
 "libc",
 "rand_chacha",
 "rand_core",
 "rand_hc",
 "rand_chacha 0.3.0",
 "rand_core 0.6.2",
 "rand_hc 0.3.0",
]

[[package]]
name = "rand_chacha"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402"
dependencies = [
 "ppv-lite86",
 "rand_core 0.5.1",
]

[[package]]

@@ -2161,7 +2216,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e12735cf05c9e10bf21534da50a147b924d555dc7a547c42e6bb2d5b6017ae0d"
dependencies = [
 "ppv-lite86",
 "rand_core",
 "rand_core 0.6.2",
]

[[package]]
name = "rand_core"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
dependencies = [
 "getrandom 0.1.16",
]

[[package]]

@@ -2173,13 +2237,22 @@ dependencies = [
 "getrandom 0.2.2",
]

[[package]]
name = "rand_hc"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c"
dependencies = [
 "rand_core 0.5.1",
]

[[package]]
name = "rand_hc"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3190ef7066a446f2e7f42e239d161e905420ccab01eb967c9eb27d21b2322a73"
dependencies = [
 "rand_core",
 "rand_core 0.6.2",
]

[[package]]

@@ -2188,7 +2261,7 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f"
dependencies = [
 "rand_core",
 "rand_core 0.6.2",
]

[[package]]

@@ -2216,12 +2289,6 @@ dependencies = [
 "num_cpus",
]

[[package]]
name = "redox_syscall"
version = "0.1.57"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce"

[[package]]
name = "redox_syscall"
version = "0.2.5"

@@ -2233,13 +2300,12 @@ dependencies = [

[[package]]
name = "redox_users"
version = "0.3.5"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de0737333e7a9502c789a36d7c7fa6092a49895d4faa31ca5df163857ded2e9d"
checksum = "528532f3d801c87aec9def2add9ca802fe569e44a544afe633765267840abe64"
dependencies = [
 "getrandom 0.1.16",
 "redox_syscall 0.1.57",
 "rust-argon2",
 "getrandom 0.2.2",
 "redox_syscall",
]

[[package]]

@@ -2324,18 +2390,6 @@ dependencies = [
 "librocksdb-sys",
]

[[package]]
name = "rust-argon2"
version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4b18820d944b33caa75a71378964ac46f58517c92b6ae5f762636247c09e78fb"
dependencies = [
 "base64",
 "blake2b_simd",
 "constant_time_eq",
 "crossbeam-utils",
]

[[package]]
name = "rustc-demangle"
version = "0.1.18"

@@ -2619,8 +2673,8 @@ dependencies = [
 "derivative",
 "digest 0.9.0",
 "itertools 0.10.0",
 "rand",
 "rand_chacha",
 "rand 0.8.3",
 "rand_chacha 0.3.0",
 "rayon",
 "sha2",
 "smallvec",

@@ -2639,7 +2693,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64610b135b8b1152439d5dfa4f745515933366082f08651961344aa0bb5abfca"
dependencies = [
 "derivative",
 "rand",
 "rand 0.8.3",
 "rand_xorshift",
 "rustc_version 0.3.3",
 "serde",

@@ -2674,7 +2728,7 @@ dependencies = [
 "derivative",
 "hex",
 "itertools 0.10.0",
 "rand",
 "rand 0.8.3",
 "snarkvm-algorithms",
 "snarkvm-curves",
 "snarkvm-fields",

@@ -2695,7 +2749,7 @@ checksum = "8c49c69d02df11be58e07f626c9d6f5804c6dd4ccf42e425f2be8d79fe6e5bb7"
dependencies = [
 "bincode",
 "derivative",
 "rand",
 "rand 0.8.3",
 "rand_xorshift",
 "serde",
 "snarkvm-utilities",

@@ -2730,7 +2784,7 @@ dependencies = [
 "chrono",
 "hex",
 "once_cell",
 "rand",
 "rand 0.8.3",
 "serde",
 "sha2",
 "snarkvm-algorithms",

@@ -2784,7 +2838,7 @@ dependencies = [
 "bincode",
 "hex",
 "parking_lot",
 "rand",
 "rand 0.8.3",
 "rocksdb",
 "serde",
 "snarkvm-algorithms",

@@ -2802,7 +2856,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c763843fa67a3aa4ce68173c8cd96b4f04aaa135a5792bc051c36eec0fe1cd73"
dependencies = [
 "bincode",
 "rand",
 "rand 0.8.3",
 "snarkvm-derives",
 "thiserror",
]

@@ -2908,8 +2962,8 @@ checksum = "dac1c663cfc93810f88aed9b8941d48cabf856a1b111c29a40439018d870eb22"
dependencies = [
 "cfg-if 1.0.0",
 "libc",
 "rand",
 "redox_syscall 0.2.5",
 "rand 0.8.3",
 "redox_syscall",
 "remove_dir_all",
 "winapi 0.3.9",
]

@@ -2944,6 +2998,15 @@ dependencies = [
 "winapi 0.3.9",
]

[[package]]
name = "test_dir"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e571ebf9127a9da821890a9fa8a8ef777fce3e0f959ff6949cf06ca8b736381d"
dependencies = [
 "rand 0.7.3",
]

[[package]]
name = "textwrap"
version = "0.11.0"

@@ -3156,6 +3219,12 @@ dependencies = [
 "tracing-serde",
]

[[package]]
name = "treeline"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7f741b240f1a48843f9b8e0444fb55fb2a4ff67293b50a9179dfd5ea67f8d41"

[[package]]
name = "try-lock"
version = "0.2.3"

@@ -3273,6 +3342,15 @@ version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d"

[[package]]
name = "wait-timeout"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6"
dependencies = [
 "libc",
]

[[package]]
name = "walkdir"
version = "2.3.2"

@@ -3472,9 +3550,9 @@ dependencies = [

[[package]]
name = "zip"
version = "0.5.10"
version = "0.5.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a8977234acab718eb2820494b2f96cbb16004c19dddf88b7445b27381450997"
checksum = "9c83dc9b784d252127720168abd71ea82bf8c3d96b17dc565b5e2a02854f2b27"
dependencies = [
 "byteorder",
 "bzip2",
@@ -100,7 +100,7 @@ version = "2.33.3"
version = "2.0"

[dependencies.dirs]
version = "3.0.1"
version = "3.0.2"

[dependencies.console]
version = "0.14.0"

@@ -157,6 +157,12 @@ version = "0.12.1"
[dev-dependencies.rusty-hook]
version = "0.11.2"

[dev-dependencies.assert_cmd]
version = "1.0.3"

[dev-dependencies.test_dir]
version = "0.1.0"

[features]
default = [ ]
ci_skip = [ "leo-compiler/ci_skip" ]
@@ -8,7 +8,7 @@
<a href="https://github.com/AleoHQ/leo/actions"><img src="https://github.com/AleoHQ/leo/workflows/CI/badge.svg"></a>
<a href="https://codecov.io/gh/AleoHQ/leo"><img src="https://codecov.io/gh/AleoHQ/leo/branch/master/graph/badge.svg?token=S6MWO60SYL"/></a>
<a href="https://app.bors.tech/repositories/31738"><img src="https://bors.tech/images/badge_small.svg" alt="Bors enabled"></a>
<a href="https://discord.gg/TTexWvt"><img src="https://img.shields.io/discord/700454073459015690?logo=discord"/></a>
<a href="https://discord.gg/5v2ynrw2ds"><img src="https://img.shields.io/discord/700454073459015690?logo=discord"/></a>
</p>

Leo is a functional, statically-typed programming language built for writing private applications.

@@ -95,9 +95,9 @@ This will generate an executable under the `./target/release` directory. To run
Use the Leo CLI to create a new project

```bash
# create a new `hello_world` Leo project
leo new hello_world
cd hello_world
# create a new `hello-world` Leo project
leo new hello-world
cd hello-world

# build & setup & prove & verify
leo run

@@ -113,6 +113,7 @@ Congratulations! You've just run your first Leo program.

* [Hello World - Next Steps](https://developer.aleo.org/developer/getting_started/hello_world)
* [Leo Language Documentation](https://developer.aleo.org/developer/language/layout)
* [Leo ABNF Grammar](./grammar/README.md)
* [Leo CLI Documentation](https://developer.aleo.org/developer/cli/new)
* [Homepage](https://developer.aleo.org/developer/getting_started/overview)
@@ -68,7 +68,7 @@ impl<'a> AsgContextInner<'a> {

    #[allow(clippy::mut_from_ref)]
    pub fn alloc_scope(&'a self, scope: Scope<'a>) -> &'a Scope<'a> {
        match self.arena.alloc(ArenaNode::Scope(scope)) {
        match self.arena.alloc(ArenaNode::Scope(Box::new(scope))) {
            ArenaNode::Scope(e) => e,
            _ => unimplemented!(),
        }
@@ -44,7 +44,7 @@ pub(super) trait FromAst<'a, T: leo_ast::Node + 'static>: Sized {

pub enum ArenaNode<'a> {
    Expression(Expression<'a>),
    Scope(Scope<'a>),
    Scope(Box<Scope<'a>>),
    Statement(Statement<'a>),
    Variable(Variable<'a>),
    Circuit(Circuit<'a>),
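Boxing the `Scope` variant keeps `ArenaNode` small: a Rust enum is at least as large as its largest variant, so storing a large `Scope` inline would inflate every arena node. A minimal, self-contained sketch of the effect, with toy stand-in types (the sizes are illustrative, not Leo's actual layout):

```rust
use std::mem::size_of;

// Stand-in for a large payload such as `Scope<'a>`.
struct Large([u64; 16]);

enum Unboxed {
    Small(u8),
    Big(Large), // forces every Unboxed value to reserve room for 128+ bytes
}

enum Boxed {
    Small(u8),
    Big(Box<Large>), // only a pointer is stored inline
}

fn main() {
    // An enum is at least as large as its largest inline variant.
    assert!(size_of::<Unboxed>() >= size_of::<Large>());
    println!("unboxed: {} bytes, boxed: {} bytes", size_of::<Unboxed>(), size_of::<Boxed>());
}
```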
@@ -24,7 +24,17 @@ pub use circuit::*;
mod function;
pub use function::*;

use crate::{ArenaNode, AsgContext, AsgConvertError, ImportResolver, Input, Scope};
use crate::{
    node::FromAst,
    ArenaNode,
    AsgContext,
    AsgConvertError,
    DefinitionStatement,
    ImportResolver,
    Input,
    Scope,
    Statement,
};
use leo_ast::{Identifier, PackageAccess, PackageOrPackages, Span};

use indexmap::IndexMap;

@@ -48,10 +58,12 @@ pub struct Program<'a> {
    /// Maps function name => function code block.
    pub functions: IndexMap<String, &'a Function<'a>>,

    /// Maps global constant name => global const code block.
    pub global_consts: IndexMap<String, &'a DefinitionStatement<'a>>,

    /// Maps circuit name => circuit code block.
    pub circuits: IndexMap<String, &'a Circuit<'a>>,

    /// Bindings for names and additional program context.
    pub scope: &'a Scope<'a>,
}

@@ -168,6 +180,7 @@ impl<'a> Program<'a> {

        let mut imported_functions: IndexMap<String, &'a Function<'a>> = IndexMap::new();
        let mut imported_circuits: IndexMap<String, &'a Circuit<'a>> = IndexMap::new();
        let mut imported_global_consts: IndexMap<String, &'a DefinitionStatement<'a>> = IndexMap::new();

        // Prepare locally relevant scope of imports.
        for (package, symbol, span) in imported_symbols.into_iter() {

@@ -180,12 +193,15 @@ impl<'a> Program<'a> {
                ImportSymbol::All => {
                    imported_functions.extend(resolved_package.functions.clone().into_iter());
                    imported_circuits.extend(resolved_package.circuits.clone().into_iter());
                    imported_global_consts.extend(resolved_package.global_consts.clone().into_iter());
                }
                ImportSymbol::Direct(name) => {
                    if let Some(function) = resolved_package.functions.get(&name) {
                        imported_functions.insert(name.clone(), *function);
                    } else if let Some(circuit) = resolved_package.circuits.get(&name) {
                        imported_circuits.insert(name.clone(), *circuit);
                    } else if let Some(global_const) = resolved_package.global_consts.get(&name) {
                        imported_global_consts.insert(name.clone(), *global_const);
                    } else {
                        return Err(AsgConvertError::unresolved_import(
                            &*format!("{}.{}", pretty_package, name),

@@ -198,6 +214,8 @@ impl<'a> Program<'a> {
                        imported_functions.insert(alias.clone(), *function);
                    } else if let Some(circuit) = resolved_package.circuits.get(&name) {
                        imported_circuits.insert(alias.clone(), *circuit);
                    } else if let Some(global_const) = resolved_package.global_consts.get(&name) {
                        imported_global_consts.insert(alias.clone(), *global_const);
                    } else {
                        return Err(AsgConvertError::unresolved_import(
                            &*format!("{}.{}", pretty_package, name),

@@ -208,17 +226,18 @@ impl<'a> Program<'a> {
            }
        }

        let import_scope = match context.arena.alloc(ArenaNode::Scope(Scope {
        let import_scope = match context.arena.alloc(ArenaNode::Scope(Box::new(Scope {
            context,
            id: context.get_id(),
            parent_scope: Cell::new(None),
            circuit_self: Cell::new(None),
            variables: RefCell::new(IndexMap::new()),
            functions: RefCell::new(imported_functions),
            global_consts: RefCell::new(imported_global_consts),
            circuits: RefCell::new(imported_circuits),
            function: Cell::new(None),
            input: Cell::new(None),
        })) {
        }))) {
            ArenaNode::Scope(c) => c,
            _ => unimplemented!(),
        };

@@ -231,6 +250,7 @@ impl<'a> Program<'a> {
            circuit_self: Cell::new(None),
            variables: RefCell::new(IndexMap::new()),
            functions: RefCell::new(IndexMap::new()),
            global_consts: RefCell::new(IndexMap::new()),
            circuits: RefCell::new(IndexMap::new()),
            function: Cell::new(None),
        });

@@ -258,7 +278,29 @@ impl<'a> Program<'a> {
            scope.functions.borrow_mut().insert(name.name.to_string(), function);
        }

        for (name, global_const) in program.global_consts.iter() {
            global_const
                .variable_names
                .iter()
                .for_each(|variable_name| assert!(name.contains(&variable_name.identifier.name.to_string())));
            let gc = <&Statement<'a>>::from_ast(scope, global_const, None)?;
            if let Statement::Definition(gc) = gc {
                scope.global_consts.borrow_mut().insert(name.clone(), gc);
            }
        }

        // Load concrete definitions.
        let mut global_consts = IndexMap::new();
        for (name, global_const) in program.global_consts.iter() {
            global_const
                .variable_names
                .iter()
                .for_each(|variable_name| assert!(name.contains(&variable_name.identifier.name.to_string())));
            let asg_global_const = *scope.global_consts.borrow().get(name).unwrap();

            global_consts.insert(name.clone(), asg_global_const);
        }

        let mut functions = IndexMap::new();
        for (name, function) in program.functions.iter() {
            assert_eq!(name.name, function.identifier.name);

@@ -290,6 +332,7 @@ impl<'a> Program<'a> {
            id: context.get_id(),
            name: program.name.clone(),
            functions,
            global_consts,
            circuits,
            imported_modules: resolved_packages
                .into_iter()

@@ -340,6 +383,7 @@ pub fn reform_ast<'a>(program: &Program<'a>) -> leo_ast::Program {

    let mut all_circuits: IndexMap<String, &'a Circuit<'a>> = IndexMap::new();
    let mut all_functions: IndexMap<String, &'a Function<'a>> = IndexMap::new();
    let mut all_global_consts: IndexMap<String, &'a DefinitionStatement<'a>> = IndexMap::new();
    let mut identifiers = InternalIdentifierGenerator { next: 0 };
    for (_, program) in all_programs.into_iter() {
        for (name, circuit) in program.circuits.iter() {

@@ -356,6 +400,11 @@ pub fn reform_ast<'a>(program: &Program<'a>) -> leo_ast::Program {
            function.name.borrow_mut().name = identifier.clone().into();
            all_functions.insert(identifier, *function);
        }

        for (name, global_const) in program.global_consts.iter() {
            let identifier = format!("{}{}", identifiers.next().unwrap(), name);
            all_global_consts.insert(identifier, *global_const);
        }
    }

    leo_ast::Program {

@@ -380,6 +429,20 @@ pub fn reform_ast<'a>(program: &Program<'a>) -> leo_ast::Program {
            .into_iter()
            .map(|(_, circuit)| (circuit.name.borrow().clone(), circuit.into()))
            .collect(),
        global_consts: all_global_consts
            .into_iter()
            .map(|(_, global_const)| {
                (
                    global_const
                        .variables
                        .iter()
                        .fold("".to_string(), |joined, variable_name| {
                            format!("{}, {}", joined, variable_name.borrow().name.name)
                        }),
                    global_const.into(),
                )
            })
            .collect(),
    }
}

@@ -399,6 +462,21 @@ impl<'a> Into<leo_ast::Program> for &Program<'a> {
            .iter()
            .map(|(_, function)| (function.name.borrow().clone(), (*function).into()))
            .collect(),
        global_consts: self
            .global_consts
            .iter()
            .map(|(_, global_const)| {
                (
                    global_const
                        .variables
                        .iter()
                        .fold("".to_string(), |joined, variable_name| {
                            format!("{}, {}", joined, variable_name.borrow().name.name)
                        }),
                    (*global_const).into(),
                )
            })
            .collect(),
    }
}
}
@@ -322,6 +322,12 @@ impl<'a, R: ReconstructingReducerProgram<'a>> ReconstructingDirector<'a, R> {
        self.reducer.reduce_circuit(input, members)
    }

    pub fn reduce_global_const(&mut self, input: &'a DefinitionStatement<'a>) -> &'a DefinitionStatement<'a> {
        let value = self.reduce_expression(input.value.get());

        self.reducer.reduce_global_const(input, value)
    }

    pub fn reduce_program(&mut self, input: Program<'a>) -> Program<'a> {
        let imported_modules = input
            .imported_modules

@@ -339,7 +345,13 @@ impl<'a, R: ReconstructingReducerProgram<'a>> ReconstructingDirector<'a, R> {
            .map(|(name, c)| (name.clone(), self.reduce_circuit(c)))
            .collect();

        let global_consts = input
            .global_consts
            .iter()
            .map(|(name, gc)| (name.clone(), self.reduce_global_const(gc)))
            .collect();

        self.reducer
            .reduce_program(input, imported_modules, functions, circuits)
            .reduce_program(input, imported_modules, functions, circuits, global_consts)
    }
}
@@ -383,12 +383,22 @@ pub trait ReconstructingReducerProgram<'a>: ReconstructingReducerStatement<'a> {
        input
    }

    fn reduce_global_const(
        &mut self,
        input: &'a DefinitionStatement<'a>,
        value: &'a Expression<'a>,
    ) -> &'a DefinitionStatement<'a> {
        input.value.set(value);
        input
    }

    fn reduce_program(
        &mut self,
        input: Program<'a>,
        imported_modules: Vec<(String, Program<'a>)>,
        functions: Vec<(String, &'a Function<'a>)>,
        circuits: Vec<(String, &'a Circuit<'a>)>,
        global_consts: Vec<(String, &'a DefinitionStatement<'a>)>,
    ) -> Program<'a> {
        Program {
            context: input.context,

@@ -398,6 +408,7 @@ pub trait ReconstructingReducerProgram<'a>: ReconstructingReducerStatement<'a> {
            functions: functions.into_iter().collect(),
            circuits: circuits.into_iter().collect(),
            scope: input.scope,
            global_consts: global_consts.into_iter().collect(),
        }
    }
}
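The new `reduce_global_const` hook follows the crate's usual reducer pattern: the trait ships a default that keeps the node unchanged, and a concrete pass overrides only the hooks it cares about. A self-contained sketch of that pattern with hypothetical stand-in types (the real trait and `DefinitionStatement` live in leo-asg):

```rust
#[derive(Clone, Debug)]
struct MiniConst {
    name: String,
    value: i64,
}

trait MiniReducerProgram {
    // Mirrors reduce_global_const: the default implementation is the identity.
    fn reduce_global_const(&mut self, input: MiniConst) -> MiniConst {
        input
    }
}

// A pass that rewrites every global constant's value.
struct DoubleConsts;

impl MiniReducerProgram for DoubleConsts {
    fn reduce_global_const(&mut self, mut input: MiniConst) -> MiniConst {
        input.value *= 2;
        input
    }
}

fn main() {
    let mut pass = DoubleConsts;
    let reduced = pass.reduce_global_const(MiniConst { name: "ANSWER".into(), value: 21 });
    assert_eq!(reduced.value, 42);
    println!("{} = {}", reduced.name, reduced.value);
}
```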
@@ -159,6 +159,10 @@ pub trait ProgramVisitor<'a>: StatementVisitor<'a> {
        Default::default()
    }

    fn visit_global_const(&mut self, input: &'a DefinitionStatement<'a>) -> VisitResult {
        Default::default()
    }

    fn visit_program(&mut self, input: &Program<'a>) -> VisitResult {
        Default::default()
    }
@@ -424,6 +424,16 @@ impl<'a, R: ProgramVisitor<'a>> VisitorDirector<'a, R> {
        }
    }

    pub fn visit_global_const(&mut self, input: &'a DefinitionStatement<'a>) -> ConcreteVisitResult {
        match self.visitor.visit_global_const(input) {
            VisitResult::VisitChildren => {
                self.visit_expression(&input.value)?;
                Ok(())
            }
            x => x.into(),
        }
    }

    pub fn visit_program(&mut self, input: &Program<'a>) -> ConcreteVisitResult {
        match self.visitor.visit_program(input) {
            VisitResult::VisitChildren => {

@@ -436,6 +446,9 @@ impl<'a, R: ProgramVisitor<'a>> VisitorDirector<'a, R> {
                for (_, circuit) in input.circuits.iter() {
                    self.visit_circuit(circuit)?;
                }
                for (_, global_const) in input.global_consts.iter() {
                    self.visit_global_const(global_const)?;
                }
                Ok(())
            }
            x => x.into(),
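On the read-only side, `visit_global_const` lets a visitor inspect each global constant and tell the director whether to descend into its value expression. A self-contained sketch of that control flow with hypothetical types (the real `VisitResult` and director are in leo-asg):

```rust
enum Visit {
    Children, // the director should also walk the value expression
    Stop,     // the director should not descend further
}

trait MiniProgramVisitor {
    fn visit_global_const(&mut self, name: &str) -> Visit {
        let _ = name;
        Visit::Children
    }
}

struct CountConsts(usize);

impl MiniProgramVisitor for CountConsts {
    fn visit_global_const(&mut self, _name: &str) -> Visit {
        self.0 += 1;
        Visit::Stop // only counting, no need to walk the value
    }
}

fn main() {
    let mut visitor = CountConsts(0);
    for name in ["MAX_SIZE", "GENERATOR"] {
        let _ = visitor.visit_global_const(name);
    }
    assert_eq!(visitor.0, 2);
}
```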
@@ -14,7 +14,7 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.

use crate::{AsgContext, AsgConvertError, Circuit, Function, Input, Type, Variable};
use crate::{AsgContext, AsgConvertError, Circuit, DefinitionStatement, Function, Input, Type, Variable};

use indexmap::IndexMap;
use std::cell::{Cell, RefCell};

@@ -42,6 +42,9 @@ pub struct Scope<'a> {
    /// Maps function name => function.
    pub functions: RefCell<IndexMap<String, &'a Function<'a>>>,

    /// Maps global constant name => global const code block.
    pub global_consts: RefCell<IndexMap<String, &'a DefinitionStatement<'a>>>,

    /// Maps circuit name => circuit.
    pub circuits: RefCell<IndexMap<String, &'a Circuit<'a>>>,

@@ -161,6 +164,7 @@ impl<'a> Scope<'a> {
            variables: RefCell::new(IndexMap::new()),
            functions: RefCell::new(IndexMap::new()),
            circuits: RefCell::new(IndexMap::new()),
            global_consts: RefCell::new(IndexMap::new()),
            function: Cell::new(None),
            input: Cell::new(None),
        })
@@ -39,6 +39,22 @@ pub struct DefinitionStatement<'a> {
    pub value: Cell<&'a Expression<'a>>,
}

impl<'a> DefinitionStatement<'a> {
    pub fn split(&self) -> Vec<(String, Self)> {
        self.variables
            .iter()
            .map(|variable| {
                (variable.borrow().name.name.to_string(), DefinitionStatement {
                    parent: self.parent.clone(),
                    span: self.span.clone(),
                    variables: vec![variable],
                    value: self.value.clone(),
                })
            })
            .collect()
    }
}

impl<'a> Node for DefinitionStatement<'a> {
    fn span(&self) -> Option<&Span> {
        self.span.as_ref()
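`split` exists so a definition that binds several names at once can also be looked up per name; the Leo-level intuition is that a statement like `let (a, b) = (1u8, 2u8);` yields one entry keyed "a" and one keyed "b" (that concrete syntax is illustrative). A self-contained sketch with simplified stand-in types (the real statement holds arena references and spans):

```rust
#[derive(Clone, Debug)]
struct MiniDefinition {
    variables: Vec<String>,
    value: String, // e.g. the tuple expression "(1u8, 2u8)"
}

impl MiniDefinition {
    // Mirrors DefinitionStatement::split: one (name, statement) pair per bound variable.
    fn split(&self) -> Vec<(String, MiniDefinition)> {
        self.variables
            .iter()
            .map(|name| {
                (name.clone(), MiniDefinition {
                    variables: vec![name.clone()],
                    value: self.value.clone(),
                })
            })
            .collect()
    }
}

fn main() {
    let def = MiniDefinition {
        variables: vec!["a".into(), "b".into()],
        value: "(1u8, 2u8)".into(),
    };
    let keys: Vec<String> = def.split().into_iter().map(|(k, _)| k).collect();
    assert_eq!(keys, vec!["a".to_string(), "b".to_string()]);
}
```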
@@ -14,7 +14,7 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.

use crate::{FormattedError, Span};
use crate::{FormattedError, LeoError, Span};

#[derive(Debug, Error)]
pub enum CanonicalizeError {

@@ -22,6 +22,8 @@ pub enum CanonicalizeError {
    Error(#[from] FormattedError),
}

impl LeoError for CanonicalizeError {}

impl CanonicalizeError {
    fn new_from_span(message: String, span: &Span) -> Self {
        CanonicalizeError::Error(FormattedError::new_from_span(message, span))
ast/src/errors/combiner.rs (new file, 43 lines)
@@ -0,0 +1,43 @@
// Copyright (C) 2019-2021 Aleo Systems Inc.
// This file is part of the Leo library.

// The Leo library is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// The Leo library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.

use crate::{FormattedError, LeoError, Span};

#[derive(Debug, Error)]
pub enum CombinerError {
    #[error("{}", _0)]
    Error(#[from] FormattedError),
}

impl LeoError for CombinerError {}

impl CombinerError {
    fn new_from_span(message: String, span: &Span) -> Self {
        CombinerError::Error(FormattedError::new_from_span(message, span))
    }

    pub fn asg_statement_not_block(span: &Span) -> Self {
        let message = "AstStatement should be a block".to_string();

        Self::new_from_span(message, span)
    }

    pub fn illegal_compound_array_range(span: &Span) -> Self {
        let message = "Illegal compound assignment with array range".to_string();

        Self::new_from_span(message, span)
    }
}
@@ -14,7 +14,16 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.

pub mod canonicalization;
pub use canonicalization::*;

pub mod combiner;
pub use combiner::*;

pub mod error;
pub use error::*;

pub mod reducer;
pub use reducer::*;

pub trait LeoError {}
ast/src/errors/reducer.rs (new file, 43 lines)
@@ -0,0 +1,43 @@
// Copyright (C) 2019-2021 Aleo Systems Inc.
// This file is part of the Leo library.

// The Leo library is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.

// The Leo library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.

use crate::{CanonicalizeError, CombinerError, FormattedError, LeoError, Span};

#[derive(Debug, Error)]
pub enum ReducerError {
    #[error("{}", _0)]
    Error(#[from] FormattedError),

    #[error("{}", _0)]
    CanonicalizeError(#[from] CanonicalizeError),

    #[error("{}", _0)]
    CombinerError(#[from] CombinerError),
}

impl LeoError for ReducerError {}

impl ReducerError {
    fn new_from_span(message: String, span: &Span) -> Self {
        ReducerError::Error(FormattedError::new_from_span(message, span))
    }

    pub fn impossible_console_assert_call(span: &Span) -> Self {
        let message = "Console::Assert cannot be matched here, it is handled in another case.".to_string();

        Self::new_from_span(message, span)
    }
}
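The `#[from]` attributes above are what let the canonicalizer return a single `ReducerError` while its helpers still produce the more specific `CanonicalizeError` or `CombinerError`: the `?` operator performs the conversion. A self-contained sketch of the same pattern with toy types (assumes `thiserror = "1"` in Cargo.toml; the error text is made up for the example):

```rust
use thiserror::Error;

#[derive(Debug, Error)]
#[error("canonicalize: {0}")]
struct CanonicalizeError(String);

#[derive(Debug, Error)]
enum ReducerError {
    #[error("{0}")]
    Canonicalize(#[from] CanonicalizeError),
}

fn canonicalize(ok: bool) -> Result<(), CanonicalizeError> {
    if ok {
        Ok(())
    } else {
        Err(CanonicalizeError("zero-sized array dimension".into()))
    }
}

fn reduce() -> Result<(), ReducerError> {
    canonicalize(false)?; // `?` applies the generated From impl
    Ok(())
}

fn main() {
    println!("{}", reduce().unwrap_err());
}
```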
@@ -71,7 +71,7 @@ pub use node::*;
/// These data types form a tree that begins from a [`Program`] type root.
///
/// A new [`Ast`] can be created from a [`Grammar`] generated by the pest parser in the `grammar` module.
#[derive(Debug, Eq, PartialEq)]
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Ast {
    ast: Program,
}

@@ -83,7 +83,7 @@ impl Ast {
    }

    /// Mutates the program ast by performing canonicalization on it.
    pub fn canonicalize(&mut self) -> Result<(), CanonicalizeError> {
    pub fn canonicalize(&mut self) -> Result<(), ReducerError> {
        self.ast = ReconstructingDirector::new(Canonicalizer::default()).reduce_program(self.as_repr())?;
        Ok(())
    }
@@ -17,7 +17,7 @@
//! A Leo program consists of import, circuit, and function definitions.
//! Each defined type consists of ast statements and expressions.

use crate::{Circuit, Function, FunctionInput, Identifier, ImportStatement};
use crate::{Circuit, DefinitionStatement, Function, FunctionInput, Identifier, ImportStatement};

use indexmap::IndexMap;
use serde::{Deserialize, Serialize};

@@ -30,6 +30,7 @@ pub struct Program {
    pub expected_input: Vec<FunctionInput>,
    pub imports: Vec<ImportStatement>,
    pub circuits: IndexMap<Identifier, Circuit>,
    pub global_consts: IndexMap<String, DefinitionStatement>,
    pub functions: IndexMap<Identifier, Function>,
}

@@ -66,6 +67,7 @@ impl Program {
            expected_input: vec![],
            imports: vec![],
            circuits: IndexMap::new(),
            global_consts: IndexMap::new(),
            functions: IndexMap::new(),
        }
    }
@@ -25,11 +25,78 @@ use crate::*;
pub struct Canonicalizer {
    // If we are in a circuit keep track of the circuit name.
    circuit_name: Option<Identifier>,
    in_circuit: bool,
}

impl Default for Canonicalizer {
    fn default() -> Self {
        Self {
            circuit_name: None,
            in_circuit: false,
        }
    }
}

impl Canonicalizer {
    pub fn default() -> Self {
        Self { circuit_name: None }
    pub fn canonicalize_accesses(
        &mut self,
        start: Expression,
        accesses: &[AssigneeAccess],
        span: &Span,
    ) -> Result<Box<Expression>, ReducerError> {
        let mut left = Box::new(start);

        for access in accesses.iter() {
            match self.canonicalize_assignee_access(&access) {
                AssigneeAccess::ArrayIndex(index) => {
                    left = Box::new(Expression::ArrayAccess(ArrayAccessExpression {
                        array: left,
                        index: Box::new(index),
                        span: span.clone(),
                    }));
                }
                AssigneeAccess::Tuple(positive_number, _) => {
                    left = Box::new(Expression::TupleAccess(TupleAccessExpression {
                        tuple: left,
                        index: positive_number,
                        span: span.clone(),
                    }));
                }
                AssigneeAccess::Member(identifier) => {
                    left = Box::new(Expression::CircuitMemberAccess(CircuitMemberAccessExpression {
                        circuit: left,
                        name: identifier,
                        span: span.clone(),
                    }));
                }
                _ => return Err(ReducerError::from(CombinerError::illegal_compound_array_range(&span))),
            }
        }

        Ok(left)
    }

    pub fn compound_operation_converstion(
        &mut self,
        operation: &AssignOperation,
    ) -> Result<BinaryOperation, ReducerError> {
        match operation {
            AssignOperation::Assign => unreachable!(),
            AssignOperation::Add => Ok(BinaryOperation::Add),
            AssignOperation::Sub => Ok(BinaryOperation::Sub),
            AssignOperation::Mul => Ok(BinaryOperation::Mul),
            AssignOperation::Div => Ok(BinaryOperation::Div),
            AssignOperation::Pow => Ok(BinaryOperation::Pow),
            AssignOperation::Or => Ok(BinaryOperation::Or),
            AssignOperation::And => Ok(BinaryOperation::And),
            AssignOperation::BitOr => Ok(BinaryOperation::BitOr),
            AssignOperation::BitAnd => Ok(BinaryOperation::BitAnd),
            AssignOperation::BitXor => Ok(BinaryOperation::BitXor),
            AssignOperation::Shr => Ok(BinaryOperation::Shr),
            AssignOperation::ShrSigned => Ok(BinaryOperation::ShrSigned),
            AssignOperation::Shl => Ok(BinaryOperation::Shl),
            AssignOperation::Mod => Ok(BinaryOperation::Mod),
        }
    }

    fn is_self_type(&mut self, type_option: Option<&Type>) -> bool {

@@ -380,17 +447,21 @@ impl Canonicalizer {
}

impl ReconstructingReducer for Canonicalizer {
    fn reduce_type(
        &mut self,
        _type_: &Type,
        new: Type,
        in_circuit: bool,
        span: &Span,
    ) -> Result<Type, CanonicalizeError> {
    fn in_circuit(&self) -> bool {
        self.in_circuit
    }

    fn swap_in_circuit(&mut self) {
        self.in_circuit = !self.in_circuit;
    }

    fn reduce_type(&mut self, _type_: &Type, new: Type, span: &Span) -> Result<Type, ReducerError> {
        match new {
            Type::Array(type_, mut dimensions) => {
                if dimensions.is_zero() {
                    return Err(CanonicalizeError::invalid_array_dimension_size(span));
                    return Err(ReducerError::from(CanonicalizeError::invalid_array_dimension_size(
                        span,
                    )));
                }

                let mut next = Type::Array(type_, ArrayDimensions(vec![dimensions.remove_last().unwrap()]));

@@ -407,7 +478,9 @@ impl ReconstructingReducer for Canonicalizer {

                Ok(array)
            }
            Type::SelfType if !in_circuit => Err(CanonicalizeError::big_self_outside_of_circuit(span)),
            Type::SelfType if !self.in_circuit => {
                Err(ReducerError::from(CanonicalizeError::big_self_outside_of_circuit(span)))
            }
            _ => Ok(new.clone()),
        }
    }

@@ -416,10 +489,11 @@ impl ReconstructingReducer for Canonicalizer {
        &mut self,
        array_init: &ArrayInitExpression,
        element: Expression,
        _in_circuit: bool,
    ) -> Result<ArrayInitExpression, CanonicalizeError> {
    ) -> Result<ArrayInitExpression, ReducerError> {
        if array_init.dimensions.is_zero() {
            return Err(CanonicalizeError::invalid_array_dimension_size(&array_init.span));
            return Err(ReducerError::from(CanonicalizeError::invalid_array_dimension_size(
                &array_init.span,
            )));
        }

        let element = Box::new(element);

@@ -466,58 +540,39 @@ impl ReconstructingReducer for Canonicalizer {
        assign: &AssignStatement,
        assignee: Assignee,
        value: Expression,
        _in_circuit: bool,
    ) -> Result<AssignStatement, CanonicalizeError> {
    ) -> Result<AssignStatement, ReducerError> {
        match value {
            Expression::Binary(binary_expr) if assign.operation != AssignOperation::Assign => {
                let left = self.canonicalize_accesses(
                    Expression::Identifier(assignee.identifier.clone()),
                    &assignee.accesses,
                    &assign.span,
                )?;
                let right = Box::new(Expression::Binary(binary_expr));
                let op = self.compound_operation_converstion(&assign.operation)?;

                let new_value = Expression::Binary(BinaryExpression {
                    left,
                    right,
                    op,
                    span: assign.span.clone(),
                });

                Ok(AssignStatement {
                    operation: AssignOperation::Assign,
                    assignee,
                    value: new_value,
                    span: assign.span.clone(),
                })
            }
            Expression::Value(value_expr) if assign.operation != AssignOperation::Assign => {
                let mut left = Box::new(Expression::Identifier(assignee.identifier.clone()));

                for access in assignee.accesses.iter() {
                    match self.canonicalize_assignee_access(&access) {
                        AssigneeAccess::ArrayIndex(index) => {
                            left = Box::new(Expression::ArrayAccess(ArrayAccessExpression {
                                array: left,
                                index: Box::new(index),
                                span: assign.span.clone(),
                            }));
                        }
                        AssigneeAccess::Tuple(positive_number, _) => {
                            left = Box::new(Expression::TupleAccess(TupleAccessExpression {
                                tuple: left,
                                index: positive_number,
                                span: assign.span.clone(),
                            }));
                        }
                        AssigneeAccess::Member(identifier) => {
                            left = Box::new(Expression::CircuitMemberAccess(CircuitMemberAccessExpression {
                                circuit: left,
                                name: identifier,
                                span: assign.span.clone(),
                            }));
                        }
                        _ => unimplemented!(), // No reason for someone to compute ArrayRanges.
                    }
                }

                let left = self.canonicalize_accesses(
                    Expression::Identifier(assignee.identifier.clone()),
                    &assignee.accesses,
                    &assign.span,
                )?;
                let right = Box::new(Expression::Value(value_expr));

                let op = match assign.operation {
                    AssignOperation::Assign => unimplemented!(), // Imposible
                    AssignOperation::Add => BinaryOperation::Add,
                    AssignOperation::Sub => BinaryOperation::Sub,
                    AssignOperation::Mul => BinaryOperation::Mul,
                    AssignOperation::Div => BinaryOperation::Div,
                    AssignOperation::Pow => BinaryOperation::Pow,
                    AssignOperation::Or => BinaryOperation::Or,
                    AssignOperation::And => BinaryOperation::And,
                    AssignOperation::BitOr => BinaryOperation::BitOr,
                    AssignOperation::BitAnd => BinaryOperation::BitAnd,
                    AssignOperation::BitXor => BinaryOperation::BitXor,
                    AssignOperation::Shr => BinaryOperation::Shr,
                    AssignOperation::ShrSigned => BinaryOperation::ShrSigned,
                    AssignOperation::Shl => BinaryOperation::Shl,
                    AssignOperation::Mod => BinaryOperation::Mod,
                };
                let op = self.compound_operation_converstion(&assign.operation)?;

                let new_value = Expression::Binary(BinaryExpression {
                    left,

@@ -545,8 +600,7 @@ impl ReconstructingReducer for Canonicalizer {
        input: Vec<FunctionInput>,
        output: Option<Type>,
        block: Block,
        _in_circuit: bool,
    ) -> Result<Function, CanonicalizeError> {
    ) -> Result<Function, ReducerError> {
        let new_output = match output {
            None => Some(Type::Tuple(vec![])),
            _ => output,

@@ -567,7 +621,7 @@ impl ReconstructingReducer for Canonicalizer {
        _circuit: &Circuit,
        circuit_name: Identifier,
        members: Vec<CircuitMember>,
    ) -> Result<Circuit, CanonicalizeError> {
    ) -> Result<Circuit, ReducerError> {
        self.circuit_name = Some(circuit_name.clone());
        let circ = Circuit {
            circuit_name,
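The net effect of `canonicalize_accesses` plus `compound_operation_converstion` is that a compound assignment such as `a[0] += 2` is rewritten into the plain assignment `a[0] = a[0] + 2`, with the assignee's access chain folded into a nested expression. A self-contained sketch of that folding with a hypothetical, simplified AST (the real node types live in leo-ast):

```rust
#[derive(Clone, Debug, PartialEq)]
enum Expr {
    Ident(String),
    Index(Box<Expr>, u32),     // a[0]
    Add(Box<Expr>, Box<Expr>), // lhs + rhs
    Const(i64),
}

enum Access {
    ArrayIndex(u32),
}

// Fold the assignee's access chain into a plain expression,
// mirroring what canonicalize_accesses does.
fn accesses_to_expr(base: &str, accesses: &[Access]) -> Expr {
    accesses.iter().fold(Expr::Ident(base.into()), |left, access| match access {
        Access::ArrayIndex(i) => Expr::Index(Box::new(left), *i),
    })
}

fn main() {
    // `a[0] += 2`
    let lhs = accesses_to_expr("a", &[Access::ArrayIndex(0)]);
    // The compound operator becomes an explicit binary expression on the right.
    let rewritten = Expr::Add(Box::new(lhs.clone()), Box::new(Expr::Const(2)));
    assert_eq!(
        rewritten,
        Expr::Add(
            Box::new(Expr::Index(Box::new(Expr::Ident("a".into())), 0)),
            Box::new(Expr::Const(2))
        )
    );
}
```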
@@ -17,9 +17,6 @@
mod canonicalization;
pub use canonicalization::*;

mod errors;
pub use errors::*;

mod reconstructing_reducer;
pub use reconstructing_reducer::*;

@@ -22,18 +22,14 @@ use indexmap::IndexMap;

pub struct ReconstructingDirector<R: ReconstructingReducer> {
    reducer: R,
    in_circuit: bool,
}

impl<R: ReconstructingReducer> ReconstructingDirector<R> {
    pub fn new(reducer: R) -> Self {
        Self {
            reducer,
            in_circuit: false,
        }
        Self { reducer }
    }

    pub fn reduce_type(&mut self, type_: &Type, span: &Span) -> Result<Type, CanonicalizeError> {
    pub fn reduce_type(&mut self, type_: &Type, span: &Span) -> Result<Type, ReducerError> {
        let new = match type_ {
            Type::Array(type_, dimensions) => Type::Array(Box::new(self.reduce_type(type_, span)?), dimensions.clone()),
            Type::Tuple(types) => {

@@ -48,11 +44,11 @@ impl<R: ReconstructingReducer> ReconstructingDirector<R> {
            _ => type_.clone(),
        };

        self.reducer.reduce_type(type_, new, self.in_circuit, span)
        self.reducer.reduce_type(type_, new, span)
    }

    // Expressions
    pub fn reduce_expression(&mut self, expression: &Expression) -> Result<Expression, CanonicalizeError> {
    pub fn reduce_expression(&mut self, expression: &Expression) -> Result<Expression, ReducerError> {
        let new = match expression {
            Expression::Identifier(identifier) => Expression::Identifier(self.reduce_identifier(&identifier)?),
            Expression::Value(value) => Expression::Value(self.reduce_value(&value)?),

@@ -84,18 +80,18 @@ impl<R: ReconstructingReducer> ReconstructingDirector<R> {
            Expression::Call(call) => Expression::Call(self.reduce_call(&call)?),
        };

        self.reducer.reduce_expression(expression, new, self.in_circuit)
        self.reducer.reduce_expression(expression, new)
    }

    pub fn reduce_identifier(&mut self, identifier: &Identifier) -> Result<Identifier, CanonicalizeError> {
    pub fn reduce_identifier(&mut self, identifier: &Identifier) -> Result<Identifier, ReducerError> {
        self.reducer.reduce_identifier(identifier)
    }

    pub fn reduce_group_tuple(&mut self, group_tuple: &GroupTuple) -> Result<GroupTuple, CanonicalizeError> {
    pub fn reduce_group_tuple(&mut self, group_tuple: &GroupTuple) -> Result<GroupTuple, ReducerError> {
        self.reducer.reduce_group_tuple(group_tuple)
    }

    pub fn reduce_group_value(&mut self, group_value: &GroupValue) -> Result<GroupValue, CanonicalizeError> {
    pub fn reduce_group_value(&mut self, group_value: &GroupValue) -> Result<GroupValue, ReducerError> {
        let new = match group_value {
            GroupValue::Tuple(group_tuple) => GroupValue::Tuple(self.reduce_group_tuple(&group_tuple)?),
            _ => group_value.clone(),

@@ -104,7 +100,7 @@ impl<R: ReconstructingReducer> ReconstructingDirector<R> {
        self.reducer.reduce_group_value(group_value, new)
    }

    pub fn reduce_value(&mut self, value: &ValueExpression) -> Result<ValueExpression, CanonicalizeError> {
    pub fn reduce_value(&mut self, value: &ValueExpression) -> Result<ValueExpression, ReducerError> {
        let new = match value {
            ValueExpression::Group(group_value) => {
                ValueExpression::Group(Box::new(self.reduce_group_value(&group_value)?))

@@ -115,41 +111,38 @@ impl<R: ReconstructingReducer> ReconstructingDirector<R> {
        self.reducer.reduce_value(value, new)
    }

    pub fn reduce_binary(&mut self, binary: &BinaryExpression) -> Result<BinaryExpression, CanonicalizeError> {
    pub fn reduce_binary(&mut self, binary: &BinaryExpression) -> Result<BinaryExpression, ReducerError> {
        let left = self.reduce_expression(&binary.left)?;
        let right = self.reduce_expression(&binary.right)?;

        self.reducer
            .reduce_binary(binary, left, right, binary.op.clone(), self.in_circuit)
        self.reducer.reduce_binary(binary, left, right, binary.op.clone())
    }

    pub fn reduce_unary(&mut self, unary: &UnaryExpression) -> Result<UnaryExpression, CanonicalizeError> {
    pub fn reduce_unary(&mut self, unary: &UnaryExpression) -> Result<UnaryExpression, ReducerError> {
        let inner = self.reduce_expression(&unary.inner)?;

        self.reducer
            .reduce_unary(unary, inner, unary.op.clone(), self.in_circuit)
        self.reducer.reduce_unary(unary, inner, unary.op.clone())
    }

    pub fn reduce_ternary(&mut self, ternary: &TernaryExpression) -> Result<TernaryExpression, CanonicalizeError> {
    pub fn reduce_ternary(&mut self, ternary: &TernaryExpression) -> Result<TernaryExpression, ReducerError> {
        let condition = self.reduce_expression(&ternary.condition)?;
        let if_true = self.reduce_expression(&ternary.if_true)?;
        let if_false = self.reduce_expression(&ternary.if_false)?;

        self.reducer
            .reduce_ternary(ternary, condition, if_true, if_false, self.in_circuit)
        self.reducer.reduce_ternary(ternary, condition, if_true, if_false)
    }

    pub fn reduce_cast(&mut self, cast: &CastExpression) -> Result<CastExpression, CanonicalizeError> {
    pub fn reduce_cast(&mut self, cast: &CastExpression) -> Result<CastExpression, ReducerError> {
        let inner = self.reduce_expression(&cast.inner)?;
        let target_type = self.reduce_type(&cast.target_type, &cast.span)?;

        self.reducer.reduce_cast(cast, inner, target_type, self.in_circuit)
        self.reducer.reduce_cast(cast, inner, target_type)
    }

    pub fn reduce_array_inline(
        &mut self,
        array_inline: &ArrayInlineExpression,
    ) -> Result<ArrayInlineExpression, CanonicalizeError> {
    ) -> Result<ArrayInlineExpression, ReducerError> {
        let mut elements = vec![];
        for element in array_inline.elements.iter() {
            let reduced_element = match element {

@@ -164,34 +157,29 @@ impl<R: ReconstructingReducer> ReconstructingDirector<R> {
            elements.push(reduced_element);
        }

        self.reducer
            .reduce_array_inline(array_inline, elements, self.in_circuit)
        self.reducer.reduce_array_inline(array_inline, elements)
    }

    pub fn reduce_array_init(
        &mut self,
        array_init: &ArrayInitExpression,
    ) -> Result<ArrayInitExpression, CanonicalizeError> {
    pub fn reduce_array_init(&mut self, array_init: &ArrayInitExpression) -> Result<ArrayInitExpression, ReducerError> {
        let element = self.reduce_expression(&array_init.element)?;

        self.reducer.reduce_array_init(array_init, element, self.in_circuit)
        self.reducer.reduce_array_init(array_init, element)
    }

    pub fn reduce_array_access(
        &mut self,
        array_access: &ArrayAccessExpression,
    ) -> Result<ArrayAccessExpression, CanonicalizeError> {
    ) -> Result<ArrayAccessExpression, ReducerError> {
        let array = self.reduce_expression(&array_access.array)?;
        let index = self.reduce_expression(&array_access.index)?;

        self.reducer
            .reduce_array_access(array_access, array, index, self.in_circuit)
        self.reducer.reduce_array_access(array_access, array, index)
    }

    pub fn reduce_array_range_access(
        &mut self,
        array_range_access: &ArrayRangeAccessExpression,
    ) -> Result<ArrayRangeAccessExpression, CanonicalizeError> {
    ) -> Result<ArrayRangeAccessExpression, ReducerError> {
        let array = self.reduce_expression(&array_range_access.array)?;
        let left = array_range_access
            .left

@@ -205,34 +193,31 @@ impl<R: ReconstructingReducer> ReconstructingDirector<R> {
            .transpose()?;

        self.reducer
            .reduce_array_range_access(array_range_access, array, left, right, self.in_circuit)
            .reduce_array_range_access(array_range_access, array, left, right)
    }

    pub fn reduce_tuple_init(
        &mut self,
        tuple_init: &TupleInitExpression,
    ) -> Result<TupleInitExpression, CanonicalizeError> {
    pub fn reduce_tuple_init(&mut self, tuple_init: &TupleInitExpression) -> Result<TupleInitExpression, ReducerError> {
        let mut elements = vec![];
        for element in tuple_init.elements.iter() {
            elements.push(self.reduce_expression(element)?);
        }

        self.reducer.reduce_tuple_init(tuple_init, elements, self.in_circuit)
        self.reducer.reduce_tuple_init(tuple_init, elements)
    }

    pub fn reduce_tuple_access(
        &mut self,
        tuple_access: &TupleAccessExpression,
    ) -> Result<TupleAccessExpression, CanonicalizeError> {
    ) -> Result<TupleAccessExpression, ReducerError> {
        let tuple = self.reduce_expression(&tuple_access.tuple)?;

        self.reducer.reduce_tuple_access(tuple_access, tuple, self.in_circuit)
        self.reducer.reduce_tuple_access(tuple_access, tuple)
    }

    pub fn reduce_circuit_implied_variable_definition(
        &mut self,
        variable: &CircuitImpliedVariableDefinition,
    ) -> Result<CircuitImpliedVariableDefinition, CanonicalizeError> {
    ) -> Result<CircuitImpliedVariableDefinition, ReducerError> {
        let identifier = self.reduce_identifier(&variable.identifier)?;
        let expression = variable
            .expression

@@ -241,13 +226,13 @@ impl<R: ReconstructingReducer> ReconstructingDirector<R> {
            .transpose()?;

        self.reducer
            .reduce_circuit_implied_variable_definition(variable, identifier, expression, self.in_circuit)
            .reduce_circuit_implied_variable_definition(variable, identifier, expression)
    }

    pub fn reduce_circuit_init(
        &mut self,
        circuit_init: &CircuitInitExpression,
    ) -> Result<CircuitInitExpression, CanonicalizeError> {
    ) -> Result<CircuitInitExpression, ReducerError> {
        let name = self.reduce_identifier(&circuit_init.name)?;

        let mut members = vec![];

@@ -255,33 +240,32 @@ impl<R: ReconstructingReducer> ReconstructingDirector<R> {
            members.push(self.reduce_circuit_implied_variable_definition(member)?);
        }

        self.reducer
            .reduce_circuit_init(circuit_init, name, members, self.in_circuit)
        self.reducer.reduce_circuit_init(circuit_init, name, members)
    }

    pub fn reduce_circuit_member_access(
        &mut self,
        circuit_member_access: &CircuitMemberAccessExpression,
    ) -> Result<CircuitMemberAccessExpression, CanonicalizeError> {
    ) -> Result<CircuitMemberAccessExpression, ReducerError> {
        let circuit = self.reduce_expression(&circuit_member_access.circuit)?;
        let name = self.reduce_identifier(&circuit_member_access.name)?;

        self.reducer
            .reduce_circuit_member_access(circuit_member_access, circuit, name, self.in_circuit)
.reduce_circuit_member_access(circuit_member_access, circuit, name)
|
||||
}
|
||||
|
||||
pub fn reduce_circuit_static_fn_access(
|
||||
&mut self,
|
||||
circuit_static_fn_access: &CircuitStaticFunctionAccessExpression,
|
||||
) -> Result<CircuitStaticFunctionAccessExpression, CanonicalizeError> {
|
||||
) -> Result<CircuitStaticFunctionAccessExpression, ReducerError> {
|
||||
let circuit = self.reduce_expression(&circuit_static_fn_access.circuit)?;
|
||||
let name = self.reduce_identifier(&circuit_static_fn_access.name)?;
|
||||
|
||||
self.reducer
|
||||
.reduce_circuit_static_fn_access(circuit_static_fn_access, circuit, name, self.in_circuit)
|
||||
.reduce_circuit_static_fn_access(circuit_static_fn_access, circuit, name)
|
||||
}
|
||||
|
||||
pub fn reduce_call(&mut self, call: &CallExpression) -> Result<CallExpression, CanonicalizeError> {
|
||||
pub fn reduce_call(&mut self, call: &CallExpression) -> Result<CallExpression, ReducerError> {
|
||||
let function = self.reduce_expression(&call.function)?;
|
||||
|
||||
let mut arguments = vec![];
|
||||
@ -289,11 +273,11 @@ impl<R: ReconstructingReducer> ReconstructingDirector<R> {
|
||||
arguments.push(self.reduce_expression(argument)?);
|
||||
}
|
||||
|
||||
self.reducer.reduce_call(call, function, arguments, self.in_circuit)
|
||||
self.reducer.reduce_call(call, function, arguments)
|
||||
}
|
||||
|
||||
// Statements
|
||||
pub fn reduce_statement(&mut self, statement: &Statement) -> Result<Statement, CanonicalizeError> {
|
||||
pub fn reduce_statement(&mut self, statement: &Statement) -> Result<Statement, ReducerError> {
|
||||
let new = match statement {
|
||||
Statement::Return(return_statement) => Statement::Return(self.reduce_return(&return_statement)?),
|
||||
Statement::Definition(definition) => Statement::Definition(self.reduce_definition(&definition)?),
|
||||
@ -305,26 +289,22 @@ impl<R: ReconstructingReducer> ReconstructingDirector<R> {
|
||||
Statement::Block(block) => Statement::Block(self.reduce_block(&block)?),
|
||||
};
|
||||
|
||||
self.reducer.reduce_statement(statement, new, self.in_circuit)
|
||||
self.reducer.reduce_statement(statement, new)
|
||||
}
|
||||
|
||||
pub fn reduce_return(&mut self, return_statement: &ReturnStatement) -> Result<ReturnStatement, CanonicalizeError> {
|
||||
pub fn reduce_return(&mut self, return_statement: &ReturnStatement) -> Result<ReturnStatement, ReducerError> {
|
||||
let expression = self.reduce_expression(&return_statement.expression)?;
|
||||
|
||||
self.reducer
|
||||
.reduce_return(return_statement, expression, self.in_circuit)
|
||||
self.reducer.reduce_return(return_statement, expression)
|
||||
}
|
||||
|
||||
pub fn reduce_variable_name(&mut self, variable_name: &VariableName) -> Result<VariableName, CanonicalizeError> {
|
||||
pub fn reduce_variable_name(&mut self, variable_name: &VariableName) -> Result<VariableName, ReducerError> {
|
||||
let identifier = self.reduce_identifier(&variable_name.identifier)?;
|
||||
|
||||
self.reducer.reduce_variable_name(variable_name, identifier)
|
||||
}
|
||||
|
||||
pub fn reduce_definition(
|
||||
&mut self,
|
||||
definition: &DefinitionStatement,
|
||||
) -> Result<DefinitionStatement, CanonicalizeError> {
|
||||
pub fn reduce_definition(&mut self, definition: &DefinitionStatement) -> Result<DefinitionStatement, ReducerError> {
|
||||
let mut variable_names = vec![];
|
||||
for variable_name in definition.variable_names.iter() {
|
||||
variable_names.push(self.reduce_variable_name(variable_name)?);
|
||||
@ -338,11 +318,10 @@ impl<R: ReconstructingReducer> ReconstructingDirector<R> {
|
||||
|
||||
let value = self.reduce_expression(&definition.value)?;
|
||||
|
||||
self.reducer
|
||||
.reduce_definition(definition, variable_names, type_, value, self.in_circuit)
|
||||
self.reducer.reduce_definition(definition, variable_names, type_, value)
|
||||
}
|
||||
|
||||
pub fn reduce_assignee_access(&mut self, access: &AssigneeAccess) -> Result<AssigneeAccess, CanonicalizeError> {
|
||||
pub fn reduce_assignee_access(&mut self, access: &AssigneeAccess) -> Result<AssigneeAccess, ReducerError> {
|
||||
let new = match access {
|
||||
AssigneeAccess::ArrayRange(left, right) => {
|
||||
let left = left.as_ref().map(|left| self.reduce_expression(left)).transpose()?;
|
||||
@ -355,10 +334,10 @@ impl<R: ReconstructingReducer> ReconstructingDirector<R> {
|
||||
_ => access.clone(),
|
||||
};
|
||||
|
||||
self.reducer.reduce_assignee_access(access, new, self.in_circuit)
|
||||
self.reducer.reduce_assignee_access(access, new)
|
||||
}
|
||||
|
||||
pub fn reduce_assignee(&mut self, assignee: &Assignee) -> Result<Assignee, CanonicalizeError> {
|
||||
pub fn reduce_assignee(&mut self, assignee: &Assignee) -> Result<Assignee, ReducerError> {
|
||||
let identifier = self.reduce_identifier(&assignee.identifier)?;
|
||||
|
||||
let mut accesses = vec![];
|
||||
@ -366,21 +345,20 @@ impl<R: ReconstructingReducer> ReconstructingDirector<R> {
|
||||
accesses.push(self.reduce_assignee_access(access)?);
|
||||
}
|
||||
|
||||
self.reducer
|
||||
.reduce_assignee(assignee, identifier, accesses, self.in_circuit)
|
||||
self.reducer.reduce_assignee(assignee, identifier, accesses)
|
||||
}
|
||||
|
||||
pub fn reduce_assign(&mut self, assign: &AssignStatement) -> Result<AssignStatement, CanonicalizeError> {
|
||||
pub fn reduce_assign(&mut self, assign: &AssignStatement) -> Result<AssignStatement, ReducerError> {
|
||||
let assignee = self.reduce_assignee(&assign.assignee)?;
|
||||
let value = self.reduce_expression(&assign.value)?;
|
||||
|
||||
self.reducer.reduce_assign(assign, assignee, value, self.in_circuit)
|
||||
self.reducer.reduce_assign(assign, assignee, value)
|
||||
}
|
||||
|
||||
pub fn reduce_conditional(
|
||||
&mut self,
|
||||
conditional: &ConditionalStatement,
|
||||
) -> Result<ConditionalStatement, CanonicalizeError> {
|
||||
) -> Result<ConditionalStatement, ReducerError> {
|
||||
let condition = self.reduce_expression(&conditional.condition)?;
|
||||
let block = self.reduce_block(&conditional.block)?;
|
||||
let next = conditional
|
||||
@ -389,27 +367,22 @@ impl<R: ReconstructingReducer> ReconstructingDirector<R> {
|
||||
.map(|condition| self.reduce_statement(condition))
|
||||
.transpose()?;
|
||||
|
||||
self.reducer
|
||||
.reduce_conditional(conditional, condition, block, next, self.in_circuit)
|
||||
self.reducer.reduce_conditional(conditional, condition, block, next)
|
||||
}
|
||||
|
||||
pub fn reduce_iteration(
|
||||
&mut self,
|
||||
iteration: &IterationStatement,
|
||||
) -> Result<IterationStatement, CanonicalizeError> {
|
||||
pub fn reduce_iteration(&mut self, iteration: &IterationStatement) -> Result<IterationStatement, ReducerError> {
|
||||
let variable = self.reduce_identifier(&iteration.variable)?;
|
||||
let start = self.reduce_expression(&iteration.start)?;
|
||||
let stop = self.reduce_expression(&iteration.stop)?;
|
||||
let block = self.reduce_block(&iteration.block)?;
|
||||
|
||||
self.reducer
|
||||
.reduce_iteration(iteration, variable, start, stop, block, self.in_circuit)
|
||||
self.reducer.reduce_iteration(iteration, variable, start, stop, block)
|
||||
}
|
||||
|
||||
pub fn reduce_console(
|
||||
&mut self,
|
||||
console_function_call: &ConsoleStatement,
|
||||
) -> Result<ConsoleStatement, CanonicalizeError> {
|
||||
) -> Result<ConsoleStatement, ReducerError> {
|
||||
let function = match &console_function_call.function {
|
||||
ConsoleFunction::Assert(expression) => ConsoleFunction::Assert(self.reduce_expression(expression)?),
|
||||
ConsoleFunction::Debug(format) | ConsoleFunction::Error(format) | ConsoleFunction::Log(format) => {
|
||||
@ -428,35 +401,33 @@ impl<R: ReconstructingReducer> ReconstructingDirector<R> {
|
||||
ConsoleFunction::Debug(_) => ConsoleFunction::Debug(formatted),
|
||||
ConsoleFunction::Error(_) => ConsoleFunction::Error(formatted),
|
||||
ConsoleFunction::Log(_) => ConsoleFunction::Log(formatted),
|
||||
_ => unimplemented!(), // impossible
|
||||
_ => return Err(ReducerError::impossible_console_assert_call(&format.span)),
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
self.reducer
|
||||
.reduce_console(console_function_call, function, self.in_circuit)
|
||||
self.reducer.reduce_console(console_function_call, function)
|
||||
}
|
||||
|
||||
pub fn reduce_expression_statement(
|
||||
&mut self,
|
||||
expression: &ExpressionStatement,
|
||||
) -> Result<ExpressionStatement, CanonicalizeError> {
|
||||
) -> Result<ExpressionStatement, ReducerError> {
|
||||
let inner_expression = self.reduce_expression(&expression.expression)?;
|
||||
self.reducer
|
||||
.reduce_expression_statement(expression, inner_expression, self.in_circuit)
|
||||
self.reducer.reduce_expression_statement(expression, inner_expression)
|
||||
}
|
||||
|
||||
pub fn reduce_block(&mut self, block: &Block) -> Result<Block, CanonicalizeError> {
|
||||
pub fn reduce_block(&mut self, block: &Block) -> Result<Block, ReducerError> {
|
||||
let mut statements = vec![];
|
||||
for statement in block.statements.iter() {
|
||||
statements.push(self.reduce_statement(statement)?);
|
||||
}
|
||||
|
||||
self.reducer.reduce_block(block, statements, self.in_circuit)
|
||||
self.reducer.reduce_block(block, statements)
|
||||
}
|
||||
|
||||
// Program
|
||||
pub fn reduce_program(&mut self, program: &Program) -> Result<Program, CanonicalizeError> {
|
||||
pub fn reduce_program(&mut self, program: &Program) -> Result<Program, ReducerError> {
|
||||
let mut inputs = vec![];
|
||||
for input in program.expected_input.iter() {
|
||||
inputs.push(self.reduce_function_input(input)?);
|
||||
@ -468,31 +439,37 @@ impl<R: ReconstructingReducer> ReconstructingDirector<R> {
|
||||
}
|
||||
|
||||
let mut circuits = IndexMap::new();
|
||||
self.reducer.swap_in_circuit();
|
||||
for (identifier, circuit) in program.circuits.iter() {
|
||||
circuits.insert(self.reduce_identifier(identifier)?, self.reduce_circuit(circuit)?);
|
||||
}
|
||||
self.reducer.swap_in_circuit();
|
||||
|
||||
let mut functions = IndexMap::new();
|
||||
for (identifier, function) in program.functions.iter() {
|
||||
functions.insert(self.reduce_identifier(identifier)?, self.reduce_function(function)?);
|
||||
}
|
||||
|
||||
let mut global_consts = IndexMap::new();
|
||||
for (name, definition) in program.global_consts.iter() {
|
||||
global_consts.insert(name.clone(), self.reduce_definition(&definition)?);
|
||||
}
|
||||
|
||||
self.reducer
|
||||
.reduce_program(program, inputs, imports, circuits, functions)
|
||||
.reduce_program(program, inputs, imports, circuits, functions, global_consts)
|
||||
}
|
||||
|
||||
pub fn reduce_function_input_variable(
|
||||
&mut self,
|
||||
variable: &FunctionInputVariable,
|
||||
) -> Result<FunctionInputVariable, CanonicalizeError> {
|
||||
) -> Result<FunctionInputVariable, ReducerError> {
|
||||
let identifier = self.reduce_identifier(&variable.identifier)?;
|
||||
let type_ = self.reduce_type(&variable.type_, &variable.span)?;
|
||||
|
||||
self.reducer
|
||||
.reduce_function_input_variable(variable, identifier, type_, self.in_circuit)
|
||||
self.reducer.reduce_function_input_variable(variable, identifier, type_)
|
||||
}
|
||||
|
||||
pub fn reduce_function_input(&mut self, input: &FunctionInput) -> Result<FunctionInput, CanonicalizeError> {
|
||||
pub fn reduce_function_input(&mut self, input: &FunctionInput) -> Result<FunctionInput, ReducerError> {
|
||||
let new = match input {
|
||||
FunctionInput::Variable(function_input_variable) => {
|
||||
FunctionInput::Variable(self.reduce_function_input_variable(function_input_variable)?)
|
||||
@ -500,13 +477,13 @@ impl<R: ReconstructingReducer> ReconstructingDirector<R> {
|
||||
_ => input.clone(),
|
||||
};
|
||||
|
||||
self.reducer.reduce_function_input(input, new, self.in_circuit)
|
||||
self.reducer.reduce_function_input(input, new)
|
||||
}
|
||||
|
||||
pub fn reduce_package_or_packages(
|
||||
&mut self,
|
||||
package_or_packages: &PackageOrPackages,
|
||||
) -> Result<PackageOrPackages, CanonicalizeError> {
|
||||
) -> Result<PackageOrPackages, ReducerError> {
|
||||
let new = match package_or_packages {
|
||||
PackageOrPackages::Package(package) => PackageOrPackages::Package(Package {
|
||||
name: self.reduce_identifier(&package.name)?,
|
||||
@ -523,17 +500,13 @@ impl<R: ReconstructingReducer> ReconstructingDirector<R> {
|
||||
self.reducer.reduce_package_or_packages(package_or_packages, new)
|
||||
}
|
||||
|
||||
pub fn reduce_import(&mut self, import: &ImportStatement) -> Result<ImportStatement, CanonicalizeError> {
|
||||
pub fn reduce_import(&mut self, import: &ImportStatement) -> Result<ImportStatement, ReducerError> {
|
||||
let package_or_packages = self.reduce_package_or_packages(&import.package_or_packages)?;
|
||||
|
||||
self.reducer.reduce_import(import, package_or_packages)
|
||||
}
|
||||
|
||||
pub fn reduce_circuit_member(
|
||||
&mut self,
|
||||
circuit_member: &CircuitMember,
|
||||
) -> Result<CircuitMember, CanonicalizeError> {
|
||||
self.in_circuit = !self.in_circuit;
|
||||
pub fn reduce_circuit_member(&mut self, circuit_member: &CircuitMember) -> Result<CircuitMember, ReducerError> {
|
||||
let new = match circuit_member {
|
||||
CircuitMember::CircuitVariable(identifier, type_) => CircuitMember::CircuitVariable(
|
||||
self.reduce_identifier(&identifier)?,
|
||||
@ -543,12 +516,11 @@ impl<R: ReconstructingReducer> ReconstructingDirector<R> {
|
||||
CircuitMember::CircuitFunction(self.reduce_function(&function)?)
|
||||
}
|
||||
};
|
||||
self.in_circuit = !self.in_circuit;
|
||||
|
||||
self.reducer.reduce_circuit_member(circuit_member, new)
|
||||
}
|
||||
|
||||
pub fn reduce_circuit(&mut self, circuit: &Circuit) -> Result<Circuit, CanonicalizeError> {
|
||||
pub fn reduce_circuit(&mut self, circuit: &Circuit) -> Result<Circuit, ReducerError> {
|
||||
let circuit_name = self.reduce_identifier(&circuit.circuit_name)?;
|
||||
|
||||
let mut members = vec![];
|
||||
@ -559,13 +531,13 @@ impl<R: ReconstructingReducer> ReconstructingDirector<R> {
|
||||
self.reducer.reduce_circuit(circuit, circuit_name, members)
|
||||
}
|
||||
|
||||
fn reduce_annotation(&mut self, annotation: &Annotation) -> Result<Annotation, CanonicalizeError> {
|
||||
fn reduce_annotation(&mut self, annotation: &Annotation) -> Result<Annotation, ReducerError> {
|
||||
let name = self.reduce_identifier(&annotation.name)?;
|
||||
|
||||
self.reducer.reduce_annotation(annotation, name)
|
||||
}
|
||||
|
||||
pub fn reduce_function(&mut self, function: &Function) -> Result<Function, CanonicalizeError> {
|
||||
pub fn reduce_function(&mut self, function: &Function) -> Result<Function, ReducerError> {
|
||||
let identifier = self.reduce_identifier(&function.identifier)?;
|
||||
|
||||
let mut annotations = vec![];
|
||||
@ -586,14 +558,7 @@ impl<R: ReconstructingReducer> ReconstructingDirector<R> {
|
||||
|
||||
let block = self.reduce_block(&function.block)?;
|
||||
|
||||
self.reducer.reduce_function(
|
||||
function,
|
||||
identifier,
|
||||
annotations,
|
||||
inputs,
|
||||
output,
|
||||
block,
|
||||
self.in_circuit,
|
||||
)
|
||||
self.reducer
|
||||
.reduce_function(function, identifier, annotations, inputs, output, block)
|
||||
}
|
||||
}
|
||||
|
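Every `self.reducer.*` call in the director above has dropped its trailing `self.in_circuit` argument; the flag now lives on the reducer itself, and the director flips it with `swap_in_circuit()` around circuit reduction (see `reduce_program` and `reduce_circuit_member` earlier in this diff). A stand-alone sketch of that toggle pattern, using hypothetical stand-in types rather than the leo-ast ones:

// Stand-alone illustration of the new circuit-context toggle; `Pass` and `Ctx`
// are hypothetical stand-ins, not leo-ast types.
trait Pass {
    fn in_circuit(&self) -> bool;
    fn swap_in_circuit(&mut self);
}

struct Ctx {
    in_circuit: bool,
}

impl Pass for Ctx {
    fn in_circuit(&self) -> bool {
        self.in_circuit
    }

    fn swap_in_circuit(&mut self) {
        self.in_circuit = !self.in_circuit;
    }
}

fn main() {
    let mut ctx = Ctx { in_circuit: false };

    ctx.swap_in_circuit();      // director enters the circuits of a program
    assert!(ctx.in_circuit());  // nested reductions can query the flag
    ctx.swap_in_circuit();      // director returns to top-level functions
    assert!(!ctx.in_circuit());
}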
@ -20,34 +20,26 @@ use indexmap::IndexMap;
// Needed to fix clippy bug.
#[allow(clippy::redundant_closure)]
pub trait ReconstructingReducer {
    fn reduce_type(
        &mut self,
        _type_: &Type,
        new: Type,
        _in_circuit: bool,
        _span: &Span,
    ) -> Result<Type, CanonicalizeError> {
    fn in_circuit(&self) -> bool;
    fn swap_in_circuit(&mut self);

    fn reduce_type(&mut self, _type_: &Type, new: Type, _span: &Span) -> Result<Type, ReducerError> {
        Ok(new)
    }

||||
// Expressions
|
||||
fn reduce_expression(
|
||||
&mut self,
|
||||
_expression: &Expression,
|
||||
new: Expression,
|
||||
_in_circuit: bool,
|
||||
) -> Result<Expression, CanonicalizeError> {
|
||||
fn reduce_expression(&mut self, _expression: &Expression, new: Expression) -> Result<Expression, ReducerError> {
|
||||
Ok(new)
|
||||
}
|
||||
|
||||
fn reduce_identifier(&mut self, identifier: &Identifier) -> Result<Identifier, CanonicalizeError> {
|
||||
fn reduce_identifier(&mut self, identifier: &Identifier) -> Result<Identifier, ReducerError> {
|
||||
Ok(Identifier {
|
||||
name: identifier.name.clone(),
|
||||
span: identifier.span.clone(),
|
||||
})
|
||||
}
|
||||
|
||||
fn reduce_group_tuple(&mut self, group_tuple: &GroupTuple) -> Result<GroupTuple, CanonicalizeError> {
|
||||
fn reduce_group_tuple(&mut self, group_tuple: &GroupTuple) -> Result<GroupTuple, ReducerError> {
|
||||
Ok(GroupTuple {
|
||||
x: group_tuple.x.clone(),
|
||||
y: group_tuple.y.clone(),
|
||||
@ -55,11 +47,7 @@ pub trait ReconstructingReducer {
|
||||
})
|
||||
}
|
||||
|
||||
fn reduce_group_value(
|
||||
&mut self,
|
||||
_group_value: &GroupValue,
|
||||
new: GroupValue,
|
||||
) -> Result<GroupValue, CanonicalizeError> {
|
||||
fn reduce_group_value(&mut self, _group_value: &GroupValue, new: GroupValue) -> Result<GroupValue, ReducerError> {
|
||||
Ok(new)
|
||||
}
|
||||
|
||||
@ -67,7 +55,7 @@ pub trait ReconstructingReducer {
|
||||
&mut self,
|
||||
_value: &ValueExpression,
|
||||
new: ValueExpression,
|
||||
) -> Result<ValueExpression, CanonicalizeError> {
|
||||
) -> Result<ValueExpression, ReducerError> {
|
||||
Ok(new)
|
||||
}
|
||||
|
||||
@ -77,8 +65,7 @@ pub trait ReconstructingReducer {
|
||||
left: Expression,
|
||||
right: Expression,
|
||||
op: BinaryOperation,
|
||||
_in_circuit: bool,
|
||||
) -> Result<BinaryExpression, CanonicalizeError> {
|
||||
) -> Result<BinaryExpression, ReducerError> {
|
||||
Ok(BinaryExpression {
|
||||
left: Box::new(left),
|
||||
right: Box::new(right),
|
||||
@ -92,8 +79,7 @@ pub trait ReconstructingReducer {
|
||||
unary: &UnaryExpression,
|
||||
inner: Expression,
|
||||
op: UnaryOperation,
|
||||
_in_circuit: bool,
|
||||
) -> Result<UnaryExpression, CanonicalizeError> {
|
||||
) -> Result<UnaryExpression, ReducerError> {
|
||||
Ok(UnaryExpression {
|
||||
inner: Box::new(inner),
|
||||
op,
|
||||
@ -107,8 +93,7 @@ pub trait ReconstructingReducer {
|
||||
condition: Expression,
|
||||
if_true: Expression,
|
||||
if_false: Expression,
|
||||
_in_circuit: bool,
|
||||
) -> Result<TernaryExpression, CanonicalizeError> {
|
||||
) -> Result<TernaryExpression, ReducerError> {
|
||||
Ok(TernaryExpression {
|
||||
condition: Box::new(condition),
|
||||
if_true: Box::new(if_true),
|
||||
@ -122,8 +107,7 @@ pub trait ReconstructingReducer {
|
||||
cast: &CastExpression,
|
||||
inner: Expression,
|
||||
target_type: Type,
|
||||
_in_circuit: bool,
|
||||
) -> Result<CastExpression, CanonicalizeError> {
|
||||
) -> Result<CastExpression, ReducerError> {
|
||||
Ok(CastExpression {
|
||||
inner: Box::new(inner),
|
||||
target_type,
|
||||
@ -135,8 +119,7 @@ pub trait ReconstructingReducer {
|
||||
&mut self,
|
||||
array_inline: &ArrayInlineExpression,
|
||||
elements: Vec<SpreadOrExpression>,
|
||||
_in_circuit: bool,
|
||||
) -> Result<ArrayInlineExpression, CanonicalizeError> {
|
||||
) -> Result<ArrayInlineExpression, ReducerError> {
|
||||
Ok(ArrayInlineExpression {
|
||||
elements,
|
||||
span: array_inline.span.clone(),
|
||||
@ -147,8 +130,7 @@ pub trait ReconstructingReducer {
|
||||
&mut self,
|
||||
array_init: &ArrayInitExpression,
|
||||
element: Expression,
|
||||
_in_circuit: bool,
|
||||
) -> Result<ArrayInitExpression, CanonicalizeError> {
|
||||
) -> Result<ArrayInitExpression, ReducerError> {
|
||||
Ok(ArrayInitExpression {
|
||||
element: Box::new(element),
|
||||
dimensions: array_init.dimensions.clone(),
|
||||
@ -161,8 +143,7 @@ pub trait ReconstructingReducer {
|
||||
array_access: &ArrayAccessExpression,
|
||||
array: Expression,
|
||||
index: Expression,
|
||||
_in_circuit: bool,
|
||||
) -> Result<ArrayAccessExpression, CanonicalizeError> {
|
||||
) -> Result<ArrayAccessExpression, ReducerError> {
|
||||
Ok(ArrayAccessExpression {
|
||||
array: Box::new(array),
|
||||
index: Box::new(index),
|
||||
@ -176,8 +157,7 @@ pub trait ReconstructingReducer {
|
||||
array: Expression,
|
||||
left: Option<Expression>,
|
||||
right: Option<Expression>,
|
||||
_in_circuit: bool,
|
||||
) -> Result<ArrayRangeAccessExpression, CanonicalizeError> {
|
||||
) -> Result<ArrayRangeAccessExpression, ReducerError> {
|
||||
Ok(ArrayRangeAccessExpression {
|
||||
array: Box::new(array),
|
||||
left: left.map(|expr| Box::new(expr)),
|
||||
@ -190,8 +170,7 @@ pub trait ReconstructingReducer {
|
||||
&mut self,
|
||||
tuple_init: &TupleInitExpression,
|
||||
elements: Vec<Expression>,
|
||||
_in_circuit: bool,
|
||||
) -> Result<TupleInitExpression, CanonicalizeError> {
|
||||
) -> Result<TupleInitExpression, ReducerError> {
|
||||
Ok(TupleInitExpression {
|
||||
elements,
|
||||
span: tuple_init.span.clone(),
|
||||
@ -202,8 +181,7 @@ pub trait ReconstructingReducer {
|
||||
&mut self,
|
||||
tuple_access: &TupleAccessExpression,
|
||||
tuple: Expression,
|
||||
_in_circuit: bool,
|
||||
) -> Result<TupleAccessExpression, CanonicalizeError> {
|
||||
) -> Result<TupleAccessExpression, ReducerError> {
|
||||
Ok(TupleAccessExpression {
|
||||
tuple: Box::new(tuple),
|
||||
index: tuple_access.index.clone(),
|
||||
@ -216,8 +194,7 @@ pub trait ReconstructingReducer {
|
||||
_variable: &CircuitImpliedVariableDefinition,
|
||||
identifier: Identifier,
|
||||
expression: Option<Expression>,
|
||||
_in_circuit: bool,
|
||||
) -> Result<CircuitImpliedVariableDefinition, CanonicalizeError> {
|
||||
) -> Result<CircuitImpliedVariableDefinition, ReducerError> {
|
||||
Ok(CircuitImpliedVariableDefinition { identifier, expression })
|
||||
}
|
||||
|
||||
@ -226,8 +203,7 @@ pub trait ReconstructingReducer {
|
||||
circuit_init: &CircuitInitExpression,
|
||||
name: Identifier,
|
||||
members: Vec<CircuitImpliedVariableDefinition>,
|
||||
_in_circuit: bool,
|
||||
) -> Result<CircuitInitExpression, CanonicalizeError> {
|
||||
) -> Result<CircuitInitExpression, ReducerError> {
|
||||
Ok(CircuitInitExpression {
|
||||
name,
|
||||
members,
|
||||
@ -240,8 +216,7 @@ pub trait ReconstructingReducer {
|
||||
circuit_member_access: &CircuitMemberAccessExpression,
|
||||
circuit: Expression,
|
||||
name: Identifier,
|
||||
_in_circuit: bool,
|
||||
) -> Result<CircuitMemberAccessExpression, CanonicalizeError> {
|
||||
) -> Result<CircuitMemberAccessExpression, ReducerError> {
|
||||
Ok(CircuitMemberAccessExpression {
|
||||
circuit: Box::new(circuit),
|
||||
name,
|
||||
@ -254,8 +229,7 @@ pub trait ReconstructingReducer {
|
||||
circuit_static_fn_access: &CircuitStaticFunctionAccessExpression,
|
||||
circuit: Expression,
|
||||
name: Identifier,
|
||||
_in_circuit: bool,
|
||||
) -> Result<CircuitStaticFunctionAccessExpression, CanonicalizeError> {
|
||||
) -> Result<CircuitStaticFunctionAccessExpression, ReducerError> {
|
||||
Ok(CircuitStaticFunctionAccessExpression {
|
||||
circuit: Box::new(circuit),
|
||||
name,
|
||||
@ -268,8 +242,7 @@ pub trait ReconstructingReducer {
|
||||
call: &CallExpression,
|
||||
function: Expression,
|
||||
arguments: Vec<Expression>,
|
||||
_in_circuit: bool,
|
||||
) -> Result<CallExpression, CanonicalizeError> {
|
||||
) -> Result<CallExpression, ReducerError> {
|
||||
Ok(CallExpression {
|
||||
function: Box::new(function),
|
||||
arguments,
|
||||
@ -278,12 +251,7 @@ pub trait ReconstructingReducer {
|
||||
}
|
||||
|
||||
// Statements
|
||||
fn reduce_statement(
|
||||
&mut self,
|
||||
_statement: &Statement,
|
||||
new: Statement,
|
||||
_in_circuit: bool,
|
||||
) -> Result<Statement, CanonicalizeError> {
|
||||
fn reduce_statement(&mut self, _statement: &Statement, new: Statement) -> Result<Statement, ReducerError> {
|
||||
Ok(new)
|
||||
}
|
||||
|
||||
@ -291,8 +259,7 @@ pub trait ReconstructingReducer {
|
||||
&mut self,
|
||||
return_statement: &ReturnStatement,
|
||||
expression: Expression,
|
||||
_in_circuit: bool,
|
||||
) -> Result<ReturnStatement, CanonicalizeError> {
|
||||
) -> Result<ReturnStatement, ReducerError> {
|
||||
Ok(ReturnStatement {
|
||||
expression,
|
||||
span: return_statement.span.clone(),
|
||||
@ -303,7 +270,7 @@ pub trait ReconstructingReducer {
|
||||
&mut self,
|
||||
variable_name: &VariableName,
|
||||
identifier: Identifier,
|
||||
) -> Result<VariableName, CanonicalizeError> {
|
||||
) -> Result<VariableName, ReducerError> {
|
||||
Ok(VariableName {
|
||||
mutable: variable_name.mutable,
|
||||
identifier,
|
||||
@ -317,8 +284,7 @@ pub trait ReconstructingReducer {
|
||||
variable_names: Vec<VariableName>,
|
||||
type_: Option<Type>,
|
||||
value: Expression,
|
||||
_in_circuit: bool,
|
||||
) -> Result<DefinitionStatement, CanonicalizeError> {
|
||||
) -> Result<DefinitionStatement, ReducerError> {
|
||||
Ok(DefinitionStatement {
|
||||
declaration_type: definition.declaration_type.clone(),
|
||||
variable_names,
|
||||
@ -332,8 +298,7 @@ pub trait ReconstructingReducer {
|
||||
&mut self,
|
||||
_access: &AssigneeAccess,
|
||||
new: AssigneeAccess,
|
||||
_in_circuit: bool,
|
||||
) -> Result<AssigneeAccess, CanonicalizeError> {
|
||||
) -> Result<AssigneeAccess, ReducerError> {
|
||||
Ok(new)
|
||||
}
|
||||
|
||||
@ -342,8 +307,7 @@ pub trait ReconstructingReducer {
|
||||
assignee: &Assignee,
|
||||
identifier: Identifier,
|
||||
accesses: Vec<AssigneeAccess>,
|
||||
_in_circuit: bool,
|
||||
) -> Result<Assignee, CanonicalizeError> {
|
||||
) -> Result<Assignee, ReducerError> {
|
||||
Ok(Assignee {
|
||||
identifier,
|
||||
accesses,
|
||||
@ -356,8 +320,7 @@ pub trait ReconstructingReducer {
|
||||
assign: &AssignStatement,
|
||||
assignee: Assignee,
|
||||
value: Expression,
|
||||
_in_circuit: bool,
|
||||
) -> Result<AssignStatement, CanonicalizeError> {
|
||||
) -> Result<AssignStatement, ReducerError> {
|
||||
Ok(AssignStatement {
|
||||
operation: assign.operation.clone(),
|
||||
assignee,
|
||||
@ -372,8 +335,7 @@ pub trait ReconstructingReducer {
|
||||
condition: Expression,
|
||||
block: Block,
|
||||
statement: Option<Statement>,
|
||||
_in_circuit: bool,
|
||||
) -> Result<ConditionalStatement, CanonicalizeError> {
|
||||
) -> Result<ConditionalStatement, ReducerError> {
|
||||
Ok(ConditionalStatement {
|
||||
condition,
|
||||
block,
|
||||
@ -389,8 +351,7 @@ pub trait ReconstructingReducer {
|
||||
start: Expression,
|
||||
stop: Expression,
|
||||
block: Block,
|
||||
_in_circuit: bool,
|
||||
) -> Result<IterationStatement, CanonicalizeError> {
|
||||
) -> Result<IterationStatement, ReducerError> {
|
||||
Ok(IterationStatement {
|
||||
variable,
|
||||
start,
|
||||
@ -404,8 +365,7 @@ pub trait ReconstructingReducer {
|
||||
&mut self,
|
||||
console: &ConsoleStatement,
|
||||
function: ConsoleFunction,
|
||||
_in_circuit: bool,
|
||||
) -> Result<ConsoleStatement, CanonicalizeError> {
|
||||
) -> Result<ConsoleStatement, ReducerError> {
|
||||
Ok(ConsoleStatement {
|
||||
function,
|
||||
span: console.span.clone(),
|
||||
@ -416,20 +376,14 @@ pub trait ReconstructingReducer {
|
||||
&mut self,
|
||||
expression_statement: &ExpressionStatement,
|
||||
expression: Expression,
|
||||
_in_circuit: bool,
|
||||
) -> Result<ExpressionStatement, CanonicalizeError> {
|
||||
) -> Result<ExpressionStatement, ReducerError> {
|
||||
Ok(ExpressionStatement {
|
||||
expression,
|
||||
span: expression_statement.span.clone(),
|
||||
})
|
||||
}
|
||||
|
||||
fn reduce_block(
|
||||
&mut self,
|
||||
block: &Block,
|
||||
statements: Vec<Statement>,
|
||||
_in_circuit: bool,
|
||||
) -> Result<Block, CanonicalizeError> {
|
||||
fn reduce_block(&mut self, block: &Block, statements: Vec<Statement>) -> Result<Block, ReducerError> {
|
||||
Ok(Block {
|
||||
statements,
|
||||
span: block.span.clone(),
|
||||
@ -444,13 +398,15 @@ pub trait ReconstructingReducer {
|
||||
imports: Vec<ImportStatement>,
|
||||
circuits: IndexMap<Identifier, Circuit>,
|
||||
functions: IndexMap<Identifier, Function>,
|
||||
) -> Result<Program, CanonicalizeError> {
|
||||
global_consts: IndexMap<String, DefinitionStatement>,
|
||||
) -> Result<Program, ReducerError> {
|
||||
Ok(Program {
|
||||
name: program.name.clone(),
|
||||
expected_input,
|
||||
imports,
|
||||
circuits,
|
||||
functions,
|
||||
global_consts,
|
||||
})
|
||||
}
|
||||
|
||||
@ -459,8 +415,7 @@ pub trait ReconstructingReducer {
|
||||
variable: &FunctionInputVariable,
|
||||
identifier: Identifier,
|
||||
type_: Type,
|
||||
_in_circuit: bool,
|
||||
) -> Result<FunctionInputVariable, CanonicalizeError> {
|
||||
) -> Result<FunctionInputVariable, ReducerError> {
|
||||
Ok(FunctionInputVariable {
|
||||
identifier,
|
||||
const_: variable.const_,
|
||||
@ -474,8 +429,7 @@ pub trait ReconstructingReducer {
|
||||
&mut self,
|
||||
_input: &FunctionInput,
|
||||
new: FunctionInput,
|
||||
_in_circuit: bool,
|
||||
) -> Result<FunctionInput, CanonicalizeError> {
|
||||
) -> Result<FunctionInput, ReducerError> {
|
||||
Ok(new)
|
||||
}
|
||||
|
||||
@ -483,7 +437,7 @@ pub trait ReconstructingReducer {
|
||||
&mut self,
|
||||
_package_or_packages: &PackageOrPackages,
|
||||
new: PackageOrPackages,
|
||||
) -> Result<PackageOrPackages, CanonicalizeError> {
|
||||
) -> Result<PackageOrPackages, ReducerError> {
|
||||
Ok(new)
|
||||
}
|
||||
|
||||
@ -491,7 +445,7 @@ pub trait ReconstructingReducer {
|
||||
&mut self,
|
||||
import: &ImportStatement,
|
||||
package_or_packages: PackageOrPackages,
|
||||
) -> Result<ImportStatement, CanonicalizeError> {
|
||||
) -> Result<ImportStatement, ReducerError> {
|
||||
Ok(ImportStatement {
|
||||
package_or_packages,
|
||||
span: import.span.clone(),
|
||||
@ -502,7 +456,7 @@ pub trait ReconstructingReducer {
|
||||
&mut self,
|
||||
_circuit_member: &CircuitMember,
|
||||
new: CircuitMember,
|
||||
) -> Result<CircuitMember, CanonicalizeError> {
|
||||
) -> Result<CircuitMember, ReducerError> {
|
||||
Ok(new)
|
||||
}
|
||||
|
||||
@ -511,15 +465,11 @@ pub trait ReconstructingReducer {
|
||||
_circuit: &Circuit,
|
||||
circuit_name: Identifier,
|
||||
members: Vec<CircuitMember>,
|
||||
) -> Result<Circuit, CanonicalizeError> {
|
||||
) -> Result<Circuit, ReducerError> {
|
||||
Ok(Circuit { circuit_name, members })
|
||||
}
|
||||
|
||||
fn reduce_annotation(
|
||||
&mut self,
|
||||
annotation: &Annotation,
|
||||
name: Identifier,
|
||||
) -> Result<Annotation, CanonicalizeError> {
|
||||
fn reduce_annotation(&mut self, annotation: &Annotation, name: Identifier) -> Result<Annotation, ReducerError> {
|
||||
Ok(Annotation {
|
||||
span: annotation.span.clone(),
|
||||
name,
|
||||
@ -536,8 +486,7 @@ pub trait ReconstructingReducer {
|
||||
input: Vec<FunctionInput>,
|
||||
output: Option<Type>,
|
||||
block: Block,
|
||||
_in_circuit: bool,
|
||||
) -> Result<Function, CanonicalizeError> {
|
||||
) -> Result<Function, ReducerError> {
|
||||
Ok(Function {
|
||||
identifier,
|
||||
annotations,
|
||||
|
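With the rework above, `in_circuit()` and `swap_in_circuit()` are the only required methods; every other method keeps a default body that rebuilds the node unchanged, so a pass overrides just what it needs. A minimal implementor sketch, assuming the signatures shown in this diff and that the items are importable from the leo_ast crate root (the struct name and the statement counter are illustrative only, not part of this change):

use leo_ast::{ReconstructingReducer, ReducerError, Statement};

// Hypothetical pass: counts statements while leaving the AST untouched.
#[derive(Default)]
struct CountingReducer {
    in_circuit: bool,
    statements_seen: usize,
}

impl ReconstructingReducer for CountingReducer {
    // The two methods this change makes mandatory.
    fn in_circuit(&self) -> bool {
        self.in_circuit
    }

    fn swap_in_circuit(&mut self) {
        self.in_circuit = !self.in_circuit;
    }

    // Optional override; the trait default simply returns `new` unchanged.
    fn reduce_statement(&mut self, _statement: &Statement, new: Statement) -> Result<Statement, ReducerError> {
        self.statements_seen += 1;
        Ok(new)
    }
}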
@ -53,6 +53,9 @@ version = "1.4.0"
path = "../synthesizer"
version = "1.4.0"

[dependencies.tendril]
version = "0.4"

[dependencies.snarkvm-curves]
version = "0.2.2"
default-features = false

@ -3,6 +3,7 @@
|
||||
"expected_input": [],
|
||||
"imports": [],
|
||||
"circuits": {},
|
||||
"global_consts": {},
|
||||
"functions": {
|
||||
"{\"name\":\"main\",\"span\":\"{\\\"line_start\\\":1,\\\"line_stop\\\":1,\\\"col_start\\\":10,\\\"col_stop\\\":14,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\"function main(a: [group; (2, 1)]) {\\\"}\"}": {
|
||||
"annotations": [],
|
||||
|
@ -126,6 +126,7 @@
|
||||
]
|
||||
}
|
||||
},
|
||||
"global_consts": {},
|
||||
"functions": {
|
||||
"{\"name\":\"main\",\"span\":\"{\\\"line_start\\\":13,\\\"line_stop\\\":13,\\\"col_start\\\":10,\\\"col_stop\\\":14,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\"function main() {\\\"}\"}": {
|
||||
"annotations": [],
|
||||
|
@ -2,6 +2,7 @@
|
||||
"name": "",
|
||||
"expected_input": [],
|
||||
"imports": [],
|
||||
"global_consts": {},
|
||||
"circuits": {
|
||||
"{\"name\":\"Foo\",\"span\":\"{\\\"line_start\\\":1,\\\"line_stop\\\":1,\\\"col_start\\\":9,\\\"col_stop\\\":12,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\"circuit Foo {\\\"}\"}": {
|
||||
"circuit_name": "{\"name\":\"Foo\",\"span\":\"{\\\"line_start\\\":1,\\\"line_stop\\\":1,\\\"col_start\\\":9,\\\"col_stop\\\":12,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\"circuit Foo {\\\"}\"}",
|
||||
@ -225,7 +226,9 @@
|
||||
}
|
||||
}
|
||||
],
|
||||
"type_": null,
|
||||
"type_": {
|
||||
"IntegerType": "U32"
|
||||
},
|
||||
"value": {
|
||||
"Value": {
|
||||
"Integer": [
|
||||
@ -274,7 +277,8 @@
|
||||
},
|
||||
"right": {
|
||||
"Value": {
|
||||
"Implicit": [
|
||||
"Integer": [
|
||||
"U32",
|
||||
"20",
|
||||
{
|
||||
"line_start": 12,
|
||||
@ -371,7 +375,18 @@
|
||||
}
|
||||
}
|
||||
],
|
||||
"type_": null,
|
||||
"type_": {
|
||||
"Array": [
|
||||
{
|
||||
"IntegerType": "U8"
|
||||
},
|
||||
[
|
||||
{
|
||||
"value": "2"
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"value": {
|
||||
"ArrayInline": {
|
||||
"elements": [
|
||||
@ -441,7 +456,8 @@
|
||||
{
|
||||
"ArrayIndex": {
|
||||
"Value": {
|
||||
"Implicit": [
|
||||
"Integer": [
|
||||
"U32",
|
||||
"0",
|
||||
{
|
||||
"line_start": 16,
|
||||
@ -474,7 +490,8 @@
|
||||
},
|
||||
"index": {
|
||||
"Value": {
|
||||
"Implicit": [
|
||||
"Integer": [
|
||||
"U32",
|
||||
"0",
|
||||
{
|
||||
"line_start": 16,
|
||||
@ -546,7 +563,8 @@
|
||||
},
|
||||
"index": {
|
||||
"Value": {
|
||||
"Implicit": [
|
||||
"Integer": [
|
||||
"U32",
|
||||
"0",
|
||||
{
|
||||
"line_start": 17,
|
||||
@ -624,7 +642,16 @@
|
||||
}
|
||||
}
|
||||
],
|
||||
"type_": null,
|
||||
"type_": {
|
||||
"Tuple": [
|
||||
{
|
||||
"IntegerType": "U8"
|
||||
},
|
||||
{
|
||||
"IntegerType": "U8"
|
||||
}
|
||||
]
|
||||
},
|
||||
"value": {
|
||||
"TupleInit": {
|
||||
"elements": [
|
||||
@ -847,7 +874,9 @@
|
||||
}
|
||||
}
|
||||
],
|
||||
"type_": null,
|
||||
"type_": {
|
||||
"Circuit": "{\"name\":\"Foo\",\"span\":\"{\\\"line_start\\\":1,\\\"line_stop\\\":1,\\\"col_start\\\":9,\\\"col_stop\\\":12,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\"circuit Foo {\\\"}\"}"
|
||||
},
|
||||
"value": {
|
||||
"CircuitInit": {
|
||||
"name": "{\"name\":\"Foo\",\"span\":\"{\\\"line_start\\\":23,\\\"line_stop\\\":23,\\\"col_start\\\":13,\\\"col_stop\\\":16,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" let foo = Foo { f: 6u8, y: (1u8, 1u8) };\\\"}\"}",
|
||||
@ -1080,18 +1109,245 @@
|
||||
"variable_names": [
|
||||
{
|
||||
"mutable": true,
|
||||
"identifier": "{\"name\":\"a\",\"span\":\"{\\\"line_start\\\":27,\\\"line_stop\\\":27,\\\"col_start\\\":7,\\\"col_stop\\\":8,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" let a = [[0u8; 1]; 4];\\\"}\"}",
|
||||
"identifier": "{\"name\":\"complex\",\"span\":\"{\\\"line_start\\\":27,\\\"line_stop\\\":27,\\\"col_start\\\":7,\\\"col_stop\\\":14,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" let complex = 2u8;\\\"}\"}",
|
||||
"span": {
|
||||
"line_start": 27,
|
||||
"line_stop": 27,
|
||||
"col_start": 7,
|
||||
"col_stop": 14,
|
||||
"path": "",
|
||||
"content": " let complex = 2u8;"
|
||||
}
|
||||
}
|
||||
],
|
||||
"type_": {
|
||||
"IntegerType": "U8"
|
||||
},
|
||||
"value": {
|
||||
"Value": {
|
||||
"Integer": [
|
||||
"U8",
|
||||
"2",
|
||||
{
|
||||
"line_start": 27,
|
||||
"line_stop": 27,
|
||||
"col_start": 17,
|
||||
"col_stop": 20,
|
||||
"path": "",
|
||||
"content": " let complex = 2u8;"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"span": {
|
||||
"line_start": 27,
|
||||
"line_stop": 27,
|
||||
"col_start": 3,
|
||||
"col_stop": 20,
|
||||
"path": "",
|
||||
"content": " let complex = 2u8;"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"Assign": {
|
||||
"operation": "Assign",
|
||||
"assignee": {
|
||||
"identifier": "{\"name\":\"complex\",\"span\":\"{\\\"line_start\\\":28,\\\"line_stop\\\":28,\\\"col_start\\\":3,\\\"col_stop\\\":10,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" complex += 22u8 - 2u8+ 1u8;\\\"}\"}",
|
||||
"accesses": [],
|
||||
"span": {
|
||||
"line_start": 28,
|
||||
"line_stop": 28,
|
||||
"col_start": 3,
|
||||
"col_stop": 10,
|
||||
"path": "",
|
||||
"content": " complex += 22u8 - 2u8+ 1u8;"
|
||||
}
|
||||
},
|
||||
"value": {
|
||||
"Binary": {
|
||||
"left": {
|
||||
"Identifier": "{\"name\":\"complex\",\"span\":\"{\\\"line_start\\\":28,\\\"line_stop\\\":28,\\\"col_start\\\":3,\\\"col_stop\\\":10,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" complex += 22u8 - 2u8+ 1u8;\\\"}\"}"
|
||||
},
|
||||
"right": {
|
||||
"Binary": {
|
||||
"left": {
|
||||
"Binary": {
|
||||
"left": {
|
||||
"Value": {
|
||||
"Integer": [
|
||||
"U8",
|
||||
"22",
|
||||
{
|
||||
"line_start": 28,
|
||||
"line_stop": 28,
|
||||
"col_start": 14,
|
||||
"col_stop": 18,
|
||||
"path": "",
|
||||
"content": " complex += 22u8 - 2u8+ 1u8;"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"right": {
|
||||
"Value": {
|
||||
"Integer": [
|
||||
"U8",
|
||||
"2",
|
||||
{
|
||||
"line_start": 28,
|
||||
"line_stop": 28,
|
||||
"col_start": 21,
|
||||
"col_stop": 24,
|
||||
"path": "",
|
||||
"content": " complex += 22u8 - 2u8+ 1u8;"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"op": "Sub",
|
||||
"span": {
|
||||
"line_start": 28,
|
||||
"line_stop": 28,
|
||||
"col_start": 14,
|
||||
"col_stop": 24,
|
||||
"path": "",
|
||||
"content": " complex += 22u8 - 2u8+ 1u8;"
|
||||
}
|
||||
}
|
||||
},
|
||||
"right": {
|
||||
"Value": {
|
||||
"Integer": [
|
||||
"U8",
|
||||
"1",
|
||||
{
|
||||
"line_start": 28,
|
||||
"line_stop": 28,
|
||||
"col_start": 26,
|
||||
"col_stop": 29,
|
||||
"path": "",
|
||||
"content": " complex += 22u8 - 2u8+ 1u8;"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"op": "Add",
|
||||
"span": {
|
||||
"line_start": 28,
|
||||
"line_stop": 28,
|
||||
"col_start": 14,
|
||||
"col_stop": 29,
|
||||
"path": "",
|
||||
"content": " complex += 22u8 - 2u8+ 1u8;"
|
||||
}
|
||||
}
|
||||
},
|
||||
"op": "Add",
|
||||
"span": {
|
||||
"line_start": 28,
|
||||
"line_stop": 28,
|
||||
"col_start": 3,
|
||||
"col_stop": 29,
|
||||
"path": "",
|
||||
"content": " complex += 22u8 - 2u8+ 1u8;"
|
||||
}
|
||||
}
|
||||
},
|
||||
"span": {
|
||||
"line_start": 28,
|
||||
"line_stop": 28,
|
||||
"col_start": 3,
|
||||
"col_stop": 29,
|
||||
"path": "",
|
||||
"content": " complex += 22u8 - 2u8+ 1u8;"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"Console": {
|
||||
"function": {
|
||||
"Assert": {
|
||||
"Binary": {
|
||||
"left": {
|
||||
"Identifier": "{\"name\":\"complex\",\"span\":\"{\\\"line_start\\\":29,\\\"line_stop\\\":29,\\\"col_start\\\":18,\\\"col_stop\\\":25,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" console.assert(complex == 23u8);\\\"}\"}"
|
||||
},
|
||||
"right": {
|
||||
"Value": {
|
||||
"Integer": [
|
||||
"U8",
|
||||
"23",
|
||||
{
|
||||
"line_start": 29,
|
||||
"line_stop": 29,
|
||||
"col_start": 29,
|
||||
"col_stop": 33,
|
||||
"path": "",
|
||||
"content": " console.assert(complex == 23u8);"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"op": "Eq",
|
||||
"span": {
|
||||
"line_start": 29,
|
||||
"line_stop": 29,
|
||||
"col_start": 18,
|
||||
"col_stop": 33,
|
||||
"path": "",
|
||||
"content": " console.assert(complex == 23u8);"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"span": {
|
||||
"line_start": 29,
|
||||
"line_stop": 29,
|
||||
"col_start": 3,
|
||||
"col_stop": 33,
|
||||
"path": "",
|
||||
"content": " console.assert(complex == 23u8);"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"Definition": {
|
||||
"declaration_type": "Let",
|
||||
"variable_names": [
|
||||
{
|
||||
"mutable": true,
|
||||
"identifier": "{\"name\":\"a\",\"span\":\"{\\\"line_start\\\":30,\\\"line_stop\\\":30,\\\"col_start\\\":7,\\\"col_stop\\\":8,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" let a = [[0u8; 1]; 4];\\\"}\"}",
|
||||
"span": {
|
||||
"line_start": 30,
|
||||
"line_stop": 30,
|
||||
"col_start": 7,
|
||||
"col_stop": 8,
|
||||
"path": "",
|
||||
"content": " let a = [[0u8; 1]; 4];"
|
||||
}
|
||||
}
|
||||
],
|
||||
"type_": null,
|
||||
"type_": {
|
||||
"Array": [
|
||||
{
|
||||
"Array": [
|
||||
{
|
||||
"IntegerType": "U8"
|
||||
},
|
||||
[
|
||||
{
|
||||
"value": "1"
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
[
|
||||
{
|
||||
"value": "4"
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"value": {
|
||||
"ArrayInit": {
|
||||
"element": {
|
||||
@ -1102,8 +1358,8 @@
|
||||
"U8",
|
||||
"0",
|
||||
{
|
||||
"line_start": 27,
|
||||
"line_stop": 27,
|
||||
"line_start": 30,
|
||||
"line_stop": 30,
|
||||
"col_start": 13,
|
||||
"col_stop": 16,
|
||||
"path": "",
|
||||
@ -1118,8 +1374,8 @@
|
||||
}
|
||||
],
|
||||
"span": {
|
||||
"line_start": 27,
|
||||
"line_stop": 27,
|
||||
"line_start": 30,
|
||||
"line_stop": 30,
|
||||
"col_start": 12,
|
||||
"col_stop": 20,
|
||||
"path": "",
|
||||
@ -1133,8 +1389,8 @@
|
||||
}
|
||||
],
|
||||
"span": {
|
||||
"line_start": 27,
|
||||
"line_stop": 27,
|
||||
"line_start": 30,
|
||||
"line_stop": 30,
|
||||
"col_start": 11,
|
||||
"col_stop": 24,
|
||||
"path": "",
|
||||
@ -1143,8 +1399,8 @@
|
||||
}
|
||||
},
|
||||
"span": {
|
||||
"line_start": 27,
|
||||
"line_stop": 27,
|
||||
"line_start": 30,
|
||||
"line_stop": 30,
|
||||
"col_start": 3,
|
||||
"col_stop": 24,
|
||||
"path": "",
|
||||
@ -1156,16 +1412,17 @@
|
||||
"Assign": {
|
||||
"operation": "Assign",
|
||||
"assignee": {
|
||||
"identifier": "{\"name\":\"a\",\"span\":\"{\\\"line_start\\\":28,\\\"line_stop\\\":28,\\\"col_start\\\":3,\\\"col_stop\\\":4,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" a[2][0] += 1u8;\\\"}\"}",
|
||||
"identifier": "{\"name\":\"a\",\"span\":\"{\\\"line_start\\\":31,\\\"line_stop\\\":31,\\\"col_start\\\":3,\\\"col_stop\\\":4,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" a[2][0] += 1u8;\\\"}\"}",
|
||||
"accesses": [
|
||||
{
|
||||
"ArrayIndex": {
|
||||
"Value": {
|
||||
"Implicit": [
|
||||
"Integer": [
|
||||
"U32",
|
||||
"2",
|
||||
{
|
||||
"line_start": 28,
|
||||
"line_stop": 28,
|
||||
"line_start": 31,
|
||||
"line_stop": 31,
|
||||
"col_start": 5,
|
||||
"col_stop": 6,
|
||||
"path": "",
|
||||
@ -1178,11 +1435,12 @@
|
||||
{
|
||||
"ArrayIndex": {
|
||||
"Value": {
|
||||
"Implicit": [
|
||||
"Integer": [
|
||||
"U32",
|
||||
"0",
|
||||
{
|
||||
"line_start": 28,
|
||||
"line_stop": 28,
|
||||
"line_start": 31,
|
||||
"line_stop": 31,
|
||||
"col_start": 8,
|
||||
"col_stop": 9,
|
||||
"path": "",
|
||||
@ -1194,8 +1452,8 @@
|
||||
}
|
||||
],
|
||||
"span": {
|
||||
"line_start": 28,
|
||||
"line_stop": 28,
|
||||
"line_start": 31,
|
||||
"line_stop": 31,
|
||||
"col_start": 3,
|
||||
"col_stop": 10,
|
||||
"path": "",
|
||||
@ -1209,15 +1467,16 @@
|
||||
"array": {
|
||||
"ArrayAccess": {
|
||||
"array": {
|
||||
"Identifier": "{\"name\":\"a\",\"span\":\"{\\\"line_start\\\":28,\\\"line_stop\\\":28,\\\"col_start\\\":3,\\\"col_stop\\\":4,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" a[2][0] += 1u8;\\\"}\"}"
|
||||
"Identifier": "{\"name\":\"a\",\"span\":\"{\\\"line_start\\\":31,\\\"line_stop\\\":31,\\\"col_start\\\":3,\\\"col_stop\\\":4,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" a[2][0] += 1u8;\\\"}\"}"
|
||||
},
|
||||
"index": {
|
||||
"Value": {
|
||||
"Implicit": [
|
||||
"Integer": [
|
||||
"U32",
|
||||
"2",
|
||||
{
|
||||
"line_start": 28,
|
||||
"line_stop": 28,
|
||||
"line_start": 31,
|
||||
"line_stop": 31,
|
||||
"col_start": 5,
|
||||
"col_stop": 6,
|
||||
"path": "",
|
||||
@ -1227,8 +1486,8 @@
|
||||
}
|
||||
},
|
||||
"span": {
|
||||
"line_start": 28,
|
||||
"line_stop": 28,
|
||||
"line_start": 31,
|
||||
"line_stop": 31,
|
||||
"col_start": 3,
|
||||
"col_stop": 17,
|
||||
"path": "",
|
||||
@ -1238,11 +1497,12 @@
|
||||
},
|
||||
"index": {
|
||||
"Value": {
|
||||
"Implicit": [
|
||||
"Integer": [
|
||||
"U32",
|
||||
"0",
|
||||
{
|
||||
"line_start": 28,
|
||||
"line_stop": 28,
|
||||
"line_start": 31,
|
||||
"line_stop": 31,
|
||||
"col_start": 8,
|
||||
"col_stop": 9,
|
||||
"path": "",
|
||||
@ -1252,8 +1512,8 @@
|
||||
}
|
||||
},
|
||||
"span": {
|
||||
"line_start": 28,
|
||||
"line_stop": 28,
|
||||
"line_start": 31,
|
||||
"line_stop": 31,
|
||||
"col_start": 3,
|
||||
"col_stop": 17,
|
||||
"path": "",
|
||||
@ -1267,8 +1527,8 @@
|
||||
"U8",
|
||||
"1",
|
||||
{
|
||||
"line_start": 28,
|
||||
"line_stop": 28,
|
||||
"line_start": 31,
|
||||
"line_stop": 31,
|
||||
"col_start": 14,
|
||||
"col_stop": 17,
|
||||
"path": "",
|
||||
@ -1279,8 +1539,8 @@
|
||||
},
|
||||
"op": "Add",
|
||||
"span": {
|
||||
"line_start": 28,
|
||||
"line_stop": 28,
|
||||
"line_start": 31,
|
||||
"line_stop": 31,
|
||||
"col_start": 3,
|
||||
"col_stop": 17,
|
||||
"path": "",
|
||||
@ -1289,8 +1549,8 @@
|
||||
}
|
||||
},
|
||||
"span": {
|
||||
"line_start": 28,
|
||||
"line_stop": 28,
|
||||
"line_start": 31,
|
||||
"line_stop": 31,
|
||||
"col_start": 3,
|
||||
"col_stop": 17,
|
||||
"path": "",
|
||||
@ -1308,15 +1568,16 @@
|
||||
"array": {
|
||||
"ArrayAccess": {
|
||||
"array": {
|
||||
"Identifier": "{\"name\":\"a\",\"span\":\"{\\\"line_start\\\":29,\\\"line_stop\\\":29,\\\"col_start\\\":18,\\\"col_stop\\\":19,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" console.assert(a[2][0] == 1u8);\\\"}\"}"
|
||||
"Identifier": "{\"name\":\"a\",\"span\":\"{\\\"line_start\\\":32,\\\"line_stop\\\":32,\\\"col_start\\\":18,\\\"col_stop\\\":19,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" console.assert(a[2][0] == 1u8);\\\"}\"}"
|
||||
},
|
||||
"index": {
|
||||
"Value": {
|
||||
"Implicit": [
|
||||
"Integer": [
|
||||
"U32",
|
||||
"2",
|
||||
{
|
||||
"line_start": 29,
|
||||
"line_stop": 29,
|
||||
"line_start": 32,
|
||||
"line_stop": 32,
|
||||
"col_start": 20,
|
||||
"col_stop": 21,
|
||||
"path": "",
|
||||
@ -1326,8 +1587,8 @@
|
||||
}
|
||||
},
|
||||
"span": {
|
||||
"line_start": 29,
|
||||
"line_stop": 29,
|
||||
"line_start": 32,
|
||||
"line_stop": 32,
|
||||
"col_start": 18,
|
||||
"col_stop": 22,
|
||||
"path": "",
|
||||
@ -1337,11 +1598,12 @@
|
||||
},
|
||||
"index": {
|
||||
"Value": {
|
||||
"Implicit": [
|
||||
"Integer": [
|
||||
"U32",
|
||||
"0",
|
||||
{
|
||||
"line_start": 29,
|
||||
"line_stop": 29,
|
||||
"line_start": 32,
|
||||
"line_stop": 32,
|
||||
"col_start": 23,
|
||||
"col_stop": 24,
|
||||
"path": "",
|
||||
@ -1351,8 +1613,8 @@
|
||||
}
|
||||
},
|
||||
"span": {
|
||||
"line_start": 29,
|
||||
"line_stop": 29,
|
||||
"line_start": 32,
|
||||
"line_stop": 32,
|
||||
"col_start": 18,
|
||||
"col_stop": 25,
|
||||
"path": "",
|
||||
@ -1366,8 +1628,8 @@
|
||||
"U8",
|
||||
"1",
|
||||
{
|
||||
"line_start": 29,
|
||||
"line_stop": 29,
|
||||
"line_start": 32,
|
||||
"line_stop": 32,
|
||||
"col_start": 29,
|
||||
"col_stop": 32,
|
||||
"path": "",
|
||||
@ -1378,8 +1640,8 @@
|
||||
},
|
||||
"op": "Eq",
|
||||
"span": {
|
||||
"line_start": 29,
|
||||
"line_stop": 29,
|
||||
"line_start": 32,
|
||||
"line_stop": 32,
|
||||
"col_start": 18,
|
||||
"col_stop": 32,
|
||||
"path": "",
|
||||
@ -1389,8 +1651,8 @@
|
||||
}
|
||||
},
|
||||
"span": {
|
||||
"line_start": 29,
|
||||
"line_stop": 29,
|
||||
"line_start": 32,
|
||||
"line_stop": 32,
|
||||
"col_start": 3,
|
||||
"col_stop": 32,
|
||||
"path": "",
|
||||
@ -1404,10 +1666,10 @@
|
||||
"variable_names": [
|
||||
{
|
||||
"mutable": true,
|
||||
"identifier": "{\"name\":\"b\",\"span\":\"{\\\"line_start\\\":31,\\\"line_stop\\\":31,\\\"col_start\\\":7,\\\"col_stop\\\":8,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" let b = [0u8; (4, 1)];\\\"}\"}",
|
||||
"identifier": "{\"name\":\"b\",\"span\":\"{\\\"line_start\\\":34,\\\"line_stop\\\":34,\\\"col_start\\\":7,\\\"col_stop\\\":8,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" let b = [0u8; (4, 1)];\\\"}\"}",
|
||||
"span": {
|
||||
"line_start": 31,
|
||||
"line_stop": 31,
|
||||
"line_start": 34,
|
||||
"line_stop": 34,
|
||||
"col_start": 7,
|
||||
"col_stop": 8,
|
||||
"path": "",
|
||||
@ -1415,7 +1677,27 @@
|
||||
}
|
||||
}
|
||||
],
|
||||
"type_": null,
|
||||
"type_": {
|
||||
"Array": [
|
||||
{
|
||||
"Array": [
|
||||
{
|
||||
"IntegerType": "U8"
|
||||
},
|
||||
[
|
||||
{
|
||||
"value": "1"
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
[
|
||||
{
|
||||
"value": "4"
|
||||
}
|
||||
]
|
||||
]
|
||||
},
|
||||
"value": {
|
||||
"ArrayInit": {
|
||||
"element": {
|
||||
@ -1426,8 +1708,8 @@
|
||||
"U8",
|
||||
"0",
|
||||
{
|
||||
"line_start": 31,
|
||||
"line_stop": 31,
|
||||
"line_start": 34,
|
||||
"line_stop": 34,
|
||||
"col_start": 12,
|
||||
"col_stop": 15,
|
||||
"path": "",
|
||||
@ -1442,8 +1724,8 @@
|
||||
}
|
||||
],
|
||||
"span": {
|
||||
"line_start": 31,
|
||||
"line_stop": 31,
|
||||
"line_start": 34,
|
||||
"line_stop": 34,
|
||||
"col_start": 11,
|
||||
"col_stop": 24,
|
||||
"path": "",
|
||||
@ -1457,8 +1739,8 @@
|
||||
}
|
||||
],
|
||||
"span": {
|
||||
"line_start": 31,
|
||||
"line_stop": 31,
|
||||
"line_start": 34,
|
||||
"line_stop": 34,
|
||||
"col_start": 11,
|
||||
"col_stop": 24,
|
||||
"path": "",
|
||||
@ -1467,8 +1749,8 @@
|
||||
}
|
||||
},
|
||||
"span": {
|
||||
"line_start": 31,
|
||||
"line_stop": 31,
|
||||
"line_start": 34,
|
||||
"line_stop": 34,
|
||||
"col_start": 3,
|
||||
"col_stop": 24,
|
||||
"path": "",
|
||||
@ -1480,16 +1762,17 @@
|
||||
"Assign": {
|
||||
"operation": "Assign",
|
||||
"assignee": {
|
||||
"identifier": "{\"name\":\"b\",\"span\":\"{\\\"line_start\\\":32,\\\"line_stop\\\":32,\\\"col_start\\\":3,\\\"col_stop\\\":4,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" b[2][0] += 1u8;\\\"}\"}",
|
||||
"identifier": "{\"name\":\"b\",\"span\":\"{\\\"line_start\\\":35,\\\"line_stop\\\":35,\\\"col_start\\\":3,\\\"col_stop\\\":4,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" b[2][0] += 1u8;\\\"}\"}",
|
||||
"accesses": [
|
||||
{
|
||||
"ArrayIndex": {
|
||||
"Value": {
|
||||
"Implicit": [
|
||||
"Integer": [
|
||||
"U32",
|
||||
"2",
|
||||
{
|
||||
"line_start": 32,
|
||||
"line_stop": 32,
|
||||
"line_start": 35,
|
||||
"line_stop": 35,
|
||||
"col_start": 5,
|
||||
"col_stop": 6,
|
||||
"path": "",
|
||||
@ -1502,11 +1785,12 @@
|
||||
{
|
||||
"ArrayIndex": {
|
||||
"Value": {
|
||||
"Implicit": [
|
||||
"Integer": [
|
||||
"U32",
|
||||
"0",
|
||||
{
|
||||
"line_start": 32,
|
||||
"line_stop": 32,
|
||||
"line_start": 35,
|
||||
"line_stop": 35,
|
||||
"col_start": 8,
|
||||
"col_stop": 9,
|
||||
"path": "",
|
||||
@ -1518,8 +1802,8 @@
|
||||
}
|
||||
],
|
||||
"span": {
|
||||
"line_start": 32,
|
||||
"line_stop": 32,
|
||||
"line_start": 35,
|
||||
"line_stop": 35,
|
||||
"col_start": 3,
|
||||
"col_stop": 10,
|
||||
"path": "",
|
||||
@ -1533,15 +1817,16 @@
|
||||
"array": {
|
||||
"ArrayAccess": {
|
||||
"array": {
|
||||
"Identifier": "{\"name\":\"b\",\"span\":\"{\\\"line_start\\\":32,\\\"line_stop\\\":32,\\\"col_start\\\":3,\\\"col_stop\\\":4,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" b[2][0] += 1u8;\\\"}\"}"
|
||||
"Identifier": "{\"name\":\"b\",\"span\":\"{\\\"line_start\\\":35,\\\"line_stop\\\":35,\\\"col_start\\\":3,\\\"col_stop\\\":4,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" b[2][0] += 1u8;\\\"}\"}"
|
||||
},
|
||||
"index": {
|
||||
"Value": {
|
||||
"Implicit": [
|
||||
"Integer": [
|
||||
"U32",
|
||||
"2",
|
||||
{
|
||||
"line_start": 32,
|
||||
"line_stop": 32,
|
||||
"line_start": 35,
|
||||
"line_stop": 35,
|
||||
"col_start": 5,
|
||||
"col_stop": 6,
|
||||
"path": "",
|
||||
@ -1551,8 +1836,8 @@
|
||||
}
|
||||
},
|
||||
"span": {
|
||||
"line_start": 32,
|
||||
"line_stop": 32,
|
||||
"line_start": 35,
|
||||
"line_stop": 35,
|
||||
"col_start": 3,
|
||||
"col_stop": 17,
|
||||
"path": "",
|
||||
@ -1562,11 +1847,12 @@
|
||||
},
|
||||
"index": {
|
||||
"Value": {
|
||||
"Implicit": [
|
||||
"Integer": [
|
||||
"U32",
|
||||
"0",
|
||||
{
|
||||
"line_start": 32,
|
||||
"line_stop": 32,
|
||||
"line_start": 35,
|
||||
"line_stop": 35,
|
||||
"col_start": 8,
|
||||
"col_stop": 9,
|
||||
"path": "",
|
||||
@ -1576,8 +1862,8 @@
|
||||
}
|
||||
},
|
||||
"span": {
|
||||
"line_start": 32,
|
||||
"line_stop": 32,
|
||||
"line_start": 35,
|
||||
"line_stop": 35,
|
||||
"col_start": 3,
|
||||
"col_stop": 17,
|
||||
"path": "",
|
||||
@ -1591,8 +1877,8 @@
|
||||
"U8",
|
||||
"1",
|
||||
{
|
||||
"line_start": 32,
|
||||
"line_stop": 32,
|
||||
"line_start": 35,
|
||||
"line_stop": 35,
|
||||
"col_start": 14,
|
||||
"col_stop": 17,
|
||||
"path": "",
|
||||
@ -1603,8 +1889,8 @@
|
||||
},
|
||||
"op": "Add",
|
||||
"span": {
|
||||
"line_start": 32,
|
||||
"line_stop": 32,
|
||||
"line_start": 35,
|
||||
"line_stop": 35,
|
||||
"col_start": 3,
|
||||
"col_stop": 17,
|
||||
"path": "",
|
||||
@ -1613,8 +1899,8 @@
|
||||
}
|
||||
},
|
||||
"span": {
|
||||
"line_start": 32,
|
||||
"line_stop": 32,
|
||||
"line_start": 35,
|
||||
"line_stop": 35,
|
||||
"col_start": 3,
|
||||
"col_stop": 17,
|
||||
"path": "",
|
||||
@ -1632,15 +1918,16 @@
|
||||
"array": {
|
||||
"ArrayAccess": {
|
||||
"array": {
|
||||
"Identifier": "{\"name\":\"a\",\"span\":\"{\\\"line_start\\\":33,\\\"line_stop\\\":33,\\\"col_start\\\":18,\\\"col_stop\\\":19,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" console.assert(a[2][0] == 1u8);\\\"}\"}"
|
||||
"Identifier": "{\"name\":\"a\",\"span\":\"{\\\"line_start\\\":36,\\\"line_stop\\\":36,\\\"col_start\\\":18,\\\"col_stop\\\":19,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" console.assert(a[2][0] == 1u8);\\\"}\"}"
|
||||
},
|
||||
"index": {
|
||||
"Value": {
|
||||
"Implicit": [
|
||||
"Integer": [
|
||||
"U32",
|
||||
"2",
|
||||
{
|
||||
"line_start": 33,
|
||||
"line_stop": 33,
|
||||
"line_start": 36,
|
||||
"line_stop": 36,
|
||||
"col_start": 20,
|
||||
"col_stop": 21,
|
||||
"path": "",
|
||||
@ -1650,8 +1937,8 @@
|
||||
}
|
||||
},
|
||||
"span": {
|
||||
"line_start": 33,
|
||||
"line_stop": 33,
|
||||
"line_start": 36,
|
||||
"line_stop": 36,
|
||||
"col_start": 18,
|
||||
"col_stop": 22,
|
||||
"path": "",
|
||||
@ -1661,11 +1948,12 @@
|
||||
},
|
||||
"index": {
|
||||
"Value": {
|
||||
"Implicit": [
|
||||
"Integer": [
|
||||
"U32",
|
||||
"0",
|
||||
{
|
||||
"line_start": 33,
|
||||
"line_stop": 33,
|
||||
"line_start": 36,
|
||||
"line_stop": 36,
|
||||
"col_start": 23,
|
||||
"col_stop": 24,
|
||||
"path": "",
|
||||
@ -1675,8 +1963,8 @@
|
||||
}
|
||||
},
|
||||
"span": {
|
||||
"line_start": 33,
|
||||
"line_stop": 33,
|
||||
"line_start": 36,
|
||||
"line_stop": 36,
|
||||
"col_start": 18,
|
||||
"col_stop": 25,
|
||||
"path": "",
|
||||
@ -1690,8 +1978,8 @@
|
||||
"U8",
|
||||
"1",
|
||||
{
|
||||
"line_start": 33,
|
||||
"line_stop": 33,
|
||||
"line_start": 36,
|
||||
"line_stop": 36,
|
||||
"col_start": 29,
|
||||
"col_stop": 32,
|
||||
"path": "",
|
||||
@ -1702,8 +1990,8 @@
|
||||
},
|
||||
"op": "Eq",
|
||||
"span": {
|
||||
"line_start": 33,
|
||||
"line_stop": 33,
|
||||
"line_start": 36,
|
||||
"line_stop": 36,
|
||||
"col_start": 18,
|
||||
"col_stop": 32,
|
||||
"path": "",
|
||||
@ -1713,8 +2001,8 @@
|
||||
}
|
||||
},
|
||||
"span": {
|
||||
"line_start": 33,
|
||||
"line_stop": 33,
|
||||
"line_start": 36,
|
||||
"line_stop": 36,
|
||||
"col_start": 3,
|
||||
"col_stop": 32,
|
||||
"path": "",
|
||||
@ -1725,7 +2013,7 @@
|
||||
],
|
||||
"span": {
|
||||
"line_start": 10,
|
||||
"line_stop": 34,
|
||||
"line_stop": 37,
|
||||
"col_start": 17,
|
||||
"col_stop": 2,
|
||||
"path": "",
|
||||
@ -1734,11 +2022,11 @@
|
||||
},
|
||||
"span": {
|
||||
"line_start": 10,
|
||||
"line_stop": 34,
|
||||
"line_stop": 37,
|
||||
"col_start": 1,
|
||||
"col_stop": 2,
|
||||
"path": "",
|
||||
"content": "function main() {\n...\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"
|
||||
"content": "function main() {\n...\n}\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -24,6 +24,9 @@ function main() {
  foo.f += 2u8;
  console.assert(foo.f == 8u8);

  let complex = 2u8;
  complex += 22u8 - 2u8 + 1u8;
  console.assert(complex == 23u8);
  let a = [[0u8; 1]; 4];
  a[2][0] += 1u8;
  console.assert(a[2][0] == 1u8);
@ -0,0 +1,4 @@
function main () {
    let x = [1u32; 5];
    x[..2] += 1;
}
@ -87,3 +87,11 @@ fn test_compound_assignment() {

    assert_eq!(expected_ast, ast);
}

#[test]
fn test_illegal_array_range_fail() {
    // Check program is invalid.
    let program_string = include_str!("illegal_array_range_fail.leo");
    let program = parse_program(program_string);
    assert!(program.is_err());
}
@ -28,7 +28,7 @@ fn test_parse_program_from_string() {
|
||||
// Parse program from string with compiler.
|
||||
let program_string = include_str!("main.leo");
|
||||
let context = crate::make_test_context();
|
||||
let mut compiler_no_path = EdwardsTestCompiler::new("".to_string(), PathBuf::new(), PathBuf::new(), context);
|
||||
let mut compiler_no_path = EdwardsTestCompiler::new("".to_string(), PathBuf::new(), PathBuf::new(), context, None);
|
||||
|
||||
compiler_no_path.parse_program_from_string(program_string).unwrap();
|
||||
|
||||
@ -37,7 +37,7 @@ fn test_parse_program_from_string() {
|
||||
local.push(MAIN_FILE_NAME);
|
||||
|
||||
let compiler_with_path =
|
||||
EdwardsTestCompiler::parse_program_without_input("".to_string(), local, PathBuf::new(), context).unwrap();
|
||||
EdwardsTestCompiler::parse_program_without_input("".to_string(), local, PathBuf::new(), context, None).unwrap();
|
||||
|
||||
// Compare output bytes.
|
||||
let expected_output = get_output(compiler_no_path);
|
||||
|
46
compiler/otests/global_consts/global_const_types.leo
Normal file
@ -0,0 +1,46 @@
|
||||
const basic: u32 = 8;
|
||||
const array: [u8; (3, 2)] = [[0u8; 2]; 3];
|
||||
const tuple = (1u32, 2u32);
|
||||
const (a, b) = (1u32, 2u32);
|
||||
const simple_group: group = 1group;
|
||||
const complex_group = (_, 1)group;
|
||||
const field_test: field = 2;
|
||||
const use_another_const = basic + 1;
|
||||
const foo = Foo { width: 10, height: 20 };
|
||||
const uno = uno();
|
||||
|
||||
circuit Foo {
|
||||
width: u32,
|
||||
height: u32,
|
||||
}
|
||||
|
||||
function uno() -> u32 {
|
||||
return 1u32
|
||||
}
|
||||
|
||||
function main() {
|
||||
// basic test
|
||||
console.assert(basic == 8u32);
|
||||
// array test
|
||||
console.assert(array[0][0] == 0);
|
||||
console.assert(array[2][1] == 0);
|
||||
// tuple test
|
||||
let (x, y) = (1u32, 2u32);
|
||||
console.assert(x == 1u32);
|
||||
console.assert(y == 2u32);
|
||||
// tuple extraction test
|
||||
console.assert(a == 1u32);
|
||||
console.assert(b == 2u32);
|
||||
// group test
|
||||
console.assert(simple_group == 1group);
|
||||
console.assert(complex_group == (_, 1)group);
|
||||
// field test
|
||||
console.assert(field_test == 2field);
|
||||
// use another const test
|
||||
console.assert(use_another_const == 9u32);
|
||||
// circuit test
|
||||
console.assert(foo.width == 10u32);
|
||||
console.assert(foo.height == 20u32);
|
||||
// function test
|
||||
console.assert(uno == 1u32);
|
||||
}
|
36
compiler/otests/global_consts/mod.rs
Normal file
@ -0,0 +1,36 @@
|
||||
// Copyright (C) 2019-2021 Aleo Systems Inc.
|
||||
// This file is part of the Leo library.
|
||||
|
||||
// The Leo library is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// The Leo library is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
use crate::{assert_satisfied, expect_compiler_error, parse_program};
|
||||
|
||||
|
||||
#[test]
|
||||
fn test_global_consts() {
|
||||
let program_string = include_str!("global_consts.leo");
|
||||
|
||||
let program = parse_program(program_string).unwrap();
|
||||
|
||||
assert_satisfied(program);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_modify_global_const() {
|
||||
let program_string = include_str!("modify_global_const.leo");
|
||||
|
||||
let program = parse_program(program_string);

assert!(program.is_err());
|
||||
}
|
6
compiler/otests/global_consts/modify_global_const.leo
Normal file
@ -0,0 +1,6 @@
const basic: u32 = 8;

function main() {
    // Cannot re-assign!
    basic = 2u32;
}
@ -1,5 +1,7 @@
|
||||
import test-import.foo as bar;
|
||||
import bar.baz.ONE as UNO;
|
||||
|
||||
function main() {
|
||||
console.assert(bar() == 1u32);
|
||||
console.assert(UNO == 1u8);
|
||||
}
|
@ -5,3 +5,5 @@ circuit Baz {
|
||||
circuit Bazzar {
|
||||
a: u32
|
||||
}
|
||||
|
||||
const ONE: u8 = 1;
|
@ -5,7 +5,7 @@ import test-import.( // local import
|
||||
|
||||
import bar.( // imports directory import
|
||||
Bar,
|
||||
baz.(Baz, Bazzar),
|
||||
baz.(Baz, Bazzar, ONE),
|
||||
bat.bat.Bat,
|
||||
);
|
||||
|
||||
@ -23,4 +23,5 @@ function main() {
|
||||
const car = Car { c: 1u32 };
|
||||
|
||||
console.assert(car.c == 1u32);
|
||||
console.assert(ONE == 1u8);
|
||||
}
|
@ -16,4 +16,5 @@ function main() {
|
||||
const car = Car { c: 1u32 };
|
||||
|
||||
console.assert(car.c == 1u32);
|
||||
console.assert(ONE == 1u8);
|
||||
}
|
@ -37,6 +37,7 @@ pub mod mutability;
|
||||
pub mod statements;
|
||||
pub mod syntax;
|
||||
pub mod tuples;
|
||||
pub mod type_inference;
|
||||
|
||||
use leo_asg::{new_alloc_context, new_context, AsgContext};
|
||||
use leo_ast::{InputValue, MainInput};
|
||||
@ -71,7 +72,7 @@ fn new_compiler() -> EdwardsTestCompiler {
|
||||
let path = PathBuf::from("/test/src/main.leo");
|
||||
let output_dir = PathBuf::from(TEST_OUTPUT_DIRECTORY);
|
||||
|
||||
EdwardsTestCompiler::new(program_name, path, output_dir, make_test_context())
|
||||
EdwardsTestCompiler::new(program_name, path, output_dir, make_test_context(), None)
|
||||
}
|
||||
|
||||
pub(crate) fn parse_program(program_string: &str) -> Result<EdwardsTestCompiler, CompilerError> {
|
||||
|
1008
compiler/otests/type_inference/basic.json
Normal file
File diff suppressed because it is too large
23
compiler/otests/type_inference/basic.leo
Normal file
@ -0,0 +1,23 @@
|
||||
circuit Foo {}
|
||||
|
||||
function two() -> u8 {
|
||||
return 2u8;
|
||||
}
|
||||
|
||||
const ONE = 1u8;
|
||||
|
||||
function main() {
|
||||
const a = 1u8;
|
||||
const b = 1field;
|
||||
const c = 1group;
|
||||
const d = (0, 1)group;
|
||||
const e = aleo1qnr4dkkvkgfqph0vzc3y6z2eu975wnpz2925ntjccd5cfqxtyu8sta57j8;
|
||||
const f = two();
|
||||
const g = [0u8; (3, 2)];
|
||||
const h = [[0u8; 3]; 2];
|
||||
const i = [1u8, 1u8, 1u8];
|
||||
const j = true;
|
||||
const k = (1u8, 1u8);
|
||||
const l = (1u8, 1u8, true);
|
||||
const m = Foo {};
|
||||
}
|
243
compiler/otests/type_inference/for_loop_and_compound.json
Normal file
@ -0,0 +1,243 @@
|
||||
{
|
||||
"name": "",
|
||||
"expected_input": [],
|
||||
"imports": [],
|
||||
"circuits": {},
|
||||
"global_consts": {},
|
||||
"functions": {
|
||||
"{\"name\":\"main\",\"span\":\"{\\\"line_start\\\":1,\\\"line_stop\\\":1,\\\"col_start\\\":10,\\\"col_stop\\\":14,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\"function main() {\\\"}\"}": {
|
||||
"annotations": [],
|
||||
"identifier": "{\"name\":\"main\",\"span\":\"{\\\"line_start\\\":1,\\\"line_stop\\\":1,\\\"col_start\\\":10,\\\"col_stop\\\":14,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\"function main() {\\\"}\"}",
|
||||
"input": [],
|
||||
"output": {
|
||||
"Tuple": []
|
||||
},
|
||||
"block": {
|
||||
"statements": [
|
||||
{
|
||||
"Definition": {
|
||||
"declaration_type": "Let",
|
||||
"variable_names": [
|
||||
{
|
||||
"mutable": true,
|
||||
"identifier": "{\"name\":\"x\",\"span\":\"{\\\"line_start\\\":2,\\\"line_stop\\\":2,\\\"col_start\\\":9,\\\"col_stop\\\":10,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" let x = 10u16;\\\"}\"}",
|
||||
"span": {
|
||||
"line_start": 2,
|
||||
"line_stop": 2,
|
||||
"col_start": 9,
|
||||
"col_stop": 10,
|
||||
"path": "",
|
||||
"content": " let x = 10u16;"
|
||||
}
|
||||
}
|
||||
],
|
||||
"type_": {
|
||||
"IntegerType": "U16"
|
||||
},
|
||||
"value": {
|
||||
"Value": {
|
||||
"Integer": [
|
||||
"U16",
|
||||
"10",
|
||||
{
|
||||
"line_start": 2,
|
||||
"line_stop": 2,
|
||||
"col_start": 13,
|
||||
"col_stop": 18,
|
||||
"path": "",
|
||||
"content": " let x = 10u16;"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"span": {
|
||||
"line_start": 2,
|
||||
"line_stop": 2,
|
||||
"col_start": 5,
|
||||
"col_stop": 18,
|
||||
"path": "",
|
||||
"content": " let x = 10u16;"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"Iteration": {
|
||||
"variable": "{\"name\":\"i\",\"span\":\"{\\\"line_start\\\":3,\\\"line_stop\\\":3,\\\"col_start\\\":9,\\\"col_stop\\\":10,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" for i in 0..3 {\\\"}\"}",
|
||||
"start": {
|
||||
"Value": {
|
||||
"Integer": [
|
||||
"U32",
|
||||
"0",
|
||||
{
|
||||
"line_start": 3,
|
||||
"line_stop": 3,
|
||||
"col_start": 14,
|
||||
"col_stop": 15,
|
||||
"path": "",
|
||||
"content": " for i in 0..3 {"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"stop": {
|
||||
"Value": {
|
||||
"Integer": [
|
||||
"U32",
|
||||
"3",
|
||||
{
|
||||
"line_start": 3,
|
||||
"line_stop": 3,
|
||||
"col_start": 17,
|
||||
"col_stop": 18,
|
||||
"path": "",
|
||||
"content": " for i in 0..3 {"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"block": {
|
||||
"statements": [
|
||||
{
|
||||
"Assign": {
|
||||
"operation": "Assign",
|
||||
"assignee": {
|
||||
"identifier": "{\"name\":\"x\",\"span\":\"{\\\"line_start\\\":4,\\\"line_stop\\\":4,\\\"col_start\\\":9,\\\"col_stop\\\":10,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" x -= 1;\\\"}\"}",
|
||||
"accesses": [],
|
||||
"span": {
|
||||
"line_start": 4,
|
||||
"line_stop": 4,
|
||||
"col_start": 9,
|
||||
"col_stop": 10,
|
||||
"path": "",
|
||||
"content": " x -= 1;"
|
||||
}
|
||||
},
|
||||
"value": {
|
||||
"Binary": {
|
||||
"left": {
|
||||
"Identifier": "{\"name\":\"x\",\"span\":\"{\\\"line_start\\\":4,\\\"line_stop\\\":4,\\\"col_start\\\":9,\\\"col_stop\\\":10,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" x -= 1;\\\"}\"}"
|
||||
},
|
||||
"right": {
|
||||
"Value": {
|
||||
"Integer": [
|
||||
"U16",
|
||||
"1",
|
||||
{
|
||||
"line_start": 4,
|
||||
"line_stop": 4,
|
||||
"col_start": 14,
|
||||
"col_stop": 15,
|
||||
"path": "",
|
||||
"content": " x -= 1;"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"op": "Sub",
|
||||
"span": {
|
||||
"line_start": 4,
|
||||
"line_stop": 4,
|
||||
"col_start": 9,
|
||||
"col_stop": 15,
|
||||
"path": "",
|
||||
"content": " x -= 1;"
|
||||
}
|
||||
}
|
||||
},
|
||||
"span": {
|
||||
"line_start": 4,
|
||||
"line_stop": 4,
|
||||
"col_start": 9,
|
||||
"col_stop": 15,
|
||||
"path": "",
|
||||
"content": " x -= 1;"
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"span": {
|
||||
"line_start": 3,
|
||||
"line_stop": 5,
|
||||
"col_start": 19,
|
||||
"col_stop": 6,
|
||||
"path": "",
|
||||
"content": " for i in 0..3 {\n...\n }"
|
||||
}
|
||||
},
|
||||
"span": {
|
||||
"line_start": 3,
|
||||
"line_stop": 5,
|
||||
"col_start": 5,
|
||||
"col_stop": 6,
|
||||
"path": "",
|
||||
"content": " for i in 0..3 {\n...\n }"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"Console": {
|
||||
"function": {
|
||||
"Assert": {
|
||||
"Binary": {
|
||||
"left": {
|
||||
"Identifier": "{\"name\":\"x\",\"span\":\"{\\\"line_start\\\":6,\\\"line_stop\\\":6,\\\"col_start\\\":20,\\\"col_stop\\\":21,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" console.assert(x == 7u16);\\\"}\"}"
|
||||
},
|
||||
"right": {
|
||||
"Value": {
|
||||
"Integer": [
|
||||
"U16",
|
||||
"7",
|
||||
{
|
||||
"line_start": 6,
|
||||
"line_stop": 6,
|
||||
"col_start": 25,
|
||||
"col_stop": 29,
|
||||
"path": "",
|
||||
"content": " console.assert(x == 7u16);"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"op": "Eq",
|
||||
"span": {
|
||||
"line_start": 6,
|
||||
"line_stop": 6,
|
||||
"col_start": 20,
|
||||
"col_stop": 29,
|
||||
"path": "",
|
||||
"content": " console.assert(x == 7u16);"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"span": {
|
||||
"line_start": 6,
|
||||
"line_stop": 6,
|
||||
"col_start": 5,
|
||||
"col_stop": 29,
|
||||
"path": "",
|
||||
"content": " console.assert(x == 7u16);"
|
||||
}
|
||||
}
|
||||
}
|
||||
],
|
||||
"span": {
|
||||
"line_start": 1,
|
||||
"line_stop": 7,
|
||||
"col_start": 17,
|
||||
"col_stop": 2,
|
||||
"path": "",
|
||||
"content": "function main() {\n...\n}"
|
||||
}
|
||||
},
|
||||
"span": {
|
||||
"line_start": 1,
|
||||
"line_stop": 7,
|
||||
"col_start": 1,
|
||||
"col_stop": 2,
|
||||
"path": "",
|
||||
"content": "function main() {\n...\n}\n\n\n\n"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
7
compiler/otests/type_inference/for_loop_and_compound.leo
Normal file
@ -0,0 +1,7 @@
function main() {
    let x = 10u16;
    for i in 0..3 {
        x -= 1;
    }
    console.assert(x == 7u16);
}
85
compiler/otests/type_inference/mod.rs
Normal file
@ -0,0 +1,85 @@
|
||||
// Copyright (C) 2019-2021 Aleo Systems Inc.
|
||||
// This file is part of the Leo library.
|
||||
|
||||
// The Leo library is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// The Leo library is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
use crate::{assert_satisfied, parse_program};
|
||||
#[allow(unused)]
|
||||
use leo_asg::{new_context, Asg, AsgContext};
|
||||
use leo_ast::Ast;
|
||||
use leo_compiler::TypeInferencePhase;
|
||||
use leo_imports::ImportParser;
|
||||
use leo_parser::parser;
|
||||
|
||||
thread_local! {
|
||||
static THREAD_GLOBAL_CONTEXT: AsgContext<'static> = {
|
||||
let leaked = Box::leak(Box::new(leo_asg::new_alloc_context()));
|
||||
leo_asg::new_context(leaked)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn thread_leaked_context() -> AsgContext<'static> {
|
||||
THREAD_GLOBAL_CONTEXT.with(|f| *f)
|
||||
}
|
||||
|
||||
pub fn parse_program_ast(file_string: &str) -> Ast {
|
||||
const TEST_PROGRAM_PATH: &str = "";
|
||||
let test_program_file_path = std::path::PathBuf::from(TEST_PROGRAM_PATH);
|
||||
|
||||
let mut ast = Ast::new(
|
||||
parser::parse(test_program_file_path.to_str().expect("unwrap fail"), &file_string)
|
||||
.expect("Failed to parse file."),
|
||||
);
|
||||
ast.canonicalize().expect("Failed to canonicalize program.");
|
||||
|
||||
let program = ast.clone().into_repr();
|
||||
let asg = Asg::new(thread_leaked_context(), &program, &mut ImportParser::default())
|
||||
.expect("Failed to create ASG from AST");
|
||||
|
||||
let new_ast = TypeInferencePhase::default()
|
||||
.phase_ast(&program, &asg.into_repr())
|
||||
.expect("Failed to produce type inference ast.");
|
||||
|
||||
new_ast
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_basic() {
|
||||
// Check program is valid.
|
||||
let program_string = include_str!("basic.leo");
|
||||
let program = parse_program(program_string).unwrap();
|
||||
assert_satisfied(program);
|
||||
|
||||
// Check we get expected ast.
|
||||
let ast = parse_program_ast(program_string);
|
||||
let expected_json = include_str!("basic.json");
|
||||
let expected_ast: Ast = Ast::from_json_string(expected_json).expect("Unable to parse json.");
|
||||
|
||||
assert_eq!(expected_ast, ast);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_for_loop_and_compound() {
|
||||
// Check program is valid.
|
||||
let program_string = include_str!("for_loop_and_compound.leo");
|
||||
let program = parse_program(program_string).unwrap();
|
||||
assert_satisfied(program);
|
||||
|
||||
// Check we get expected ast.
|
||||
let ast = parse_program_ast(program_string);
|
||||
let expected_json = include_str!("for_loop_and_compound.json");
|
||||
let expected_ast: Ast = Ast::from_json_string(expected_json).expect("Unable to parse json.");
|
||||
|
||||
assert_eq!(expected_ast, ast);
|
||||
}
|
@ -79,6 +79,7 @@ impl<'a, F: PrimeField, G: GroupType<F>> Compiler<'a, F, G> {
|
||||
main_file_path: PathBuf,
|
||||
output_directory: PathBuf,
|
||||
context: AsgContext<'a>,
|
||||
options: Option<CompilerOptions>,
|
||||
) -> Self {
|
||||
Self {
|
||||
program_name: package_name.clone(),
|
||||
@ -88,7 +89,7 @@ impl<'a, F: PrimeField, G: GroupType<F>> Compiler<'a, F, G> {
|
||||
program_input: Input::new(),
|
||||
asg: None,
|
||||
context,
|
||||
options: CompilerOptions::default(),
|
||||
options: options.unwrap_or_default(),
|
||||
_engine: PhantomData,
|
||||
_group: PhantomData,
|
||||
}
|
||||
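For illustration, the updated constructor call mirrors the test-harness helpers changed elsewhere in this commit (sketch only; the literal program name and paths are placeholders taken from the test code, and None falls back to CompilerOptions::default() via unwrap_or_default()):

    let compiler = EdwardsTestCompiler::new(
        "test".to_string(),                   // placeholder program name
        PathBuf::from("/test/src/main.leo"),  // path used by the test helpers
        PathBuf::from("/output/"),            // output directory used by the test helpers
        make_test_context(),
        None,                                 // None -> CompilerOptions::default()
    );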
@ -106,8 +107,9 @@ impl<'a, F: PrimeField, G: GroupType<F>> Compiler<'a, F, G> {
|
||||
main_file_path: PathBuf,
|
||||
output_directory: PathBuf,
|
||||
context: AsgContext<'a>,
|
||||
options: Option<CompilerOptions>,
|
||||
) -> Result<Self, CompilerError> {
|
||||
let mut compiler = Self::new(package_name, main_file_path, output_directory, context);
|
||||
let mut compiler = Self::new(package_name, main_file_path, output_directory, context, options);
|
||||
|
||||
compiler.parse_program()?;
|
||||
|
||||
@ -136,8 +138,9 @@ impl<'a, F: PrimeField, G: GroupType<F>> Compiler<'a, F, G> {
|
||||
state_string: &str,
|
||||
state_path: &Path,
|
||||
context: AsgContext<'a>,
|
||||
options: Option<CompilerOptions>,
|
||||
) -> Result<Self, CompilerError> {
|
||||
let mut compiler = Self::new(package_name, main_file_path, output_directory, context);
|
||||
let mut compiler = Self::new(package_name, main_file_path, output_directory, context, options);
|
||||
|
||||
compiler.parse_input(input_string, input_path, state_string, state_path)?;
|
||||
|
||||
@ -216,6 +219,7 @@ impl<'a, F: PrimeField, G: GroupType<F>> Compiler<'a, F, G> {
|
||||
// Use the parser to construct the abstract syntax tree (ast).
|
||||
|
||||
let mut ast = parse_ast(self.main_file_path.to_str().unwrap_or_default(), program_string)?;
|
||||
|
||||
// Perform compiler optimization by canonicalizing the AST if it's enabled.
|
||||
if self.options.canonicalization_enabled {
|
||||
ast.canonicalize()?;
|
||||
@ -228,7 +232,11 @@ impl<'a, F: PrimeField, G: GroupType<F>> Compiler<'a, F, G> {
|
||||
tracing::debug!("Program parsing complete\n{:#?}", self.program);
|
||||
|
||||
// Create a new symbol table from the program, imported_programs, and program_input.
|
||||
let asg = Asg::new(self.context, &self.program, &mut leo_imports::ImportParser::default())?;
|
||||
let asg = Asg::new(
|
||||
self.context,
|
||||
&self.program,
|
||||
&mut leo_imports::ImportParser::new(self.main_file_path.clone()),
|
||||
)?;
|
||||
|
||||
tracing::debug!("ASG generation complete");
|
||||
|
||||
|
@ -33,6 +33,10 @@ pub fn generate_constraints<'a, F: PrimeField, G: GroupType<F>, CS: ConstraintSy
|
||||
) -> Result<Output, CompilerError> {
|
||||
let mut resolved_program = ConstrainedProgram::<F, G>::new(program.clone());
|
||||
|
||||
for (_, global_const) in program.global_consts.iter() {
|
||||
resolved_program.enforce_definition_statement(cs, global_const)?;
|
||||
}
|
||||
|
||||
let main = {
|
||||
let program = program;
|
||||
program.functions.get("main").cloned()
|
||||
@ -85,10 +89,17 @@ pub fn generate_test_constraints<'a, F: PrimeField, G: GroupType<F>>(
|
||||
let input_pair = match input_file {
|
||||
Some(file_id) => {
|
||||
let file_name = file_id.clone();
|
||||
let file_name_kebab = file_name.to_string().replace("_", "-");
|
||||
|
||||
// transform "test_name" into "test-name"
|
||||
output_file_name = file_name.to_string();
|
||||
|
||||
match input.pairs.get(file_name.as_ref()) {
|
||||
// searches for test_input (snake case) or for test-input (kebab case)
|
||||
match input
|
||||
.pairs
|
||||
.get(&file_name_kebab)
|
||||
.or_else(|| input.pairs.get(file_name.as_ref()))
|
||||
{
|
||||
Some(pair) => pair.to_owned(),
|
||||
None => return Err(CompilerError::InvalidTestContext(file_name.to_string())),
|
||||
}
|
||||
|
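Sketched standalone, the intended lookup above is a kebab-case attempt with a snake-case fallback (the IndexMap-of-strings signature here is an assumption for illustration):

    use indexmap::IndexMap;

    fn find_input_pair<'a>(pairs: &'a IndexMap<String, String>, file_name: &str) -> Option<&'a String> {
        // Try "test-input" (kebab case) first, then fall back to "test_input" (snake case).
        let file_name_kebab = file_name.replace("_", "-");
        pairs.get(&file_name_kebab).or_else(|| pairs.get(file_name))
    }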
@ -24,7 +24,6 @@ use snarkvm_fields::PrimeField;
|
||||
impl<'a, F: PrimeField, G: GroupType<F>> ConstrainedProgram<'a, F, G> {
|
||||
pub fn store_definition(&mut self, variable: &Variable, value: ConstrainedValue<'a, F, G>) {
|
||||
let variable = variable.borrow();
|
||||
|
||||
self.store(variable.id, value);
|
||||
}
|
||||
}
|
||||
|
@ -14,9 +14,9 @@
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
use crate::errors::FunctionError;
|
||||
use crate::errors::{ExpressionError, FunctionError, ImportError, StatementError};
|
||||
use leo_asg::{AsgConvertError, FormattedError};
|
||||
use leo_ast::{CanonicalizeError, LeoError};
|
||||
use leo_ast::{LeoError, ReducerError};
|
||||
use leo_input::InputParserError;
|
||||
use leo_parser::SyntaxError;
|
||||
use leo_state::LocalDataVerificationError;
|
||||
@ -31,6 +31,12 @@ pub enum CompilerError {
|
||||
#[error("{}", _0)]
|
||||
AsgPassError(FormattedError),
|
||||
|
||||
#[error("{}", _0)]
|
||||
ExpressionError(#[from] ExpressionError),
|
||||
|
||||
#[error("{}", _0)]
|
||||
ImportError(#[from] ImportError),
|
||||
|
||||
#[error("{}", _0)]
|
||||
InputParserError(#[from] InputParserError),
|
||||
|
||||
@ -56,7 +62,10 @@ pub enum CompilerError {
|
||||
AsgConvertError(#[from] AsgConvertError),
|
||||
|
||||
#[error("{}", _0)]
|
||||
CanonicalizeError(#[from] CanonicalizeError),
|
||||
ReducerError(#[from] ReducerError),
|
||||
|
||||
#[error("{}", _0)]
|
||||
StatementError(#[from] StatementError),
|
||||
}
|
||||
|
||||
impl LeoError for CompilerError {}
|
||||
|
@ -26,6 +26,7 @@ impl<'a, F: PrimeField, G: GroupType<F>> ConstrainedProgram<'a, F, G> {
|
||||
pub fn evaluate_ref(&mut self, variable_ref: &VariableRef) -> Result<ConstrainedValue<'a, F, G>, ExpressionError> {
|
||||
// Evaluate the identifier name in the current function scope
|
||||
let variable = variable_ref.variable.borrow();
|
||||
|
||||
let result_value = if let Some(value) = self.get(variable.id) {
|
||||
value.clone()
|
||||
} else {
|
||||
|
@ -57,8 +57,11 @@ pub use prelude::*;
|
||||
pub mod value;
|
||||
pub use value::*;
|
||||
|
||||
pub mod stage;
|
||||
pub use stage::*;
|
||||
pub mod phase;
|
||||
pub use phase::*;
|
||||
|
||||
pub mod phases;
|
||||
pub use phases::*;
|
||||
|
||||
pub mod option;
|
||||
pub use option::*;
|
||||
|
@ -16,6 +16,6 @@
|
||||
|
||||
use leo_asg::Program;
|
||||
|
||||
pub trait ASGStage {
|
||||
pub trait ASGPhase {
|
||||
fn apply(asg: &mut Program);
|
||||
}
|
23
compiler/src/phases/mod.rs
Normal file
@ -0,0 +1,23 @@
|
||||
// Copyright (C) 2019-2021 Aleo Systems Inc.
|
||||
// This file is part of the Leo library.
|
||||
|
||||
// The Leo library is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// The Leo library is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
//! Compiles a Leo program from a file path.
|
||||
|
||||
pub mod reducing_director;
|
||||
pub use reducing_director::*;
|
||||
|
||||
pub mod phase;
|
||||
pub use phase::*;
|
65
compiler/src/phases/phase.rs
Normal file
@ -0,0 +1,65 @@
|
||||
// Copyright (C) 2019-2021 Aleo Systems Inc.
|
||||
// This file is part of the Leo library.
|
||||
|
||||
// The Leo library is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// The Leo library is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
//! Compiles a Leo program from a file path.
|
||||
|
||||
use crate::{CombineAstAsgDirector, CombinerOptions};
|
||||
use leo_asg::Program as AsgProgram;
|
||||
use leo_ast::{Ast, Program as AstProgram, ReconstructingReducer, ReducerError};
|
||||
|
||||
macro_rules! phase {
|
||||
($phase_name:ident, $function:item) => {
|
||||
pub struct $phase_name {
|
||||
in_circuit: bool,
|
||||
}
|
||||
|
||||
pub struct Options;
|
||||
|
||||
impl CombinerOptions for Options {
|
||||
$function
|
||||
}
|
||||
|
||||
impl ReconstructingReducer for $phase_name {
|
||||
fn in_circuit(&self) -> bool {
|
||||
self.in_circuit
|
||||
}
|
||||
|
||||
fn swap_in_circuit(&mut self) {
|
||||
self.in_circuit = !self.in_circuit;
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for $phase_name {
|
||||
fn default() -> Self {
|
||||
Self { in_circuit: false }
|
||||
}
|
||||
}
|
||||
|
||||
impl $phase_name {
|
||||
pub fn phase_ast(&self, ast: &AstProgram, asg: &AsgProgram) -> Result<Ast, ReducerError> {
|
||||
Ok(Ast::new(CombineAstAsgDirector::new(Self::default(), Options{})
|
||||
.reduce_program(ast, asg)?))
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
phase!(
|
||||
TypeInferencePhase,
|
||||
fn type_inference_enabled(&self) -> bool {
|
||||
true
|
||||
}
|
||||
);
|
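For reference, a hand expansion of the phase! invocation above would look roughly like this (sketch only; the ReconstructingReducer and Default impls generated by the macro body are omitted):

    pub struct TypeInferencePhase {
        in_circuit: bool,
    }

    pub struct Options;

    impl CombinerOptions for Options {
        fn type_inference_enabled(&self) -> bool {
            true
        }
    }

    impl TypeInferencePhase {
        pub fn phase_ast(&self, ast: &AstProgram, asg: &AsgProgram) -> Result<Ast, ReducerError> {
            Ok(Ast::new(CombineAstAsgDirector::new(Self::default(), Options {}).reduce_program(ast, asg)?))
        }
    }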
782
compiler/src/phases/reducing_director.rs
Normal file
@ -0,0 +1,782 @@
|
||||
// Copyright (C) 2019-2021 Aleo Systems Inc.
|
||||
// This file is part of the Leo library.
|
||||
|
||||
// The Leo library is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// The Leo library is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
//! Compiles a Leo program from a file path.
|
||||
|
||||
use indexmap::IndexMap;
|
||||
use leo_asg::{
|
||||
ArrayAccessExpression as AsgArrayAccessExpression,
|
||||
ArrayInitExpression as AsgArrayInitExpression,
|
||||
ArrayInlineExpression as AsgArrayInlineExpression,
|
||||
ArrayRangeAccessExpression as AsgArrayRangeAccessExpression,
|
||||
AssignAccess as AsgAssignAccess,
|
||||
AssignStatement as AsgAssignStatement,
|
||||
BinaryExpression as AsgBinaryExpression,
|
||||
BlockStatement as AsgBlockStatement,
|
||||
CallExpression as AsgCallExpression,
|
||||
CastExpression as AsgCastExpression,
|
||||
Circuit as AsgCircuit,
|
||||
CircuitAccessExpression as AsgCircuitAccessExpression,
|
||||
CircuitInitExpression as AsgCircuitInitExpression,
|
||||
CircuitMember as AsgCircuitMember,
|
||||
ConditionalStatement as AsgConditionalStatement,
|
||||
ConsoleFunction as AsgConsoleFunction,
|
||||
ConsoleStatement as AsgConsoleStatement,
|
||||
ConstValue,
|
||||
Constant as AsgConstant,
|
||||
DefinitionStatement as AsgDefinitionStatement,
|
||||
Expression as AsgExpression,
|
||||
ExpressionStatement as AsgExpressionStatement,
|
||||
Function as AsgFunction,
|
||||
GroupValue as AsgGroupValue,
|
||||
IterationStatement as AsgIterationStatement,
|
||||
ReturnStatement as AsgReturnStatement,
|
||||
Statement as AsgStatement,
|
||||
TernaryExpression as AsgTernaryExpression,
|
||||
TupleAccessExpression as AsgTupleAccessExpression,
|
||||
TupleInitExpression as AsgTupleInitExpression,
|
||||
Type as AsgType,
|
||||
UnaryExpression as AsgUnaryExpression,
|
||||
VariableRef as AsgVariableRef,
|
||||
};
|
||||
use leo_ast::{
|
||||
ArrayAccessExpression as AstArrayAccessExpression,
|
||||
ArrayDimensions,
|
||||
ArrayInitExpression as AstArrayInitExpression,
|
||||
ArrayInlineExpression as AstArrayInlineExpression,
|
||||
ArrayRangeAccessExpression as AstArrayRangeAccessExpression,
|
||||
AssignStatement as AstAssignStatement,
|
||||
Assignee,
|
||||
AssigneeAccess as AstAssignAccess,
|
||||
BinaryExpression as AstBinaryExpression,
|
||||
Block as AstBlockStatement,
|
||||
CallExpression as AstCallExpression,
|
||||
CastExpression as AstCastExpression,
|
||||
Circuit as AstCircuit,
|
||||
CircuitImpliedVariableDefinition,
|
||||
CircuitInitExpression as AstCircuitInitExpression,
|
||||
CircuitMember as AstCircuitMember,
|
||||
CircuitMemberAccessExpression,
|
||||
CircuitStaticFunctionAccessExpression,
|
||||
CombinerError,
|
||||
ConditionalStatement as AstConditionalStatement,
|
||||
ConsoleFunction as AstConsoleFunction,
|
||||
ConsoleStatement as AstConsoleStatement,
|
||||
DefinitionStatement as AstDefinitionStatement,
|
||||
Expression as AstExpression,
|
||||
ExpressionStatement as AstExpressionStatement,
|
||||
FormatString,
|
||||
Function as AstFunction,
|
||||
GroupTuple,
|
||||
GroupValue as AstGroupValue,
|
||||
IterationStatement as AstIterationStatement,
|
||||
PositiveNumber,
|
||||
ReconstructingReducer,
|
||||
ReducerError,
|
||||
ReturnStatement as AstReturnStatement,
|
||||
Span,
|
||||
SpreadOrExpression,
|
||||
Statement as AstStatement,
|
||||
TernaryExpression as AstTernaryExpression,
|
||||
TupleAccessExpression as AstTupleAccessExpression,
|
||||
TupleInitExpression as AstTupleInitExpression,
|
||||
Type as AstType,
|
||||
UnaryExpression as AstUnaryExpression,
|
||||
ValueExpression,
|
||||
};
|
||||
use tendril::StrTendril;
|
||||
|
||||
pub trait CombinerOptions {
|
||||
fn type_inference_enabled(&self) -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub struct CombineAstAsgDirector<R: ReconstructingReducer, O: CombinerOptions> {
|
||||
ast_reducer: R,
|
||||
options: O,
|
||||
}
|
||||
|
||||
impl<R: ReconstructingReducer, O: CombinerOptions> CombineAstAsgDirector<R, O> {
|
||||
pub fn new(ast_reducer: R, options: O) -> Self {
|
||||
Self { ast_reducer, options }
|
||||
}
|
||||
|
||||
pub fn reduce_type(&mut self, ast: &AstType, asg: &AsgType, span: &Span) -> Result<AstType, ReducerError> {
|
||||
let new = match (ast, asg) {
|
||||
(AstType::Array(ast_type, ast_dimensions), AsgType::Array(asg_type, asg_dimensions)) => {
|
||||
if self.options.type_inference_enabled() {
|
||||
AstType::Array(
|
||||
Box::new(self.reduce_type(ast_type, asg_type, span)?),
|
||||
ArrayDimensions(vec![PositiveNumber {
|
||||
value: StrTendril::from(format!("{}", asg_dimensions)),
|
||||
}]),
|
||||
)
|
||||
} else {
|
||||
AstType::Array(
|
||||
Box::new(self.reduce_type(ast_type, asg_type, span)?),
|
||||
ast_dimensions.clone(),
|
||||
)
|
||||
}
|
||||
}
|
||||
(AstType::Tuple(ast_types), AsgType::Tuple(asg_types)) => {
|
||||
let mut reduced_types = vec![];
|
||||
for (ast_type, asg_type) in ast_types.iter().zip(asg_types) {
|
||||
reduced_types.push(self.reduce_type(ast_type, asg_type, span)?);
|
||||
}
|
||||
|
||||
AstType::Tuple(reduced_types)
|
||||
}
|
||||
_ => ast.clone(),
|
||||
};
|
||||
|
||||
self.ast_reducer.reduce_type(ast, new, span)
|
||||
}
|
||||
|
||||
pub fn reduce_expression(
|
||||
&mut self,
|
||||
ast: &AstExpression,
|
||||
asg: &AsgExpression,
|
||||
) -> Result<AstExpression, ReducerError> {
|
||||
let new = match (ast, asg) {
|
||||
(AstExpression::Value(value), AsgExpression::Constant(const_)) => {
|
||||
AstExpression::Value(self.reduce_value(&value, &const_)?)
|
||||
}
|
||||
(AstExpression::Binary(ast), AsgExpression::Binary(asg)) => {
|
||||
AstExpression::Binary(self.reduce_binary(&ast, &asg)?)
|
||||
}
|
||||
(AstExpression::Unary(ast), AsgExpression::Unary(asg)) => {
|
||||
AstExpression::Unary(self.reduce_unary(&ast, &asg)?)
|
||||
}
|
||||
(AstExpression::Ternary(ast), AsgExpression::Ternary(asg)) => {
|
||||
AstExpression::Ternary(self.reduce_ternary(&ast, &asg)?)
|
||||
}
|
||||
(AstExpression::Cast(ast), AsgExpression::Cast(asg)) => AstExpression::Cast(self.reduce_cast(&ast, &asg)?),
|
||||
|
||||
(AstExpression::ArrayInline(ast), AsgExpression::ArrayInline(asg)) => {
|
||||
AstExpression::ArrayInline(self.reduce_array_inline(&ast, &asg)?)
|
||||
}
|
||||
(AstExpression::ArrayInit(ast), AsgExpression::ArrayInit(asg)) => {
|
||||
AstExpression::ArrayInit(self.reduce_array_init(&ast, &asg)?)
|
||||
}
|
||||
(AstExpression::ArrayAccess(ast), AsgExpression::ArrayAccess(asg)) => {
|
||||
AstExpression::ArrayAccess(self.reduce_array_access(&ast, &asg)?)
|
||||
}
|
||||
(AstExpression::ArrayRangeAccess(ast), AsgExpression::ArrayRangeAccess(asg)) => {
|
||||
AstExpression::ArrayRangeAccess(self.reduce_array_range_access(&ast, &asg)?)
|
||||
}
|
||||
|
||||
(AstExpression::TupleInit(ast), AsgExpression::TupleInit(asg)) => {
|
||||
AstExpression::TupleInit(self.reduce_tuple_init(&ast, &asg)?)
|
||||
}
|
||||
(AstExpression::TupleAccess(ast), AsgExpression::TupleAccess(asg)) => {
|
||||
AstExpression::TupleAccess(self.reduce_tuple_access(&ast, &asg)?)
|
||||
}
|
||||
|
||||
(AstExpression::CircuitInit(ast), AsgExpression::CircuitInit(asg)) => {
|
||||
AstExpression::CircuitInit(self.reduce_circuit_init(&ast, &asg)?)
|
||||
}
|
||||
(AstExpression::CircuitMemberAccess(ast), AsgExpression::CircuitAccess(asg)) => {
|
||||
AstExpression::CircuitMemberAccess(self.reduce_circuit_member_access(&ast, &asg)?)
|
||||
}
|
||||
(AstExpression::CircuitStaticFunctionAccess(ast), AsgExpression::CircuitAccess(asg)) => {
|
||||
AstExpression::CircuitStaticFunctionAccess(self.reduce_circuit_static_fn_access(&ast, &asg)?)
|
||||
}
|
||||
|
||||
(AstExpression::Call(ast), AsgExpression::Call(asg)) => AstExpression::Call(self.reduce_call(&ast, &asg)?),
|
||||
_ => ast.clone(),
|
||||
};
|
||||
|
||||
self.ast_reducer.reduce_expression(ast, new)
|
||||
}
|
||||
|
||||
pub fn reduce_array_access(
|
||||
&mut self,
|
||||
ast: &AstArrayAccessExpression,
|
||||
asg: &AsgArrayAccessExpression,
|
||||
) -> Result<AstArrayAccessExpression, ReducerError> {
|
||||
let array = self.reduce_expression(&ast.array, asg.array.get())?;
|
||||
let index = self.reduce_expression(&ast.index, asg.index.get())?;
|
||||
|
||||
self.ast_reducer.reduce_array_access(ast, array, index)
|
||||
}
|
||||
|
||||
pub fn reduce_array_init(
|
||||
&mut self,
|
||||
ast: &AstArrayInitExpression,
|
||||
asg: &AsgArrayInitExpression,
|
||||
) -> Result<AstArrayInitExpression, ReducerError> {
|
||||
let element = self.reduce_expression(&ast.element, asg.element.get())?;
|
||||
|
||||
self.ast_reducer.reduce_array_init(ast, element)
|
||||
}
|
||||
|
||||
pub fn reduce_array_inline(
|
||||
&mut self,
|
||||
ast: &AstArrayInlineExpression,
|
||||
asg: &AsgArrayInlineExpression,
|
||||
) -> Result<AstArrayInlineExpression, ReducerError> {
|
||||
let mut elements = vec![];
|
||||
for (ast_element, asg_element) in ast.elements.iter().zip(asg.elements.iter()) {
|
||||
let reduced_element = match ast_element {
|
||||
SpreadOrExpression::Expression(ast_expression) => {
|
||||
SpreadOrExpression::Expression(self.reduce_expression(ast_expression, asg_element.0.get())?)
|
||||
}
|
||||
SpreadOrExpression::Spread(ast_expression) => {
|
||||
SpreadOrExpression::Spread(self.reduce_expression(ast_expression, asg_element.0.get())?)
|
||||
}
|
||||
};
|
||||
|
||||
elements.push(reduced_element);
|
||||
}
|
||||
|
||||
self.ast_reducer.reduce_array_inline(ast, elements)
|
||||
}
|
||||
|
||||
pub fn reduce_array_range_access(
|
||||
&mut self,
|
||||
ast: &AstArrayRangeAccessExpression,
|
||||
asg: &AsgArrayRangeAccessExpression,
|
||||
) -> Result<AstArrayRangeAccessExpression, ReducerError> {
|
||||
let array = self.reduce_expression(&ast.array, asg.array.get())?;
|
||||
let left = match (ast.left.as_ref(), asg.left.get()) {
|
||||
(Some(ast_left), Some(asg_left)) => Some(self.reduce_expression(ast_left, asg_left)?),
|
||||
_ => None,
|
||||
};
|
||||
let right = match (ast.right.as_ref(), asg.right.get()) {
|
||||
(Some(ast_right), Some(asg_right)) => Some(self.reduce_expression(ast_right, asg_right)?),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
self.ast_reducer.reduce_array_range_access(ast, array, left, right)
|
||||
}
|
||||
|
||||
pub fn reduce_binary(
|
||||
&mut self,
|
||||
ast: &AstBinaryExpression,
|
||||
asg: &AsgBinaryExpression,
|
||||
) -> Result<AstBinaryExpression, ReducerError> {
|
||||
let left = self.reduce_expression(&ast.left, asg.left.get())?;
|
||||
let right = self.reduce_expression(&ast.right, asg.right.get())?;
|
||||
|
||||
self.ast_reducer.reduce_binary(ast, left, right, ast.op.clone())
|
||||
}
|
||||
|
||||
pub fn reduce_call(
|
||||
&mut self,
|
||||
ast: &AstCallExpression,
|
||||
asg: &AsgCallExpression,
|
||||
) -> Result<AstCallExpression, ReducerError> {
|
||||
// TODO FIGURE IT OUT
|
||||
// let function = self.reduce_expression(&ast.function, asg.function.get())?;
|
||||
// let target = asg.target.get().map(|exp| self.reduce_expression())
|
||||
// Is this needed?
|
||||
|
||||
let mut arguments = vec![];
|
||||
for (ast_arg, asg_arg) in ast.arguments.iter().zip(asg.arguments.iter()) {
|
||||
arguments.push(self.reduce_expression(ast_arg, asg_arg.get())?);
|
||||
}
|
||||
|
||||
self.ast_reducer.reduce_call(ast, *ast.function.clone(), arguments)
|
||||
}
|
||||
|
||||
pub fn reduce_cast(
|
||||
&mut self,
|
||||
ast: &AstCastExpression,
|
||||
asg: &AsgCastExpression,
|
||||
) -> Result<AstCastExpression, ReducerError> {
|
||||
let inner = self.reduce_expression(&ast.inner, &asg.inner.get())?;
|
||||
let target_type = self.reduce_type(&ast.target_type, &asg.target_type, &ast.span)?;
|
||||
|
||||
self.ast_reducer.reduce_cast(ast, inner, target_type)
|
||||
}
|
||||
|
||||
pub fn reduce_circuit_member_access(
|
||||
&mut self,
|
||||
ast: &CircuitMemberAccessExpression,
|
||||
_asg: &AsgCircuitAccessExpression,
|
||||
) -> Result<CircuitMemberAccessExpression, ReducerError> {
|
||||
// let circuit = self.reduce_expression(&circuit_member_access.circuit)?;
|
||||
// let name = self.reduce_identifier(&circuit_member_access.name)?;
|
||||
// let target = input.target.get().map(|e| self.reduce_expression(e));
|
||||
|
||||
self.ast_reducer
|
||||
.reduce_circuit_member_access(ast, *ast.circuit.clone(), ast.name.clone())
|
||||
}
|
||||
|
||||
pub fn reduce_circuit_static_fn_access(
|
||||
&mut self,
|
||||
ast: &CircuitStaticFunctionAccessExpression,
|
||||
_asg: &AsgCircuitAccessExpression,
|
||||
) -> Result<CircuitStaticFunctionAccessExpression, ReducerError> {
|
||||
// let circuit = self.reduce_expression(&circuit_member_access.circuit)?;
|
||||
// let name = self.reduce_identifier(&circuit_member_access.name)?;
|
||||
// let target = input.target.get().map(|e| self.reduce_expression(e));
|
||||
|
||||
self.ast_reducer
|
||||
.reduce_circuit_static_fn_access(ast, *ast.circuit.clone(), ast.name.clone())
|
||||
}
|
||||
|
||||
pub fn reduce_circuit_implied_variable_definition(
|
||||
&mut self,
|
||||
ast: &CircuitImpliedVariableDefinition,
|
||||
asg: &AsgExpression,
|
||||
) -> Result<CircuitImpliedVariableDefinition, ReducerError> {
|
||||
let expression = ast
|
||||
.expression
|
||||
.as_ref()
|
||||
.map(|ast_expr| self.reduce_expression(ast_expr, asg))
|
||||
.transpose()?;
|
||||
|
||||
self.ast_reducer
|
||||
.reduce_circuit_implied_variable_definition(ast, ast.identifier.clone(), expression)
|
||||
}
|
||||
|
||||
pub fn reduce_circuit_init(
|
||||
&mut self,
|
||||
ast: &AstCircuitInitExpression,
|
||||
asg: &AsgCircuitInitExpression,
|
||||
) -> Result<AstCircuitInitExpression, ReducerError> {
|
||||
let mut members = vec![];
|
||||
for (ast_member, asg_member) in ast.members.iter().zip(asg.values.iter()) {
|
||||
members.push(self.reduce_circuit_implied_variable_definition(ast_member, asg_member.1.get())?);
|
||||
}
|
||||
|
||||
self.ast_reducer.reduce_circuit_init(ast, ast.name.clone(), members)
|
||||
}
|
||||
|
||||
pub fn reduce_ternary(
|
||||
&mut self,
|
||||
ast: &AstTernaryExpression,
|
||||
asg: &AsgTernaryExpression,
|
||||
) -> Result<AstTernaryExpression, ReducerError> {
|
||||
let condition = self.reduce_expression(&ast.condition, asg.condition.get())?;
|
||||
let if_true = self.reduce_expression(&ast.if_true, asg.if_true.get())?;
|
||||
let if_false = self.reduce_expression(&ast.if_false, asg.if_false.get())?;
|
||||
|
||||
self.ast_reducer.reduce_ternary(ast, condition, if_true, if_false)
|
||||
}
|
||||
|
||||
pub fn reduce_tuple_access(
|
||||
&mut self,
|
||||
ast: &AstTupleAccessExpression,
|
||||
asg: &AsgTupleAccessExpression,
|
||||
) -> Result<AstTupleAccessExpression, ReducerError> {
|
||||
let tuple = self.reduce_expression(&ast.tuple, asg.tuple_ref.get())?;
|
||||
|
||||
self.ast_reducer.reduce_tuple_access(ast, tuple)
|
||||
}
|
||||
|
||||
pub fn reduce_tuple_init(
|
||||
&mut self,
|
||||
ast: &AstTupleInitExpression,
|
||||
asg: &AsgTupleInitExpression,
|
||||
) -> Result<AstTupleInitExpression, ReducerError> {
|
||||
let mut elements = vec![];
|
||||
for (ast_element, asg_element) in ast.elements.iter().zip(asg.elements.iter()) {
|
||||
let element = self.reduce_expression(ast_element, asg_element.get())?;
|
||||
elements.push(element);
|
||||
}
|
||||
|
||||
self.ast_reducer.reduce_tuple_init(ast, elements)
|
||||
}
|
||||
|
||||
pub fn reduce_unary(
|
||||
&mut self,
|
||||
ast: &AstUnaryExpression,
|
||||
asg: &AsgUnaryExpression,
|
||||
) -> Result<AstUnaryExpression, ReducerError> {
|
||||
let inner = self.reduce_expression(&ast.inner, asg.inner.get())?;
|
||||
|
||||
self.ast_reducer.reduce_unary(ast, inner, ast.op.clone())
|
||||
}
|
||||
|
||||
pub fn reduce_value(&mut self, ast: &ValueExpression, asg: &AsgConstant) -> Result<ValueExpression, ReducerError> {
|
||||
let mut new = ast.clone();
|
||||
|
||||
if self.options.type_inference_enabled() {
|
||||
if let ValueExpression::Implicit(tendril, span) = ast {
|
||||
match &asg.value {
|
||||
ConstValue::Int(int) => {
|
||||
new = ValueExpression::Integer(int.get_int_type(), tendril.clone(), span.clone());
|
||||
}
|
||||
ConstValue::Group(group) => {
|
||||
let group_value = match group {
|
||||
AsgGroupValue::Single(_) => AstGroupValue::Single(tendril.clone(), span.clone()),
|
||||
AsgGroupValue::Tuple(x, y) => AstGroupValue::Tuple(GroupTuple {
|
||||
x: x.into(),
|
||||
y: y.into(),
|
||||
span: span.clone(),
|
||||
}),
|
||||
};
|
||||
new = ValueExpression::Group(Box::new(group_value));
|
||||
}
|
||||
ConstValue::Field(_) => {
|
||||
new = ValueExpression::Field(tendril.clone(), span.clone());
|
||||
}
|
||||
ConstValue::Address(_) => {
|
||||
new = ValueExpression::Address(tendril.clone(), span.clone());
|
||||
}
|
||||
ConstValue::Boolean(_) => {
|
||||
new = ValueExpression::Boolean(tendril.clone(), span.clone());
|
||||
}
|
||||
_ => unimplemented!(), // impossible?
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.ast_reducer.reduce_value(ast, new)
|
||||
}
|
||||
|
||||
pub fn reduce_variable_ref(
|
||||
&mut self,
|
||||
ast: &ValueExpression,
|
||||
_asg: &AsgVariableRef,
|
||||
) -> Result<ValueExpression, ReducerError> {
|
||||
// TODO FIGURE IT OUT
|
||||
let new = match ast {
|
||||
// ValueExpression::Group(group_value) => {
|
||||
// ValueExpression::Group(Box::new(self.reduce_group_value(&group_value)?))
|
||||
// }
|
||||
_ => ast.clone(),
|
||||
};
|
||||
|
||||
Ok(new)
|
||||
// self.ast_reducer.reduce_value(value, new)
|
||||
}
|
||||
|
||||
pub fn reduce_statement(
|
||||
&mut self,
|
||||
ast_statement: &AstStatement,
|
||||
asg_statement: &AsgStatement,
|
||||
) -> Result<AstStatement, ReducerError> {
|
||||
let new = match (ast_statement, asg_statement) {
|
||||
(AstStatement::Assign(ast), AsgStatement::Assign(asg)) => {
|
||||
AstStatement::Assign(self.reduce_assign(ast, asg)?)
|
||||
}
|
||||
(AstStatement::Block(ast), AsgStatement::Block(asg)) => AstStatement::Block(self.reduce_block(ast, asg)?),
|
||||
(AstStatement::Conditional(ast), AsgStatement::Conditional(asg)) => {
|
||||
AstStatement::Conditional(self.reduce_conditional(ast, asg)?)
|
||||
}
|
||||
(AstStatement::Console(ast), AsgStatement::Console(asg)) => {
|
||||
AstStatement::Console(self.reduce_console(ast, asg)?)
|
||||
}
|
||||
(AstStatement::Definition(ast), AsgStatement::Definition(asg)) => {
|
||||
AstStatement::Definition(self.reduce_definition(ast, asg)?)
|
||||
}
|
||||
(AstStatement::Expression(ast), AsgStatement::Expression(asg)) => {
|
||||
AstStatement::Expression(self.reduce_expression_statement(ast, asg)?)
|
||||
}
|
||||
(AstStatement::Iteration(ast), AsgStatement::Iteration(asg)) => {
|
||||
AstStatement::Iteration(self.reduce_iteration(ast, asg)?)
|
||||
}
|
||||
(AstStatement::Return(ast), AsgStatement::Return(asg)) => {
|
||||
AstStatement::Return(self.reduce_return(ast, asg)?)
|
||||
}
|
||||
_ => ast_statement.clone(),
|
||||
};
|
||||
|
||||
self.ast_reducer.reduce_statement(ast_statement, new)
|
||||
}
|
||||
|
||||
pub fn reduce_assign_access(
|
||||
&mut self,
|
||||
ast: &AstAssignAccess,
|
||||
asg: &AsgAssignAccess,
|
||||
) -> Result<AstAssignAccess, ReducerError> {
|
||||
let new = match (ast, asg) {
|
||||
(AstAssignAccess::ArrayRange(ast_left, ast_right), AsgAssignAccess::ArrayRange(asg_left, asg_right)) => {
|
||||
let left = match (ast_left.as_ref(), asg_left.get()) {
|
||||
(Some(ast_left), Some(asg_left)) => Some(self.reduce_expression(ast_left, asg_left)?),
|
||||
_ => None,
|
||||
};
|
||||
let right = match (ast_right.as_ref(), asg_right.get()) {
|
||||
(Some(ast_right), Some(asg_right)) => Some(self.reduce_expression(ast_right, asg_right)?),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
AstAssignAccess::ArrayRange(left, right)
|
||||
}
|
||||
(AstAssignAccess::ArrayIndex(ast_index), AsgAssignAccess::ArrayIndex(asg_index)) => {
|
||||
let index = self.reduce_expression(&ast_index, asg_index.get())?;
|
||||
AstAssignAccess::ArrayIndex(index)
|
||||
}
|
||||
_ => ast.clone(),
|
||||
};
|
||||
|
||||
self.ast_reducer.reduce_assignee_access(ast, new)
|
||||
}
|
||||
|
||||
pub fn reduce_assignee(&mut self, ast: &Assignee, asg: &[AsgAssignAccess]) -> Result<Assignee, ReducerError> {
|
||||
let mut accesses = vec![];
|
||||
for (ast_access, asg_access) in ast.accesses.iter().zip(asg) {
|
||||
accesses.push(self.reduce_assign_access(ast_access, asg_access)?);
|
||||
}
|
||||
|
||||
self.ast_reducer.reduce_assignee(ast, ast.identifier.clone(), accesses)
|
||||
}
|
||||
|
||||
pub fn reduce_assign(
|
||||
&mut self,
|
||||
ast: &AstAssignStatement,
|
||||
asg: &AsgAssignStatement,
|
||||
) -> Result<AstAssignStatement, ReducerError> {
|
||||
let assignee = self.reduce_assignee(&ast.assignee, &asg.target_accesses)?;
|
||||
let value = self.reduce_expression(&ast.value, asg.value.get())?;
|
||||
|
||||
self.ast_reducer.reduce_assign(ast, assignee, value)
|
||||
}
|
||||
|
||||
pub fn reduce_block(
|
||||
&mut self,
|
||||
ast: &AstBlockStatement,
|
||||
asg: &AsgBlockStatement,
|
||||
) -> Result<AstBlockStatement, ReducerError> {
|
||||
let mut statements = vec![];
|
||||
for (ast_statement, asg_statement) in ast.statements.iter().zip(asg.statements.iter()) {
|
||||
statements.push(self.reduce_statement(ast_statement, asg_statement.get())?);
|
||||
}
|
||||
|
||||
self.ast_reducer.reduce_block(ast, statements)
|
||||
}
|
||||
|
||||
pub fn reduce_conditional(
|
||||
&mut self,
|
||||
ast: &AstConditionalStatement,
|
||||
asg: &AsgConditionalStatement,
|
||||
) -> Result<AstConditionalStatement, ReducerError> {
|
||||
let condition = self.reduce_expression(&ast.condition, asg.condition.get())?;
|
||||
let block;
|
||||
if let AsgStatement::Block(asg_block) = asg.result.get() {
|
||||
block = self.reduce_block(&ast.block, asg_block)?;
|
||||
} else {
|
||||
return Err(ReducerError::from(CombinerError::asg_statement_not_block(
|
||||
&asg.span.as_ref().unwrap(),
|
||||
)));
|
||||
}
|
||||
let next = match (ast.next.as_ref(), asg.next.get()) {
|
||||
(Some(ast_next), Some(asg_next)) => Some(self.reduce_statement(ast_next, asg_next)?),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
self.ast_reducer.reduce_conditional(ast, condition, block, next)
|
||||
}
|
||||
|
||||
pub fn reduce_console(
|
||||
&mut self,
|
||||
ast: &AstConsoleStatement,
|
||||
asg: &AsgConsoleStatement,
|
||||
) -> Result<AstConsoleStatement, ReducerError> {
|
||||
let function = match (&ast.function, &asg.function) {
|
||||
(AstConsoleFunction::Assert(ast_expression), AsgConsoleFunction::Assert(asg_expression)) => {
|
||||
AstConsoleFunction::Assert(self.reduce_expression(&ast_expression, asg_expression.get())?)
|
||||
}
|
||||
(AstConsoleFunction::Debug(ast_format), AsgConsoleFunction::Debug(asg_format))
|
||||
| (AstConsoleFunction::Error(ast_format), AsgConsoleFunction::Error(asg_format))
|
||||
| (AstConsoleFunction::Log(ast_format), AsgConsoleFunction::Log(asg_format)) => {
|
||||
let mut parameters = vec![];
|
||||
for (ast_parameter, asg_parameter) in ast_format.parameters.iter().zip(asg_format.parameters.iter()) {
|
||||
parameters.push(self.reduce_expression(&ast_parameter, asg_parameter.get())?);
|
||||
}
|
||||
|
||||
let formatted = FormatString {
|
||||
parts: ast_format.parts.clone(),
|
||||
parameters,
|
||||
span: ast_format.span.clone(),
|
||||
};
|
||||
|
||||
match &ast.function {
|
||||
AstConsoleFunction::Debug(_) => AstConsoleFunction::Debug(formatted),
|
||||
AstConsoleFunction::Error(_) => AstConsoleFunction::Error(formatted),
|
||||
AstConsoleFunction::Log(_) => AstConsoleFunction::Log(formatted),
|
||||
_ => return Err(ReducerError::impossible_console_assert_call(&ast_format.span)),
|
||||
}
|
||||
}
|
||||
_ => ast.function.clone(),
|
||||
};
|
||||
|
||||
self.ast_reducer.reduce_console(ast, function)
|
||||
}
|
||||
|
||||
pub fn reduce_definition(
|
||||
&mut self,
|
||||
ast: &AstDefinitionStatement,
|
||||
asg: &AsgDefinitionStatement,
|
||||
) -> Result<AstDefinitionStatement, ReducerError> {
|
||||
let type_;
|
||||
|
||||
if asg.variables.len() > 1 {
|
||||
let mut types = vec![];
|
||||
for variable in asg.variables.iter() {
|
||||
types.push(variable.borrow().type_.clone());
|
||||
}
|
||||
|
||||
let asg_type = AsgType::Tuple(types);
|
||||
|
||||
type_ = match &ast.type_ {
|
||||
Some(ast_type) => Some(self.reduce_type(&ast_type, &asg_type, &ast.span)?),
|
||||
None if self.options.type_inference_enabled() => Some((&asg_type).into()),
|
||||
_ => None,
|
||||
};
|
||||
} else {
|
||||
type_ = match &ast.type_ {
|
||||
Some(ast_type) => {
|
||||
Some(self.reduce_type(&ast_type, &asg.variables.first().unwrap().borrow().type_, &ast.span)?)
|
||||
}
|
||||
None if self.options.type_inference_enabled() => {
|
||||
Some((&asg.variables.first().unwrap().borrow().type_).into())
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
}
|
||||
|
||||
let value = self.reduce_expression(&ast.value, asg.value.get())?;
|
||||
|
||||
self.ast_reducer
|
||||
.reduce_definition(ast, ast.variable_names.clone(), type_, value)
|
||||
}
|
||||
|
||||
pub fn reduce_expression_statement(
|
||||
&mut self,
|
||||
ast: &AstExpressionStatement,
|
||||
asg: &AsgExpressionStatement,
|
||||
) -> Result<AstExpressionStatement, ReducerError> {
|
||||
let inner_expression = self.reduce_expression(&ast.expression, asg.expression.get())?;
|
||||
self.ast_reducer.reduce_expression_statement(ast, inner_expression)
|
||||
}
|
||||
|
||||
pub fn reduce_iteration(
|
||||
&mut self,
|
||||
ast: &AstIterationStatement,
|
||||
asg: &AsgIterationStatement,
|
||||
) -> Result<AstIterationStatement, ReducerError> {
|
||||
let start = self.reduce_expression(&ast.start, asg.start.get())?;
|
||||
let stop = self.reduce_expression(&ast.stop, asg.stop.get())?;
|
||||
let block;
|
||||
if let AsgStatement::Block(asg_block) = asg.body.get() {
|
||||
block = self.reduce_block(&ast.block, asg_block)?;
|
||||
} else {
|
||||
return Err(ReducerError::from(CombinerError::asg_statement_not_block(
|
||||
&asg.span.as_ref().unwrap(),
|
||||
)));
|
||||
}
|
||||
|
||||
self.ast_reducer
|
||||
.reduce_iteration(ast, ast.variable.clone(), start, stop, block)
|
||||
}
|
||||
|
||||
pub fn reduce_return(
|
||||
&mut self,
|
||||
ast: &AstReturnStatement,
|
||||
asg: &AsgReturnStatement,
|
||||
) -> Result<AstReturnStatement, ReducerError> {
|
||||
let expression = self.reduce_expression(&ast.expression, asg.expression.get())?;
|
||||
|
||||
self.ast_reducer.reduce_return(ast, expression)
|
||||
}
|
||||
|
||||
pub fn reduce_program(
|
||||
&mut self,
|
||||
ast: &leo_ast::Program,
|
||||
asg: &leo_asg::Program,
|
||||
) -> Result<leo_ast::Program, leo_ast::ReducerError> {
|
||||
self.ast_reducer.swap_in_circuit();
|
||||
let mut circuits = IndexMap::new();
|
||||
for ((ast_ident, ast_circuit), (_asg_ident, asg_circuit)) in ast.circuits.iter().zip(&asg.circuits) {
|
||||
circuits.insert(ast_ident.clone(), self.reduce_circuit(ast_circuit, asg_circuit)?);
|
||||
}
|
||||
self.ast_reducer.swap_in_circuit();
|
||||
|
||||
let mut functions = IndexMap::new();
|
||||
for ((ast_ident, ast_function), (_asg_ident, asg_function)) in ast.functions.iter().zip(&asg.functions) {
|
||||
functions.insert(ast_ident.clone(), self.reduce_function(ast_function, asg_function)?);
|
||||
}
|
||||
|
||||
let mut global_consts = IndexMap::new();
|
||||
for ((ast_str, ast_definition), (_asg_str, asg_definition)) in ast.global_consts.iter().zip(&asg.global_consts)
|
||||
{
|
||||
global_consts.insert(ast_str.clone(), self.reduce_definition(ast_definition, asg_definition)?);
|
||||
}
|
||||
|
||||
self.ast_reducer.reduce_program(
|
||||
ast,
|
||||
ast.expected_input.clone(),
|
||||
ast.imports.clone(),
|
||||
circuits,
|
||||
functions,
|
||||
global_consts,
|
||||
)
|
||||
}
|
||||
|
||||
pub fn reduce_function(&mut self, ast: &AstFunction, asg: &AsgFunction) -> Result<AstFunction, ReducerError> {
|
||||
let output = ast
|
||||
.output
|
||||
.as_ref()
|
||||
.map(|type_| self.reduce_type(type_, &asg.output, &ast.span))
|
||||
.transpose()?;
|
||||
|
||||
let mut statements = vec![];
|
||||
if let Some(AsgStatement::Block(asg_block)) = asg.body.get() {
|
||||
for (ast_statement, asg_statement) in ast.block.statements.iter().zip(asg_block.statements.iter()) {
|
||||
statements.push(self.reduce_statement(ast_statement, asg_statement.get())?);
|
||||
}
|
||||
}
|
||||
|
||||
let block = AstBlockStatement {
|
||||
statements,
|
||||
span: ast.block.span.clone(),
|
||||
};
|
||||
|
||||
self.ast_reducer.reduce_function(
|
||||
ast,
|
||||
ast.identifier.clone(),
|
||||
ast.annotations.clone(),
|
||||
ast.input.clone(),
|
||||
output,
|
||||
block,
|
||||
)
|
||||
}
|
||||
|
||||
pub fn reduce_circuit_member(
|
||||
&mut self,
|
||||
ast: &AstCircuitMember,
|
||||
asg: &AsgCircuitMember,
|
||||
) -> Result<AstCircuitMember, ReducerError> {
|
||||
let new = match (ast, asg) {
|
||||
(AstCircuitMember::CircuitVariable(identifier, ast_type), AsgCircuitMember::Variable(asg_type)) => {
|
||||
AstCircuitMember::CircuitVariable(
|
||||
identifier.clone(),
|
||||
self.reduce_type(ast_type, asg_type, &identifier.span)?,
|
||||
)
|
||||
}
|
||||
(AstCircuitMember::CircuitFunction(ast_function), AsgCircuitMember::Function(asg_function)) => {
|
||||
AstCircuitMember::CircuitFunction(self.reduce_function(ast_function, asg_function)?)
|
||||
}
|
||||
_ => ast.clone(),
|
||||
};
|
||||
|
||||
self.ast_reducer.reduce_circuit_member(ast, new)
|
||||
}
|
||||
|
||||
pub fn reduce_circuit(&mut self, ast: &AstCircuit, asg: &AsgCircuit) -> Result<AstCircuit, ReducerError> {
|
||||
let mut members = vec![];
|
||||
for (ast_member, asg_member) in ast.members.iter().zip(asg.members.borrow().iter()) {
|
||||
members.push(self.reduce_circuit_member(ast_member, asg_member.1)?);
|
||||
}
|
||||
|
||||
self.ast_reducer.reduce_circuit(ast, ast.circuit_name.clone(), members)
|
||||
}
|
||||
}
|
@ -41,7 +41,7 @@ fn new_compiler() -> EdwardsTestCompiler {
|
||||
let path = PathBuf::from("/test/src/main.leo");
|
||||
let output_dir = PathBuf::from("/output/");
|
||||
|
||||
EdwardsTestCompiler::new(program_name, path, output_dir, make_test_context())
|
||||
EdwardsTestCompiler::new(program_name, path, output_dir, make_test_context(), None)
|
||||
}
|
||||
|
||||
pub(crate) fn parse_program(program_string: &str) -> Result<EdwardsTestCompiler, CompilerError> {
|
||||
|
44
docs/rfc/000-rfc-format.md
Normal file
@ -0,0 +1,44 @@
|
||||
# Summary
|
||||
|
||||
This is an RFC to propose an RFC format for the Leo language.
|
||||
|
||||
# Motivation
|
||||
|
||||
Leo develops rapidly, and changes to the language need to be stated clearly and presented to everyone. An RFC is the perfect tool for turning chaos into order.
|
||||
|
||||
# Design
|
||||
|
||||
This section describes the proposed solution.
|
||||
|
||||
## Store RFCs inside the Leo repository
|
||||
|
||||
At this early stage it is better to keep the RFCs next to the code, eliminating the need to track a separate repository.
|
||||
|
||||
## Use standard PR mechanics for submitting new RFCs
|
||||
|
||||
New RFCs should be submitted as PRs into the Leo repository. PRs should be correctly labeled for easier search, but they should not receive a number until the PR is accepted by the Leo maintainers.
|
||||
|
||||
## Increase approvals count for RFCs
|
||||
|
||||
RFCs may propose changes affecting multiple systems or projects, and they introduce new changes to the language design or structure. Because of that, they should be reviewed by as many involved team members as possible. Increasing the number of required approvals should help achieve this goal.
|
||||
|
||||
## Format
|
||||
|
||||
To bootstrap new requests, a template is provided in the RFC folder.
|
||||
|
||||
## Number
|
||||
|
||||
RFCs should receive a number once they are accepted.
|
||||
|
||||
# Drawbacks
|
||||
|
||||
None.
|
||||
|
||||
# Effect on Ecosystem
|
||||
|
||||
None.
|
||||
|
||||
# Alternatives
|
||||
|
||||
Propose language changes in team channels or as GitHub issues.
|
||||
|
279
docs/rfc/001-initial-strings.md
Normal file
@ -0,0 +1,279 @@
|
||||
# Leo RFC 001: Initial String Support
|
||||
|
||||
## Authors
|
||||
|
||||
- Max Bruce
|
||||
- Collin Chin
|
||||
- Alessandro Coglio
|
||||
- Eric McCarthy
|
||||
- Pratyush Mishra
|
||||
- Jon Pavlik
|
||||
- Damir Shamanaev
|
||||
- Damon Sicore
|
||||
- Howard Wu
|
||||
|
||||
## Status
|
||||
|
||||
DRAFT
|
||||
|
||||
# Summary
|
||||
|
||||
The purpose of this proposal is to provide initial support for strings in Leo.
|
||||
Since strings are sequences of characters,
|
||||
the proposal inextricably also involves characters.
|
||||
This proposal is described as 'initial'
|
||||
because it provides some basic features that we may extend in the future;
|
||||
the initial features should be sufficiently simple and conservative
|
||||
that they should not limit the design of the future features.
|
||||
|
||||
This proposal adds a new scalar type for characters
|
||||
along with a new kind of literals to denote characters.
|
||||
A string is then simply an array of characters,
|
||||
but this proposal also adds a new kind of literals to denote strings
|
||||
more directly than via character array construction expressions.
|
||||
Along with equality and inequality, which always apply to every Leo type,
|
||||
this proposal also introduces operations for
|
||||
_[TODO: Summarize initial set of built-in or library operations
|
||||
on characters and strings.]_.
|
||||
|
||||
By not prescribing a new type for strings,
|
||||
this initial proposal leaves the door open
|
||||
to a future more flexible type of resizable strings.
|
||||
|
||||
# Motivation
|
||||
|
||||
Strings (and characters) are common in programming languages.
|
||||
Use cases for Leo include
|
||||
simple ones like URLs and token ticker symbols,
|
||||
and more complex ones like Bech32 encoding,
|
||||
edit distance in strings representing proteins,
|
||||
and zero-knowledge proofs of occurrences or absences of patterns in textual logs.
|
||||
_[TODO: Add more use cases if needed.]_
|
||||
|
||||
# Design
|
||||
|
||||
Since strings are sequences of characters,
|
||||
a design for strings inextricably also involves a design for characters.
|
||||
Thus, we first present a design for characters, then for strings.
|
||||
After that, we discuss the relation with Leo's existing format strings.
|
||||
We conclude this design section
|
||||
with a discussion of possible future extensions.
|
||||
|
||||
## Characters
|
||||
|
||||
We add a new scalar type `char` for characters.
|
||||
In accord with Leo's strong typing,
|
||||
this new type is separate from all the other scalar types.
|
||||
|
||||
The set of values of type `char` is isomorphic to
|
||||
the set of Unicode code points from 0 to 10FFFFh (both inclusive).
|
||||
That is, we support Unicode characters, more precisely code points
|
||||
(this may include some invalid code points,
|
||||
but it is simpler to allow every code point in that range).
|
||||
A character is an atomic entity:
|
||||
there is no notion of Unicode encoding (e.g. UTF-8) that applies here.
|
||||
|
||||
We add a new kind of literals for characters,
|
||||
consisting of single characters or escapes,
|
||||
surrounded by single quotes.
|
||||
Any single Unicode character except single quote is allowed,
|
||||
e.g. `'a'`, `'*'`, and `'"'`.
|
||||
Single quotes must be escaped with backslash, i.e. `'\''`;
|
||||
backslashes must be escaped as well, i.e. `'\\'`.
|
||||
We allow other backslash escapes
|
||||
for commonly used characters that are not otherwise easily denoted,
|
||||
namely _[TODO: Decide which other escapes we want to allow, e.g. `'\n'`.]_
|
||||
We also allow Unicode escapes of the form `'\u{X}'`,
|
||||
where `X` is a sequence of one or more hex digits
|
||||
(both uppercase and lowercase letters are allowed)
|
||||
whose value must be between 0 and 10FFFFh.
|
||||
Note that the literal character is assembled by the compiler---for
|
||||
creating literals there is no need for the circuit to know
|
||||
which codepoints are disallowed.
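As a concrete illustration of the proposed syntax (a sketch only, not normative; the function and variable names are made up for the example):

```
function main() -> char {
    let a: char = 'a';            // plain character literal
    let quote: char = '\'';       // escaped single quote
    let lambda: char = '\u{3BB}'; // Unicode escape for U+03BB
    return lambda;
}
```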
|
||||
_[TODO: Do we want a different notation for Unicode escapes?
|
||||
Note that the `{` `}` delimiters are motivated by the fact that
|
||||
there may be a varying number of hex digits in this notation.]_
|
||||
|
||||
_[TODO: Which (initial) built-in or library operations
|
||||
do we want to provide for `char` values?]_
|
||||
|
||||
## Strings
|
||||
|
||||
In this initial design proposal, we do not introduce any new type for strings.
|
||||
Instead, we rely on the fact that Leo already has arrays,
|
||||
and that arrays of characters can be regarded as strings.
|
||||
Existing array operations, such as element and range access,
|
||||
apply to these strings without the need of language extensions.
|
||||
|
||||
To ease the common use case of writing a string value in the code,
|
||||
we add a new kind of literal for strings (i.e. character arrays),
|
||||
consisting of a sequence of one or more single characters or escapes
|
||||
surrounded by double quotes;
|
||||
this is just syntactic sugar.
|
||||
Any single Unicode character except double quote is allowed,
|
||||
e.g. `""`, `"Aleo"`, `"it's"`, and `"x + y"`.
|
||||
Double quotes must be escaped with backslash, e.g. `"say \"hi\""`;
|
||||
backslashes must be escaped as well, e.g. `"c:\\dir"`.
|
||||
We allow the same backslash escapes allowed for character literals
|
||||
(see the section on characters above).
|
||||
_[TODO: There is a difference in the treatment of single and double quotes:
|
||||
the former are allowed in string literals but not character literals,
|
||||
while the latter are allowed in character literals but not string literals;
|
||||
this asymmetry is also present in Java.
|
||||
However, for simplicity we may want to symmetrically disallow
|
||||
both single and double quotes in both character and string literals.]_
|
||||
We also allow the same Unicode escapes allowed in character literals
|
||||
(described in the section on characters above).
|
||||
In any case, the type of a string literal is `[char; N]`,
|
||||
where `N` is the length of the string measured in characters,
|
||||
i.e. the size of the array.
|
||||
Note that there is no notion of Unicode encoding (e.g. UTF-8)
|
||||
that applies to string literals.
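As a sketch of how this might look in code (assuming the proposed string-literal sugar; the identifiers are made up for the example):

```
function main() -> [char; 4] {
    let name: [char; 4] = "Aleo"; // sugar for ['A', 'l', 'e', 'o']
    let first: char = name[0];    // existing array indexing applies
    return name;
}
```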
|
||||
|
||||
The rationale for not introducing a new type for strings initially,
|
||||
and instead piggyback on the existing array types and operations,
|
||||
is twofold.
|
||||
First, it is an economical design
|
||||
that lets us reuse the existing array machinery,
|
||||
both at the language level (e.g. readily use array operations)
|
||||
and at the R1CS compilation level
|
||||
(see the section on compilation to R1CS below).
|
||||
Second, it leaves the door open to providing,
|
||||
in a future design iteration,
|
||||
a richer type for strings,
|
||||
as discussed in the section about future extensions below.
|
||||
|
||||
_[TODO: Which (initial) built-in or library operations
|
||||
do we want to provide for `[char; N]` values that are not already
|
||||
available with the existing array operations?]_
|
||||
* `u8` to `[char; 2]` hexstring, .., `u128` to `[char; 32]` hexstring
|
||||
* field element to `[char; 64]` hexstring. (Application can test leading zeros and slice them out if it needs to return, say, a 40-hex-digit string)
|
||||
* _[TODO: more?]_
|
||||
|
||||
## Input and Output of Literal Characters and Strings
|
||||
|
||||
Since UTF-8 is a standard encoding, it would make sense for
|
||||
the literal characters and strings in the `.in` file
|
||||
to be automatically converted to UTF-32 by the Leo compiler.
|
||||
However, the size of a string can be confusing, since multiple
|
||||
Unicode code points can be composed into a single glyph which
|
||||
then appears to be a single character. If a parameter of type `[char; 10]`
|
||||
[if that is the syntax we decide on] is passed a literal string
|
||||
of a different size, the error message should explain that the
|
||||
size must be the number of codepoints needed to encode the string.
|
||||
|
||||
## Format Strings
|
||||
|
||||
Leo currently supports format strings as their own entity,
|
||||
usable exclusively as first arguments of console print calls.
|
||||
This proposal eliminates this very specific notion,
|
||||
which is subsumed by the string literals described above.
|
||||
In other words, a console print call
|
||||
will simply take a string literal as first argument,
|
||||
which will be interpreted as a format string
|
||||
according to the semantics of console print calls.
|
||||
The internal UTF-32 string will be translated to UTF-8 for output.
|
||||
|
||||
## Compilation to R1CS
|
||||
|
||||
So far the discussion has been independent from R1CS
|
||||
(except for a brief reference when discussing the rationale behind the design).
|
||||
This is intentional, because the syntax and semantics of Leo
|
||||
should be understandable independently from the compilation of Leo to R1CS.
|
||||
However, compilation to R1CS is a critical consideration
|
||||
that affects the design of Leo.
|
||||
This section discusses R1CS compilation considerations
|
||||
for this proposal for characters and strings.
|
||||
|
||||
Values of type `char` can be represented directly as field elements,
|
||||
since the prime of the field is (much) larger than 10FFFFh.
|
||||
This is more efficient than using a bit representation of characters.
|
||||
By construction, field elements that represent `char` values
|
||||
are never above 10FFFFh.
|
||||
Note that `field` and `char` remain separate types in Leo:
|
||||
it is only in the compilation to R1CS
|
||||
that everything is reduced to field elements.
|
||||
|
||||
Since strings are just arrays of characters,
|
||||
there is nothing special about compiling strings to R1CS,
|
||||
compared to other types of arrays.
|
||||
In particular, the machinery to infer array sizes at compile time,
|
||||
necessary for the flattening to R1CS,
|
||||
applies to strings without exception.
|
||||
String literals are just syntactic sugar for
|
||||
suitable array inline construction expressions.
|
||||
|
||||
## Future Extensions
|
||||
|
||||
As alluded to in the section about design above,
|
||||
for now we are avoiding the introduction of a string type,
|
||||
isomorphic to but separate from character arrays,
|
||||
because we may want to introduce later a more flexible type of strings,
|
||||
in particular one that supports resizing.
|
||||
This may be realized via a built-in or library circuit type
|
||||
that includes a character array and a fill index.
|
||||
This may be a special case of a built-in or library circuit type
|
||||
for resizable vectors,
|
||||
possibly realized via an array and a fill index.
|
||||
This hypothetical type of resizable vectors
|
||||
may have to be parameterized over the element type,
|
||||
requiring an extension of the Leo type system
|
||||
that is much more general than strings.
|
||||
|
||||
Because of the above considerations,
|
||||
it seems premature to design a string type at this time,
|
||||
provided that the simple initial design described in the section above
|
||||
suffices to cover the initial use cases that motivate this RFC.
|
||||
|
||||
# Drawbacks
|
||||
|
||||
This proposal does not appear to bring any real drawbacks,
|
||||
other than making the language inevitably slightly more complex.
|
||||
But the need to support characters and strings justifies the extra complexity.
|
||||
|
||||
# Effect on Ecosystem
|
||||
|
||||
With the ability of Leo programs to process strings,
|
||||
it may be useful to have external tools that convert Leo strings
|
||||
to/from common formats, e.g. UTF-8.
|
||||
|
||||
# Alternatives
|
||||
|
||||
We could avoid the new `char` type altogether,
|
||||
and instead rely on the existing `u32` to represent Unicode code points,
|
||||
and provide character-oriented operations on `u32` values.
|
||||
(Note that both `u8` and `u16` are too small for 10FFFFh,
|
||||
and that signed integer types include negative integers
|
||||
which are not Unicode code points:
|
||||
this makes `u32` the obvious choice.)
|
||||
However, many values of type `u32` are above 10FFFFh,
|
||||
and many operations on `u32` do not really make sense on code points.
|
||||
We would probably want a notation for character literals anyhow,
|
||||
which could be (arguably mis)used for non-character unsigned integers.
|
||||
All in all, introducing a new type for characters
|
||||
is consistent with Leo's strong typing approach.
|
||||
Furthermore, for compilation to R1CS, `u32`,
|
||||
even if restricted to the number of bits needed for Unicode code points,
|
||||
is less efficient than the field representation described earlier,
|
||||
because `u32` requires a field element for each bit.
|
||||
|
||||
Instead of representing strings as character arrays,
|
||||
we could introduce a new type `string`
|
||||
whose values are finite sequences of zero or more characters.
|
||||
These strings would be isomorphic to, but distinct from, character arrays.
|
||||
However, for compilation to R1CS, it would be necessary to
|
||||
perform the same kind of known-size analysis on strings
|
||||
that is already performed on arrays,
|
||||
possibly necessitating the inclusion of a size as part of the type, i.e. `string(N)`,
|
||||
which is obviously isomorphic to `[char; N]`.
|
||||
Thus, using character arrays avoids the duplication.
|
||||
Furthermore, as noted in the section on future extensions,
|
||||
this leaves the door open to
|
||||
introducing a future type `string` for resizable strings.
|
||||
|
||||
Yet another option could be to use directly `field` to represent characters
|
||||
and `[field; N]` to represent strings of `N` characters.
|
||||
However, many values of type `field` are not valid Unicode code points,
|
||||
and many field operations do not make sense for characters.
|
||||
Thus, having a separate type `char` for characters seems better,
|
||||
and more in accordance with Leo's strong typing.
|
24
docs/rfc/__template.md
Normal file
@ -0,0 +1,24 @@
|
||||
# Summary
|
||||
|
||||
What is the proposal?
|
||||
|
||||
# Motivation
|
||||
|
||||
What problems does it solve? What is the background?
|
||||
|
||||
# Design
|
||||
|
||||
What are the details of the proposal?
|
||||
|
||||
# Drawbacks
|
||||
|
||||
What problems does this solution bring in?
|
||||
|
||||
# Effect on Ecosystem
|
||||
|
||||
How do the changes affect other projects and the language ecosystem in general?
|
||||
|
||||
# Alternatives
|
||||
|
||||
What are the alternatives?
|
||||
|
14
examples/silly-sudoku/inputs/test-input.in
Normal file
@ -0,0 +1,14 @@
|
||||
// The program input for tmp-test/src/main.leo
|
||||
[main]
|
||||
puzzle: [u8; (3, 3)] = [[1, 0, 5],
|
||||
[0, 2, 0],
|
||||
[7, 0, 0]];
|
||||
|
||||
answer: [u8; (3, 3)] = [[1, 4, 5],
|
||||
[3, 2, 6],
|
||||
[7, 8, 9]];
|
||||
|
||||
expected: bool = true;
|
||||
|
||||
[registers]
|
||||
r: bool = false;
|
@ -1,30 +1,18 @@
|
||||
Leo Library
|
||||
Copyright (C) 2019-2021 Aleo Systems Inc.
|
||||
This file is part of the Leo library.
|
||||
|
||||
The Leo library is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
The Leo library is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
Copyright (C) 2021 Aleo Systems Inc.
|
||||
|
||||
|
||||
--------
|
||||
|
||||
|
||||
Format Note
|
||||
-----------
|
||||
|
||||
The ABNF standard requires grammars to consist of lines terminated by CR LF
|
||||
(i.e. carriage return followed by line feed, DOS/Windows-style),
|
||||
as explained in the background on ABNF later in this file.
|
||||
This file's lines are therefore terminated by CR LF.
|
||||
To avoid losing this requirement across systems,
|
||||
this file is marked as 'text eol=crlf' in .gitattributes:
|
||||
this means that the file is textual, enabling visual diffs,
|
||||
but its lines will always be terminated by CR LF on any system.
|
||||
|
||||
Note that this CR LF requirement only applies to the grammar files themselves.
|
||||
It does not apply to the lines of the languages described by the grammar.
|
||||
ABNF grammars may describe any kind of languages,
|
||||
with any kind of line terminators,
|
||||
or even without line terminators at all (e.g. for "binary" languages).
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
|
||||
--------
|
||||
@ -69,59 +57,59 @@ without going beyond context-free grammars.
|
||||
|
||||
Instead of BNF's angle-bracket notation for nonterminals,
|
||||
ABNF uses case-insensitive names consisting of letters, digits, and dashes,
|
||||
e.g. HTTP-message and IPv6address.
|
||||
e.g. `HTTP-message` and `IPv6address`.
|
||||
ABNF includes an angle-bracket notation for prose descriptions,
|
||||
e.g. <host, see [RFC3986], Section 3.2.2>,
|
||||
e.g. `<host, see [RFC3986], Section 3.2.2>`,
|
||||
usable as last resort in the definiens of a nonterminal.
|
||||
|
||||
While BNF allows arbitrary terminals,
|
||||
ABNF uses only natural numbers as terminals,
|
||||
and denotes them via:
|
||||
(i) binary, decimal, or hexadecimal sequences,
|
||||
e.g. %b1.11.1010, %d1.3.10, and %x.1.3.A
|
||||
all denote the sequence of terminals '1 3 10';
|
||||
e.g. `%b1.11.1010`, `%d1.3.10`, and `%x.1.3.A`
|
||||
all denote the sequence of terminals [1, 3, 10];
|
||||
(ii) binary, decimal, or hexadecimal ranges,
|
||||
e.g. %x30-39 denotes any singleton sequence of terminals
|
||||
'n' with 48 <= n <= 57 (an ASCII digit);
|
||||
e.g. `%x30-39` denotes any singleton sequence of terminals
|
||||
[_n_] with 48 <= _n_ <= 57 (an ASCII digit);
|
||||
(iii) case-sensitive ASCII strings,
|
||||
e.g. %s"Ab" denotes the sequence of terminals '65 98';
|
||||
e.g. `%s"Ab"` denotes the sequence of terminals [65, 98];
|
||||
and (iv) case-insensitive ASCII strings,
|
||||
e.g. %i"ab", or just "ab", denotes
|
||||
e.g. `%i"ab"`, or just `"ab"`, denotes
|
||||
any sequence of terminals among
|
||||
'65 66',
|
||||
'65 98',
|
||||
'97 66', and
|
||||
'97 98'.
|
||||
[65, 66],
|
||||
[65, 98],
|
||||
[97, 66], and
|
||||
[97, 98].
|
||||
ABNF terminals in suitable sets represent ASCII or Unicode characters.
|
||||
|
||||
ABNF allows repetition prefixes n*m,
|
||||
where n and m are natural numbers in decimal notation;
|
||||
ABNF allows repetition prefixes `n*m`,
|
||||
where `n` and `m` are natural numbers in decimal notation;
|
||||
if absent,
|
||||
n defaults to 0, and
|
||||
m defaults to infinity.
|
||||
`n` defaults to 0, and
|
||||
`m` defaults to infinity.
|
||||
For example,
|
||||
1*4HEXDIG denotes one to four HEXDIGs,
|
||||
*3DIGIT denotes up to three DIGITs, and
|
||||
1*OCTET denotes one or more OCTETs.
|
||||
A single n prefix
|
||||
abbreviates n*n,
|
||||
e.g. 3DIGIT denotes three DIGITs.
|
||||
`1*4HEXDIG` denotes one to four `HEXDIG`s,
|
||||
`*3DIGIT` denotes up to three `DIGIT`s, and
|
||||
`1*OCTET` denotes one or more `OCTET`s.
|
||||
A single `n` prefix
|
||||
abbreviates `n*n`,
|
||||
e.g. `3DIGIT` denotes three `DIGIT`s.
|
||||
|
||||
Instead of BNF's |, ABNF uses / to separate alternatives.
|
||||
Instead of BNF's `|`, ABNF uses `/` to separate alternatives.
|
||||
Repetition prefixes have precedence over juxtapositions,
|
||||
which have precedence over /.
|
||||
which have precedence over `/`.
|
||||
Round brackets group things and override the aforementioned precedence rules,
|
||||
e.g. *(WSP / CRLF WSP) denotes sequences of terminals
|
||||
e.g. `*(WSP / CRLF WSP)` denotes sequences of terminals
|
||||
obtained by repeating, zero or more times,
|
||||
either (i) a WSP or (ii) a CRLF followed by a WSP.
|
||||
either (i) a `WSP` or (ii) a `CRLF` followed by a `WSP`.
|
||||
Square brackets also group things but make them optional,
|
||||
e.g. [":" port] is equivalent to 0*1(":" port).
|
||||
e.g. `[":" port]` is equivalent to `0*1(":" port)`.
|
||||
|
||||
Instead of BNF's ::=, ABNF uses = to define nonterminals,
|
||||
and =/ to incrementally add alternatives
|
||||
Instead of BNF's `::=`, ABNF uses `=` to define nonterminals,
|
||||
and `=/` to incrementally add alternatives
|
||||
to previously defined nonterminals.
|
||||
For example, the rule BIT = "0" / "1"
|
||||
is equivalent to BIT = "0" followed by BIT =/ "1".
|
||||
For example, the rule `BIT = "0" / "1"`
|
||||
is equivalent to `BIT = "0"` followed by `BIT =/ "1"`.
|
||||
|
||||
The syntax of ABNF itself is formally specified in ABNF
|
||||
(in Section 4 of the aforementioned RFC 5234,
|
||||
@ -143,7 +131,7 @@ Structure
|
||||
This ABNF grammar consists of two (sub-)grammars:
|
||||
(i) a lexical grammar that describes how
|
||||
sequence of characters are parsed into tokens, and
|
||||
(ii) a syntactic grammar that described how
|
||||
(ii) a syntactic grammar that describes how
|
||||
tokens are parsed into expressions, statements, etc.
|
||||
The adjectives 'lexical' and 'syntactic' are
|
||||
the same ones used in the Java language reference,
|
||||
@ -210,8 +198,8 @@ additive-expression =
|
||||
|
||||
|
||||
These rules tell us
|
||||
that the additive operators '+' and '-' have lower precedence
|
||||
than the multiplicative operators '*' and '/',
|
||||
that the additive operators `+` and `-` have lower precedence
|
||||
than the multiplicative operators `*` and `/`,
|
||||
and that both the additive and multiplicative operators associate to the left.
|
||||
This may be best understood via the examples given below.
|
||||
|
||||
@ -307,7 +295,7 @@ Naming Convention
|
||||
This ABNF grammar uses nonterminal names
|
||||
that consist of complete English words, separated by dashes,
|
||||
and that describe the construct the way it is in English.
|
||||
For instance, we use the name 'conditional-statement'
|
||||
For instance, we use the name `conditional-statement`
|
||||
to describe conditional statements.
|
||||
|
||||
At the same time, this grammar establishes
|
||||
@ -366,8 +354,8 @@ Lexical Grammar
|
||||
|
||||
A Leo file is a finite sequence of Unicode characters,
|
||||
represented as Unicode code points,
|
||||
which are numbers in the range form 0 to 10FFFFh.
|
||||
These are captured by the ABNF rule 'character' below.
|
||||
which are numbers in the range from 0 to 10FFFFh.
|
||||
These are captured by the ABNF rule `character` below.
|
||||
|
||||
The lexical grammar defines how, at least conceptually,
|
||||
the sequence of characters is turned into
|
||||
@ -375,20 +363,20 @@ a sequence of tokens, comments, and whitespaces:
|
||||
these entities are all defined by the grammar rules below.
|
||||
|
||||
As stated, the lexical grammar alone is ambiguous.
|
||||
For example, the sequence of characters '**' (i.e. two stars)
|
||||
could be equally parsed as two '*' symbol tokens or one '**' symbol token
|
||||
(see rule for 'symbol' below).
|
||||
As another example, the sequence or characters '<CR><LF>'
|
||||
For example, the sequence of characters `**` (i.e. two stars)
|
||||
could be equally parsed as two `*` symbol tokens or one `**` symbol token
|
||||
(see rule for `symbol` below).
|
||||
As another example, the sequence of characters `<CR><LF>`
|
||||
(i.e. carriage return followed by line feed)
|
||||
could be equally parsed as two line terminators or one
|
||||
(see rule for 'newline').
|
||||
(see rule for `newline`).
|
||||
|
||||
Thus, as often done in language syntax definitions,
|
||||
the lexical grammar is disambiguated by
|
||||
the extra-grammatical requirement that
|
||||
the longest possible sequence of characters is always parsed.
|
||||
This way, '**' must be parsed as one '**' symbol token,
|
||||
and '<CR><LF>' must be parsed as one line terminator.
|
||||
This way, `**` must be parsed as one `**` symbol token,
|
||||
and `<CR><LF>` must be parsed as one line terminator.
|
||||
|
||||
As mentioned above, a character is any Unicode code point.
|
||||
This grammar does not say how those are encoded in files (e.g. UTF-8):
|
||||
@ -405,27 +393,27 @@ We give names to certain ASCII characters.
|
||||
|
||||
<a name="horizontal-tab"></a>
|
||||
```abnf
|
||||
horizontal-tab = %x9
|
||||
horizontal-tab = %x9 ; <HT>
|
||||
```
|
||||
|
||||
<a name="line-feed"></a>
|
||||
```abnf
|
||||
line-feed = %xA
|
||||
line-feed = %xA ; <LF>
|
||||
```
|
||||
|
||||
<a name="carriage-return"></a>
|
||||
```abnf
|
||||
carriage-return = %xD
|
||||
carriage-return = %xD ; <CR>
|
||||
```
|
||||
|
||||
<a name="space"></a>
|
||||
```abnf
|
||||
space = %x20
|
||||
space = %x20 ; <SP>
|
||||
```
|
||||
|
||||
<a name="double-quote"></a>
|
||||
```abnf
|
||||
double-quote = %x22
|
||||
double-quote = %x22 ; "
|
||||
```
|
||||
|
||||
We give names to complements of certain ASCII characters.
|
||||
@ -444,12 +432,25 @@ not-star = %x0-29 / %x2B-10FFFF ; anything but *
|
||||
<a name="not-line-feed-or-carriage-return"></a>
|
||||
```abnf
|
||||
not-line-feed-or-carriage-return = %x0-9 / %xB-C / %xE-10FFFF
|
||||
; anything but LF or CR
|
||||
; anything but <LF> or <CR>
|
||||
```
|
||||
|
||||
<a name="not-double-quote-or-open-brace"></a>
|
||||
```abnf
|
||||
not-double-quote-or-open-brace = %x0-22 / %x24-7A / %x7C-10FFFF
|
||||
; anything but " or {
|
||||
```
|
||||
|
||||
<a name="not-double-quote-or-close-brace"></a>
|
||||
```abnf
|
||||
not-double-quote-or-close-brace = %x0-22 / %x24-7C / %x7E-10FFFF
|
||||
; anything but " or }
|
||||
```
|
||||
|
||||
<a name="not-star-or-slash"></a>
|
||||
```abnf
|
||||
not-star-or-slash = %x0-29 / %x2B-2E / %x30-10FFFF ; anything but * or /
|
||||
not-star-or-slash = %x0-29 / %x2B-2E / %x30-10FFFF
|
||||
; anything but * or /
|
||||
```
|
||||
|
||||
Lines in Leo may be terminated via
|
||||
@ -475,16 +476,16 @@ Line terminators form whitespace, along with spaces and horizontal tabs.
|
||||
whitespace = space / horizontal-tab / newline
|
||||
```
|
||||
|
||||
Go to: _[newline](#user-content-newline), [space](#user-content-space), [horizontal-tab](#user-content-horizontal-tab)_;
|
||||
Go to: _[space](#user-content-space), [newline](#user-content-newline), [horizontal-tab](#user-content-horizontal-tab)_;
|
||||
|
||||
|
||||
There are two kinds of comments in Leo, as in other languages.
|
||||
One is block comments of the form '/* ... */',
|
||||
and the other is end-of-line comments of the form '// ...'.
|
||||
The first kind start at '/*' and end at the first '*/',
|
||||
One is block comments of the form `/* ... */`,
|
||||
and the other is end-of-line comments of the form `// ...`.
|
||||
The first kind start at `/*` and end at the first `*/`,
|
||||
possibly spanning multiple (partial) lines;
|
||||
these do not nest.
|
||||
The second kind start at '//' and extend till the end of the line.
|
||||
The second kind start at `//` and extend till the end of the line.
|
||||
The rules about comments given below are similar to
|
||||
the ones used in the Java language reference.
|
||||
|
||||
@ -493,7 +494,7 @@ the ones used in the Java language reference.
|
||||
comment = block-comment / end-of-line-comment
|
||||
```
|
||||
|
||||
Go to: _[end-of-line-comment](#user-content-end-of-line-comment), [block-comment](#user-content-block-comment)_;
|
||||
Go to: _[block-comment](#user-content-block-comment), [end-of-line-comment](#user-content-end-of-line-comment)_;
|
||||
|
||||
|
||||
<a name="block-comment"></a>
|
||||
@ -510,7 +511,7 @@ rest-of-block-comment = "*" rest-of-block-comment-after-star
|
||||
/ not-star rest-of-block-comment
|
||||
```
|
||||
|
||||
Go to: _[not-star](#user-content-not-star), [rest-of-block-comment-after-star](#user-content-rest-of-block-comment-after-star), [rest-of-block-comment](#user-content-rest-of-block-comment)_;
|
||||
Go to: _[rest-of-block-comment](#user-content-rest-of-block-comment), [rest-of-block-comment-after-star](#user-content-rest-of-block-comment-after-star), [not-star](#user-content-not-star)_;
|
||||
|
||||
|
||||
<a name="rest-of-block-comment-after-star"></a>
|
||||
@ -520,7 +521,7 @@ rest-of-block-comment-after-star = "/"
|
||||
/ not-star-or-slash rest-of-block-comment
|
||||
```
|
||||
|
||||
Go to: _[rest-of-block-comment-after-star](#user-content-rest-of-block-comment-after-star), [not-star-or-slash](#user-content-not-star-or-slash), [rest-of-block-comment](#user-content-rest-of-block-comment)_;
|
||||
Go to: _[rest-of-block-comment](#user-content-rest-of-block-comment), [not-star-or-slash](#user-content-not-star-or-slash), [rest-of-block-comment-after-star](#user-content-rest-of-block-comment-after-star)_;
|
||||
|
||||
|
||||
<a name="end-of-line-comment"></a>
|
||||
@ -595,16 +596,19 @@ lowercase-letter = %x61-7A ; a-z
|
||||
letter = uppercase-letter / lowercase-letter
|
||||
```
|
||||
|
||||
Go to: _[uppercase-letter](#user-content-uppercase-letter), [lowercase-letter](#user-content-lowercase-letter)_;
|
||||
Go to: _[lowercase-letter](#user-content-lowercase-letter), [uppercase-letter](#user-content-uppercase-letter)_;
|
||||
|
||||
|
||||
An identifier is a non-empty sequence of letters, digits, and underscores,
|
||||
starting with a letter.
|
||||
It must not be a keyword: this is an extra-grammatical constraint.
|
||||
It must not be a keyword: this is an extra-grammatical requirement.
|
||||
It must also not be or start with `aleo1`,
|
||||
because that is used for address literals:
|
||||
this is another extra-grammatical requirement.
|
||||
|
||||
<a name="identifier"></a>
|
||||
```abnf
|
||||
identifier = letter *( letter / digit / "_" ) ; but not a keyword
|
||||
identifier = letter *( letter / digit / "_" ) ; but not a keyword or aleo1...
|
||||
```
|
||||
|
||||
Go to: _[letter](#user-content-letter)_;
|
||||
@ -621,31 +625,16 @@ package-name = 1*( lowercase-letter / digit )
|
||||
|
||||
A format string is a sequence of characters, other than double quote,
|
||||
surrounded by double quotes.
|
||||
Within a format string, sub-strings '{}' are distinguished as containers
|
||||
Within a format string, sub-strings `{}` are distinguished as containers
|
||||
(these are the ones that may be matched with values
|
||||
whose textual representation replaces the containers
|
||||
in the printed string).
|
||||
There is an implicit extra-grammatical requirement that
|
||||
the explicit 'format-string-container' instances include
|
||||
all the occurrences of '{}' in the parsed character sequence:
|
||||
that is, there may not be two contiguous 'not-double-quote' instances
|
||||
that are '{' and '}'.
|
||||
|
||||
<a name="format-string-container"></a>
|
||||
```abnf
|
||||
format-string-container = "{}"
|
||||
```
|
||||
|
||||
<a name="not-double-quote-or-open-brace"></a>
|
||||
```abnf
|
||||
not-double-quote-or-open-brace = %x0-22 / %x24-7A / %x7C-10FFFF
|
||||
```
|
||||
|
||||
<a name="not-double-quote-or-close-brace"></a>
|
||||
```abnf
|
||||
not-double-quote-or-close-brace = %x0-22 / %x24-7C / %x7E-10FFFF
|
||||
```
|
||||
|
||||
<a name="format-string-element"></a>
|
||||
```abnf
|
||||
format-string-element = not-double-quote-or-open-brace
|
||||
@ -653,7 +642,7 @@ format-string-element = not-double-quote-or-open-brace
|
||||
/ format-string-container
|
||||
```
|
||||
|
||||
Go to: _[not-double-quote-or-open-brace](#user-content-not-double-quote-or-open-brace), [format-string-container](#user-content-format-string-container), [not-double-quote-or-close-brace](#user-content-not-double-quote-or-close-brace)_;
|
||||
Go to: _[format-string-container](#user-content-format-string-container), [not-double-quote-or-open-brace](#user-content-not-double-quote-or-open-brace), [not-double-quote-or-close-brace](#user-content-not-double-quote-or-close-brace)_;
|
||||
|
||||
|
||||
<a name="format-string"></a>
|
||||
@ -664,43 +653,7 @@ format-string = double-quote *format-string-element double-quote
|
||||
Go to: _[double-quote](#user-content-double-quote)_;
|
||||
|
||||
|
||||
Here is (part of this ABNF comment),
|
||||
an alternative way to specify format strings,
|
||||
which captures the extra-grammatical requirement above in the grammar,
|
||||
but is more complicated:
|
||||
|
||||
|
||||
|
||||
```
|
||||
not-double-quote-or-open-brace = %x0-22 / %x24-7A / %x7C-10FFFF
|
||||
```
|
||||
|
||||
|
||||
|
||||
```
|
||||
not-double-quote-or-close-brace = %x0-22 / %x24-7C / %x7E-10FFFF
|
||||
```
|
||||
|
||||
|
||||
|
||||
```
|
||||
format-string-element = not-double-quote-or-open-brace
|
||||
/ "{" not-double-quote-or-close-brace
|
||||
/ format-string-container
|
||||
```
|
||||
|
||||
|
||||
|
||||
```
|
||||
format-string = double-quote *format-string-element double-quote
|
||||
```
|
||||
|
||||
|
||||
|
||||
It is not immediately clear which approach is better; there are tradeoffs.
|
||||
We may choose to adopt this one in future revisions of the grammar.
|
||||
|
||||
Annotations have names, which are identifiers immediately preceded by '@'.
|
||||
Annotations have names, which are identifiers immediately preceded by `@`.
|
||||
|
||||
<a name="annotation-name"></a>
|
||||
```abnf
|
||||
@ -711,7 +664,7 @@ Go to: _[identifier](#user-content-identifier)_;
|
||||
|
||||
|
||||
A natural (number) is a sequence of one or more digits.
|
||||
We allow leading zeros, e.g. '007'.
|
||||
We allow leading zeros, e.g. `007`.
|
||||
|
||||
<a name="natural"></a>
|
||||
```abnf
|
||||
@ -719,7 +672,7 @@ natural = 1*digit
|
||||
```
|
||||
|
||||
An integer (number) is either a natural or its negation.
|
||||
We allow leading zeros also in negative numbers, e.g. '-007'.
|
||||
We allow leading zeros also in negative numbers, e.g. `-007`.
|
||||
|
||||
<a name="integer"></a>
|
||||
```abnf
|
||||
@ -790,7 +743,7 @@ Boolean literals are the usual two.
|
||||
boolean-literal = %s"true" / %s"false"
|
||||
```
|
||||
|
||||
An address literal starts with 'aleo1'
|
||||
An address literal starts with `aleo1`
|
||||
and continues with exactly 58 lowercase letters and digits.
|
||||
Thus an address always consists of 63 characters.
|
||||
|
||||
@ -814,50 +767,22 @@ atomic-literal = untyped-literal
|
||||
/ address-literal
|
||||
```
|
||||
|
||||
Go to: _[product-group-literal](#user-content-product-group-literal), [untyped-literal](#user-content-untyped-literal), [signed-literal](#user-content-signed-literal), [field-literal](#user-content-field-literal), [boolean-literal](#user-content-boolean-literal), [address-literal](#user-content-address-literal), [unsigned-literal](#user-content-unsigned-literal)_;
|
||||
Go to: _[product-group-literal](#user-content-product-group-literal), [signed-literal](#user-content-signed-literal), [unsigned-literal](#user-content-unsigned-literal), [field-literal](#user-content-field-literal), [boolean-literal](#user-content-boolean-literal), [untyped-literal](#user-content-untyped-literal), [address-literal](#user-content-address-literal)_;
|
||||
|
||||
|
||||
After defining the (mostly) alphanumeric tokens above,
|
||||
it remains to define tokens for non-alphanumeric symbols such as "+" and "(".
|
||||
it remains to define tokens for non-alphanumeric symbols such as `+` and `(`.
|
||||
Different programming languages use different terminology for these,
|
||||
e.g. operators, separators, punctuators, etc.
|
||||
Here we use 'symbol', for all of them.
|
||||
Here we use `symbol` for all of them.
|
||||
We also include a token consisting of
|
||||
a closing parenthesis immediately followed by 'group':
|
||||
a closing parenthesis `)` immediately followed by `group`:
|
||||
as defined in the syntactic grammar,
|
||||
this is the final part of an affine group literal;
|
||||
even though it includes letters,
|
||||
it seems appropriate to still consider it a symbol,
|
||||
particularly since it starts with a proper symbol.
|
||||
|
||||
We could give names to all of these symbols,
|
||||
via rules such as
|
||||
|
||||
|
||||
|
||||
```
|
||||
equality-operator = "=="
|
||||
```
|
||||
|
||||
|
||||
|
||||
and defining 'symbol' in terms of those
|
||||
|
||||
|
||||
|
||||
```
|
||||
symbol = ... / equality-operator / ...
|
||||
```
|
||||
|
||||
|
||||
|
||||
This may or may not make the grammar more readable,
|
||||
but it would help establish a terminology in the grammar,
|
||||
namely the exact names of some of these token.
|
||||
On the other hand, at least some of them are perhaps simple enough
|
||||
that they could be just described in terms of their symbols,
|
||||
e.g. 'double dot', 'question mark', etc.
|
||||
|
||||
<a name="symbol"></a>
|
||||
```abnf
|
||||
symbol = "!" / "&&" / "||"
|
||||
@ -887,7 +812,7 @@ token = keyword
|
||||
/ symbol
|
||||
```
|
||||
|
||||
Go to: _[package-name](#user-content-package-name), [annotation-name](#user-content-annotation-name), [atomic-literal](#user-content-atomic-literal), [format-string](#user-content-format-string), [keyword](#user-content-keyword), [symbol](#user-content-symbol), [identifier](#user-content-identifier)_;
|
||||
Go to: _[package-name](#user-content-package-name), [annotation-name](#user-content-annotation-name), [format-string](#user-content-format-string), [keyword](#user-content-keyword), [atomic-literal](#user-content-atomic-literal), [identifier](#user-content-identifier), [symbol](#user-content-symbol)_;
|
||||
|
||||
|
||||
|
||||
@ -923,7 +848,7 @@ signed-type = %s"i8" / %s"i16" / %s"i32" / %s"i64" / %s"i128"
|
||||
integer-type = unsigned-type / signed-type
|
||||
```
|
||||
|
||||
Go to: _[unsigned-type](#user-content-unsigned-type), [signed-type](#user-content-signed-type)_;
|
||||
Go to: _[signed-type](#user-content-signed-type), [unsigned-type](#user-content-unsigned-type)_;
|
||||
|
||||
|
||||
The integer types, along with the field and group types,
|
||||
@ -965,10 +890,10 @@ address-type = %s"address"
|
||||
scalar-type = boolean-type / arithmetic-type / address-type
|
||||
```
|
||||
|
||||
Go to: _[address-type](#user-content-address-type), [boolean-type](#user-content-boolean-type), [arithmetic-type](#user-content-arithmetic-type)_;
|
||||
Go to: _[arithmetic-type](#user-content-arithmetic-type), [address-type](#user-content-address-type), [boolean-type](#user-content-boolean-type)_;
|
||||
|
||||
|
||||
Circuit types are denoted by identifiers and the keyword 'Self'.
|
||||
Circuit types are denoted by identifiers and the keyword `Self`.
|
||||
The latter is only allowed inside a circuit definition,
|
||||
to denote the circuit being defined.
|
||||
|
||||
@ -982,7 +907,7 @@ self-type = %s"Self"
|
||||
circuit-type = identifier / self-type
|
||||
```
|
||||
|
||||
Go to: _[self-type](#user-content-self-type), [identifier](#user-content-identifier)_;
|
||||
Go to: _[identifier](#user-content-identifier), [self-type](#user-content-self-type)_;
|
||||
|
||||
|
||||
A tuple type consists of zero, two, or more component types.
|
||||
@ -1005,7 +930,7 @@ or a tuple of one or more dimensions.
|
||||
array-type = "[" type ";" array-dimensions "]"
|
||||
```
|
||||
|
||||
Go to: _[array-dimensions](#user-content-array-dimensions), [type](#user-content-type)_;
|
||||
Go to: _[type](#user-content-type), [array-dimensions](#user-content-array-dimensions)_;
|
||||
|
||||
|
||||
<a name="array-dimensions"></a>
|
||||
@ -1026,7 +951,7 @@ i.e. types whose values contain (sub-)values
|
||||
aggregate-type = tuple-type / array-type / circuit-type
|
||||
```
|
||||
|
||||
Go to: _[array-type](#user-content-array-type), [circuit-type](#user-content-circuit-type), [tuple-type](#user-content-tuple-type)_;
|
||||
Go to: _[tuple-type](#user-content-tuple-type), [array-type](#user-content-array-type), [circuit-type](#user-content-circuit-type)_;
|
||||
|
||||
|
||||
Scalar and aggregate types form all the types.
|
||||
@ -1036,7 +961,7 @@ Scalar and aggregate types form all the types.
|
||||
type = scalar-type / aggregate-type
|
||||
```
|
||||
|
||||
Go to: _[scalar-type](#user-content-scalar-type), [aggregate-type](#user-content-aggregate-type)_;
|
||||
Go to: _[aggregate-type](#user-content-aggregate-type), [scalar-type](#user-content-scalar-type)_;
|
||||
|
||||
|
||||
The lexical grammar given earlier defines product group literals.
|
||||
@ -1076,7 +1001,7 @@ Go to: _[affine-group-literal](#user-content-affine-group-literal), [atomic-lite
|
||||
|
||||
|
||||
The following rule is not directly referenced in the rules for expressions
|
||||
(which reference 'literal' instead),
|
||||
(which reference `literal` instead),
|
||||
but it is useful to establish terminology:
|
||||
a group literal is either a product group literal or an affine group literal.
|
||||
|
||||
@ -1095,7 +1020,7 @@ of operators and other expression constructs,
|
||||
and the (left or right) associativity of binary operators.
|
||||
|
||||
The primary expressions are self-contained in a way,
|
||||
i.e. they have clear deliminations:
|
||||
i.e. they have clear delimitations:
|
||||
Some consist of single tokens,
|
||||
while others have explicit endings.
|
||||
Primary expressions also include parenthesized expressions,
|
||||
@ -1114,7 +1039,7 @@ primary-expression = identifier
|
||||
/ circuit-expression
|
||||
```
|
||||
|
||||
Go to: _[identifier](#user-content-identifier), [literal](#user-content-literal), [array-expression](#user-content-array-expression), [expression](#user-content-expression), [tuple-expression](#user-content-tuple-expression), [circuit-expression](#user-content-circuit-expression)_;
|
||||
Go to: _[tuple-expression](#user-content-tuple-expression), [identifier](#user-content-identifier), [expression](#user-content-expression), [array-expression](#user-content-array-expression), [circuit-expression](#user-content-circuit-expression), [literal](#user-content-literal)_;
|
||||
|
||||
|
||||
Tuple expressions construct tuples.
|
||||
@ -1139,7 +1064,7 @@ Go to: _[tuple-construction](#user-content-tuple-construction)_;
|
||||
Array expressions construct arrays.
|
||||
There are two kinds:
|
||||
one lists the element expressions (at least one),
|
||||
including spreads (via '...') which are arrays being spliced in;
|
||||
including spreads (via `...`) which are arrays being spliced in;
|
||||
the other repeats (the value of) a single expression
|
||||
across one or more dimensions.
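The following sketch (not part of the grammar; identifiers are illustrative) shows both kinds of array expressions described above:

```
function main() -> u8 {
    let a = [1u8, 2u8, 3u8];   // lists the element expressions
    let b = [0u8, ...a];       // spread splices the elements of `a` in
    let grid = [0u8; (2, 3)];  // repeats a value across two dimensions
    return b[1];
}
```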
|
||||
|
||||
@ -1198,11 +1123,12 @@ so they are syntactically identical but semantically different.
|
||||
<a name="circuit-construction"></a>
|
||||
```abnf
|
||||
circuit-construction = circuit-type "{"
|
||||
circuit-inline-element *( "," circuit-inline-element ) [ "," ]
|
||||
circuit-inline-element
|
||||
*( "," circuit-inline-element ) [ "," ]
|
||||
"}"
|
||||
```
|
||||
|
||||
Go to: _[circuit-type](#user-content-circuit-type), [circuit-inline-element](#user-content-circuit-inline-element)_;
|
||||
Go to: _[circuit-inline-element](#user-content-circuit-inline-element), [circuit-type](#user-content-circuit-type)_;
|
||||
|
||||
|
||||
<a name="circuit-inline-element"></a>
|
||||
@ -1210,7 +1136,7 @@ Go to: _[circuit-type](#user-content-circuit-type), [circuit-inline-element](#us
|
||||
circuit-inline-element = identifier ":" expression / identifier
|
||||
```
|
||||
|
||||
Go to: _[identifier](#user-content-identifier), [expression](#user-content-expression)_;
|
||||
Go to: _[expression](#user-content-expression), [identifier](#user-content-identifier)_;
|
||||
|
||||
|
||||
<a name="circuit-expression"></a>
|
||||
@ -1261,7 +1187,7 @@ postfix-expression = primary-expression
|
||||
/ postfix-expression "[" [expression] ".." [expression] "]"
|
||||
```
|
||||
|
||||
Go to: _[circuit-type](#user-content-circuit-type), [natural](#user-content-natural), [postfix-expression](#user-content-postfix-expression), [primary-expression](#user-content-primary-expression), [identifier](#user-content-identifier), [function-arguments](#user-content-function-arguments), [expression](#user-content-expression)_;
|
||||
Go to: _[function-arguments](#user-content-function-arguments), [postfix-expression](#user-content-postfix-expression), [natural](#user-content-natural), [circuit-type](#user-content-circuit-type), [expression](#user-content-expression), [identifier](#user-content-identifier), [primary-expression](#user-content-primary-expression)_;
|
||||
|
||||
|
||||
Unary operators have the highest operator precedence.
|
||||
@ -1280,8 +1206,8 @@ Go to: _[postfix-expression](#user-content-postfix-expression), [unary-expressio
|
||||
|
||||
Next in the operator precedence is exponentiation,
|
||||
following mathematical practice.
|
||||
The current rule below makes exponentiation left-associative,
|
||||
i.e. 'a ** b ** c' must be parsed as '(a ** b) ** c'.
|
||||
The current rule below makes exponentiation right-associative,
|
||||
i.e. `a ** b ** c` must be parsed as `a ** (b ** c)`.
|
||||
|
||||
<a name="exponential-expression"></a>
|
||||
```abnf
|
||||
@ -1301,7 +1227,7 @@ multiplicative-expression = exponential-expression
|
||||
/ multiplicative-expression "/" exponential-expression
|
||||
```
|
||||
|
||||
Go to: _[multiplicative-expression](#user-content-multiplicative-expression), [exponential-expression](#user-content-exponential-expression)_;
|
||||
Go to: _[exponential-expression](#user-content-exponential-expression), [multiplicative-expression](#user-content-multiplicative-expression)_;
|
||||
|
||||
|
||||
Then there are addition and subtraction, both left-associative.
|
||||
@ -1376,7 +1302,7 @@ conditional-expression = disjunctive-expression
|
||||
":" conditional-expression
|
||||
```
|
||||
|
||||
Go to: _[conditional-expression](#user-content-conditional-expression), [expression](#user-content-expression), [disjunctive-expression](#user-content-disjunctive-expression)_;
|
||||
Go to: _[expression](#user-content-expression), [conditional-expression](#user-content-conditional-expression), [disjunctive-expression](#user-content-disjunctive-expression)_;
|
||||
|
||||
|
||||
Those above are all the expressions.
|
||||
@ -1400,7 +1326,8 @@ Blocks are possibly empty sequences of statements surrounded by curly braces.
|
||||
```abnf
|
||||
statement = expression-statement
|
||||
/ return-statement
|
||||
/ variable-definition-statement
|
||||
/ variable-declaration
|
||||
/ constant-declaration
|
||||
/ conditional-statement
|
||||
/ loop-statement
|
||||
/ assignment-statement
|
||||
@ -1408,7 +1335,7 @@ statement = expression-statement
|
||||
/ block
|
||||
```
|
||||
|
||||
Go to: _[return-statement](#user-content-return-statement), [variable-definition-statement](#user-content-variable-definition-statement), [assignment-statement](#user-content-assignment-statement), [console-statement](#user-content-console-statement), [expression-statement](#user-content-expression-statement), [loop-statement](#user-content-loop-statement), [conditional-statement](#user-content-conditional-statement), [block](#user-content-block)_;
|
||||
Go to: _[expression-statement](#user-content-expression-statement), [return-statement](#user-content-return-statement), [assignment-statement](#user-content-assignment-statement), [block](#user-content-block), [loop-statement](#user-content-loop-statement), [constant-declaration](#user-content-constant-declaration), [conditional-statement](#user-content-conditional-statement), [console-statement](#user-content-console-statement), [variable-declaration](#user-content-variable-declaration)_;
|
||||
|
||||
|
||||
<a name="block"></a>
|
||||
@ -1427,8 +1354,7 @@ expression-statement = expression ";"
|
||||
Go to: _[expression](#user-content-expression)_;
|
||||
|
||||
|
||||
A return statement always takes an expression,
|
||||
and does not end with a semicolon.
|
||||
A return statement always takes an expression, and ends with a semicolon.
|
||||
|
||||
<a name="return-statement"></a>
|
||||
```abnf
|
||||
@ -1438,22 +1364,32 @@ return-statement = %s"return" expression ";"
|
||||
Go to: _[expression](#user-content-expression)_;
|
||||
|
||||
|
||||
There are two kinds of variable definition statements,
|
||||
There are variable declarations and constant declarations,
|
||||
which only differ in the starting keyword.
|
||||
The variables are either a single one or a tuple of two or more;
|
||||
These declarations are also statements.
|
||||
The names of the variables or constants are
|
||||
either a single one or a tuple of two or more;
|
||||
in all cases, there is just one optional type
|
||||
and just one initializing expression.
|
||||
|
||||
<a name="variable-definition-statement"></a>
|
||||
<a name="variable-declaration"></a>
|
||||
```abnf
|
||||
variable-definition-statement = ( %s"let" / %s"const" )
|
||||
identifier-or-identifiers
|
||||
[ ":" type ] "=" expression ";"
|
||||
variable-declaration = %s"let" identifier-or-identifiers [ ":" type ]
|
||||
"=" expression ";"
|
||||
```
|
||||
|
||||
Go to: _[type](#user-content-type), [identifier-or-identifiers](#user-content-identifier-or-identifiers), [expression](#user-content-expression)_;
|
||||
|
||||
|
||||
<a name="constant-declaration"></a>
|
||||
```abnf
|
||||
constant-declaration = %s"const" identifier-or-identifiers [ ":" type ]
|
||||
"=" expression ";"
|
||||
```
|
||||
|
||||
Go to: _[expression](#user-content-expression), [type](#user-content-type), [identifier-or-identifiers](#user-content-identifier-or-identifiers)_;
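As an informal illustration of the two rules above (a sketch; the identifiers are made up):

```
function main() -> u32 {
    let x: u32 = 3u32;          // variable declaration with explicit type
    let (a, b) = (1u32, 2u32);  // tuple of variables, type omitted
    const LIMIT: u32 = 100u32;  // constant declaration
    return x + a + b + LIMIT;
}
```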
|
||||
|
||||
|
||||
<a name="identifier-or-identifiers"></a>
|
||||
```abnf
|
||||
identifier-or-identifiers = identifier
|
||||
@ -1474,7 +1410,7 @@ Note that blocks are required in all branches, not merely statements.
|
||||
branch = %s"if" expression block
|
||||
```
|
||||
|
||||
Go to: _[expression](#user-content-expression), [block](#user-content-block)_;
|
||||
Go to: _[block](#user-content-block), [expression](#user-content-expression)_;
|
||||
|
||||
|
||||
<a name="conditional-statement"></a>
|
||||
@ -1484,7 +1420,7 @@ conditional-statement = branch
|
||||
/ branch %s"else" conditional-statement
|
||||
```
|
||||
|
||||
Go to: _[branch](#user-content-branch), [conditional-statement](#user-content-conditional-statement), [block](#user-content-block)_;
|
||||
Go to: _[block](#user-content-block), [branch](#user-content-branch), [conditional-statement](#user-content-conditional-statement)_;
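As an informal illustration (a sketch; the names are made up), note how every branch body is a block:

```
function classify(x: u32) -> u8 {
    if x == 0u32 {
        return 0u8;
    } else if x < 10u32 {
        return 1u8;
    } else {
        return 2u8;
    }
}
```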
|
||||
|
||||
|
||||
A loop statement implicitly defines a loop variable
|
||||
@ -1500,7 +1436,7 @@ Go to: _[identifier](#user-content-identifier), [expression](#user-content-expre
|
||||
|
||||
|
||||
An assignment statement is straightforward.
|
||||
Based on the operator, the assignment may be simple (i.e. '=')
|
||||
Based on the operator, the assignment may be simple (i.e. `=`)
|
||||
or compound (i.e. combining assignment with an arithmetic operation).
|
||||
|
||||
<a name="assignment-operator"></a>
|
||||
@ -1513,16 +1449,16 @@ assignment-operator = "=" / "+=" / "-=" / "*=" / "/=" / "**="
|
||||
assignment-statement = expression assignment-operator expression ";"
|
||||
```
|
||||
|
||||
Go to: _[assignment-operator](#user-content-assignment-operator), [expression](#user-content-expression)_;
|
||||
Go to: _[expression](#user-content-expression), [assignment-operator](#user-content-assignment-operator)_;
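To illustrate the compound forms (an informal sketch, not part of the grammar or of the Leo compiler), the usual desugaring of a compound assignment into a binary operation plus a simple assignment can be written as:

```rust
// Illustrative sketch only: `x += e` behaves like `x = x + e`, and similarly
// for the other compound assignment operators listed in the rule above.
fn desugar_assignment(op: &str, target: &str, expr: &str) -> String {
    match op {
        "=" => format!("{target} = {expr};"),
        "+=" | "-=" | "*=" | "/=" | "**=" => {
            let binary = op.trim_end_matches('=');
            format!("{target} = {target} {binary} ({expr});")
        }
        _ => panic!("not an assignment operator"),
    }
}

// desugar_assignment("+=", "x", "y * 2") yields "x = x + (y * 2);"
```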
|
||||
|
||||
|
||||
Console statements start with the 'console' keyword,
|
||||
Console statements start with the `console` keyword,
|
||||
followed by a console function call.
|
||||
The call may be an assertion or a print command.
|
||||
The former takes an expression (which must be boolean) as argument.
|
||||
The latter takes either no argument,
|
||||
or a format string followed by expressions,
|
||||
whose number must match the number of containers '{}' in the format string.
|
||||
whose number must match the number of containers `{}` in the format string.
|
||||
Note that the console function names are identifiers, not keywords.
|
||||
There are three kinds of print commands.
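As a rough illustration of the container-matching requirement (a hypothetical sketch, not code from the Leo compiler), counting the `{}` containers in a format string and comparing against the number of argument expressions could look like this:

```rust
// Hypothetical sketch: a format string is well-formed for a print call when it
// has exactly as many `{}` containers as there are argument expressions.
fn containers_match(format_string: &str, argument_count: usize) -> bool {
    // `str::matches` counts non-overlapping occurrences of the container "{}".
    format_string.matches("{}").count() == argument_count
}

// containers_match("x: {}, y: {}", 2) == true
// containers_match("x: {}", 2) == false
```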
|
||||
|
||||
@ -1540,7 +1476,7 @@ console-call = assert-call
|
||||
/ print-call
|
||||
```
|
||||
|
||||
Go to: _[assert-call](#user-content-assert-call), [print-call](#user-content-print-call)_;
|
||||
Go to: _[print-call](#user-content-print-call), [assert-call](#user-content-assert-call)_;
|
||||
|
||||
|
||||
<a name="assert-call"></a>
|
||||
@ -1572,7 +1508,7 @@ print-call = print-function print-arguments
|
||||
Go to: _[print-function](#user-content-print-function), [print-arguments](#user-content-print-arguments)_;
|
||||
|
||||
|
||||
An annotation consists of an annotation name (which starts with '@')
|
||||
An annotation consists of an annotation name (which starts with `@`)
|
||||
with optional annotation arguments, which are identifiers.
|
||||
Note that no parentheses are used if there are no arguments.
|
||||
|
||||
@ -1582,7 +1518,7 @@ annotation = annotation-name
|
||||
[ "(" identifier *( "," identifier ) ")" ]
|
||||
```
|
||||
|
||||
Go to: _[identifier](#user-content-identifier), [annotation-name](#user-content-annotation-name)_;
|
||||
Go to: _[annotation-name](#user-content-annotation-name), [identifier](#user-content-identifier)_;
|
||||
|
||||
|
||||
A function declaration defines a function.
|
||||
@ -1590,8 +1526,7 @@ The output type is optional, defaulting to the empty tuple type.
|
||||
In general, a function input consists of an identifier and a type,
|
||||
with an optional 'const' modifier.
|
||||
Additionally, functions inside circuits
|
||||
may start with a 'mut self' or 'const self' or 'self' parameter.
|
||||
Furthermore, any function may end with an 'input' parameter.
|
||||
may start with a `mut self` or `const self` or `self` parameter.
|
||||
|
||||
<a name="function-declaration"></a>
|
||||
```abnf
|
||||
@ -1600,7 +1535,7 @@ function-declaration = *annotation %s"function" identifier
|
||||
block
|
||||
```
|
||||
|
||||
Go to: _[function-parameters](#user-content-function-parameters), [block](#user-content-block), [type](#user-content-type), [identifier](#user-content-identifier)_;
|
||||
Go to: _[block](#user-content-block), [identifier](#user-content-identifier), [function-parameters](#user-content-function-parameters), [type](#user-content-type)_;
|
||||
|
||||
|
||||
<a name="function-parameters"></a>
|
||||
@ -1631,14 +1566,9 @@ Go to: _[function-input](#user-content-function-input)_;
|
||||
function-input = [ %s"const" ] identifier ":" type
|
||||
```
|
||||
|
||||
Go to: _[identifier](#user-content-identifier), [type](#user-content-type)_;
|
||||
Go to: _[type](#user-content-type), [identifier](#user-content-identifier)_;
|
||||
|
||||
|
||||
<a name="input-parameter"></a>
|
||||
```abnf
|
||||
input-parameter = %s"input"
|
||||
```
|
||||
|
||||
A circuit member variable declaration consists of an identifier and a type.
|
||||
A circuit member function declaration consists of a function declaration.
|
||||
|
||||
@ -1648,7 +1578,7 @@ member-declaration = member-variable-declaration
|
||||
/ member-function-declaration
|
||||
```
|
||||
|
||||
Go to: _[member-variable-declaration](#user-content-member-variable-declaration), [member-function-declaration](#user-content-member-function-declaration)_;
|
||||
Go to: _[member-function-declaration](#user-content-member-function-declaration), [member-variable-declaration](#user-content-member-variable-declaration)_;
|
||||
|
||||
|
||||
<a name="member-variable-declaration"></a>
|
||||
@ -1676,10 +1606,10 @@ circuit-declaration = *annotation %s"circuit" identifier
|
||||
"{" member-declaration *( "," member-declaration ) "}"
|
||||
```
|
||||
|
||||
Go to: _[member-declaration](#user-content-member-declaration), [identifier](#user-content-identifier)_;
|
||||
Go to: _[identifier](#user-content-identifier), [member-declaration](#user-content-member-declaration)_;
|
||||
|
||||
|
||||
An import declaration consists of the 'import' keyword
|
||||
An import declaration consists of the `import` keyword
|
||||
followed by a package path, which may be one of the following:
|
||||
a single wildcard;
|
||||
an identifier, optionally followed by a local renamer;
|
||||
@ -1691,7 +1621,7 @@ to be followed by a comma, for convenience.
|
||||
|
||||
<a name="import-declaration"></a>
|
||||
```abnf
|
||||
import-declaration = %s"import" package-path
|
||||
import-declaration = %s"import" package-path ";"
|
||||
```
|
||||
|
||||
Go to: _[package-path](#user-content-package-path)_;
|
||||
@ -1705,22 +1635,50 @@ package-path = "*"
|
||||
/ "(" package-path *( "," package-path ) [","] ")"
|
||||
```
|
||||
|
||||
Go to: _[package-path](#user-content-package-path), [identifier](#user-content-identifier), [package-name](#user-content-package-name)_;
|
||||
Go to: _[identifier](#user-content-identifier), [package-path](#user-content-package-path), [package-name](#user-content-package-name)_;
|
||||
|
||||
|
||||
Finally, we define a file as a sequence of zero or more declarations.
|
||||
We allow constant declarations at the top level, for global constants.
|
||||
Currently variable declarations are disallowed at the top level.
|
||||
|
||||
<a name="declaration"></a>
|
||||
```abnf
|
||||
declaration = import-declaration
|
||||
/ function-declaration
|
||||
/ circuit-declaration
|
||||
/ constant-declaration
|
||||
```
|
||||
|
||||
Go to: _[import-declaration](#user-content-import-declaration), [circuit-declaration](#user-content-circuit-declaration), [function-declaration](#user-content-function-declaration)_;
|
||||
Go to: _[constant-declaration](#user-content-constant-declaration), [import-declaration](#user-content-import-declaration), [function-declaration](#user-content-function-declaration), [circuit-declaration](#user-content-circuit-declaration)_;
|
||||
|
||||
|
||||
<a name="file"></a>
|
||||
```abnf
|
||||
file = *declaration
|
||||
```
|
||||
|
||||
|
||||
--------
|
||||
|
||||
|
||||
Format Note
|
||||
-----------
|
||||
|
||||
The ABNF standard requires grammars
|
||||
to consist of lines terminated by `<CR><LF>`
|
||||
(i.e. carriage return followed by line feed, DOS/Windows-style),
|
||||
as explained in the background on ABNF earlier in this file.
|
||||
This file's lines are therefore terminated by `<CR><LF>`.
|
||||
To avoid losing this requirement across systems,
|
||||
this file is marked as `text eol=crlf` in `.gitattributes`:
|
||||
this means that the file is textual, enabling visual diffs,
|
||||
but its lines will always be terminated by `<CR><LF>` on any system.
|
||||
|
||||
Note that this `<CR><LF>` requirement only applies
|
||||
to the grammar files themselves.
|
||||
It does not apply to the lines of the languages described by the grammar.
|
||||
ABNF grammars may describe any kind of languages,
|
||||
with any kind of line terminators,
|
||||
or even without line terminators at all (e.g. for "binary" languages).
|
||||
|
||||
|
@ -1,26 +1,18 @@
|
||||
; Leo Library
|
||||
;
|
||||
; Copyright (C) 2021 Aleo Systems Inc.
|
||||
; Copyright (C) 2019-2021 Aleo Systems Inc.
|
||||
; This file is part of the Leo library.
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
; The Leo library is free software: you can redistribute it and/or modify
|
||||
; it under the terms of the GNU General Public License as published by
|
||||
; the Free Software Foundation, either version 3 of the License, or
|
||||
; (at your option) any later version.
|
||||
|
||||
; Format Note
|
||||
; -----------
|
||||
; The Leo library is distributed in the hope that it will be useful,
|
||||
; but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
; GNU General Public License for more details.
|
||||
|
||||
; The ABNF standard requires grammars to consist of lines terminated by CR LF
|
||||
; (i.e. carriage return followed by line feed, DOS/Windows-style),
|
||||
; as explained in the background on ABNF later in this file.
|
||||
; This file's lines are therefore terminated by CR LF.
|
||||
; To avoid losing this requirement across systems,
|
||||
; this file is marked as 'text eol=crlf' in .gitattributes:
|
||||
; this means that the file is textual, enabling visual diffs,
|
||||
; but its lines will always be terminated by CR LF on any system.
|
||||
|
||||
; Note that this CR LF requirement only applies to the grammar files themselves.
|
||||
; It does not apply to the lines of the languages described by the grammar.
|
||||
; ABNF grammars may describe any kind of languages,
|
||||
; with any kind of line terminators,
|
||||
; or even without line terminators at all (e.g. for "binary" languages).
|
||||
; You should have received a copy of the GNU General Public License
|
||||
; along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
@ -61,59 +53,59 @@
|
||||
|
||||
; Instead of BNF's angle-bracket notation for nonterminals,
|
||||
; ABNF uses case-insensitive names consisting of letters, digits, and dashes,
|
||||
; e.g. HTTP-message and IPv6address.
|
||||
; e.g. `HTTP-message` and `IPv6address`.
|
||||
; ABNF includes an angle-bracket notation for prose descriptions,
|
||||
; e.g. <host, see [RFC3986], Section 3.2.2>,
|
||||
; e.g. `<host, see [RFC3986], Section 3.2.2>`,
|
||||
; usable as last resort in the definiens of a nonterminal.
|
||||
|
||||
; While BNF allows arbitrary terminals,
|
||||
; ABNF uses only natural numbers as terminals,
|
||||
; and denotes them via:
|
||||
; (i) binary, decimal, or hexadecimal sequences,
|
||||
; e.g. %b1.11.1010, %d1.3.10, and %x.1.3.A
|
||||
; all denote the sequence of terminals '1 3 10';
|
||||
; e.g. `%b1.11.1010`, `%d1.3.10`, and `%x.1.3.A`
|
||||
; all denote the sequence of terminals [1, 3, 10];
|
||||
; (ii) binary, decimal, or hexadecimal ranges,
|
||||
; e.g. %x30-39 denotes any singleton sequence of terminals
|
||||
; 'n' with 48 <= n <= 57 (an ASCII digit);
|
||||
; e.g. `%x30-39` denotes any singleton sequence of terminals
|
||||
; [_n_] with 48 <= _n_ <= 57 (an ASCII digit);
|
||||
; (iii) case-sensitive ASCII strings,
|
||||
; e.g. %s"Ab" denotes the sequence of terminals '65 98';
|
||||
; e.g. `%s"Ab"` denotes the sequence of terminals [65, 98];
|
||||
; and (iv) case-insensitive ASCII strings,
|
||||
; e.g. %i"ab", or just "ab", denotes
|
||||
; e.g. `%i"ab"`, or just `"ab"`, denotes
|
||||
; any sequence of terminals among
|
||||
; '65 66',
|
||||
; '65 98',
|
||||
; '97 66', and
|
||||
; '97 98'.
|
||||
; [65, 66],
|
||||
; [65, 98],
|
||||
; [97, 66], and
|
||||
; [97, 98].
|
||||
; ABNF terminals in suitable sets represent ASCII or Unicode characters.
|
||||
|
||||
; ABNF allows repetition prefixes n*m,
|
||||
; where n and m are natural numbers in decimal notation;
|
||||
; ABNF allows repetition prefixes `n*m`,
|
||||
; where `n` and `m` are natural numbers in decimal notation;
|
||||
; if absent,
|
||||
; n defaults to 0, and
|
||||
; m defaults to infinity.
|
||||
; `n` defaults to 0, and
|
||||
; `m` defaults to infinity.
|
||||
; For example,
|
||||
; 1*4HEXDIG denotes one to four HEXDIGs,
|
||||
; *3DIGIT denotes up to three DIGITs, and
|
||||
; 1*OCTET denotes one or more OCTETs.
|
||||
; A single n prefix
|
||||
; abbreviates n*n,
|
||||
; e.g. 3DIGIT denotes three DIGITs.
|
||||
; `1*4HEXDIG` denotes one to four `HEXDIG`s,
|
||||
; `*3DIGIT` denotes up to three `DIGIT`s, and
|
||||
; `1*OCTET` denotes one or more `OCTET`s.
|
||||
; A single `n` prefix
|
||||
; abbreviates `n*n`,
|
||||
; e.g. `3DIGIT` denotes three `DIGIT`s.
|
||||
|
||||
; Instead of BNF's |, ABNF uses / to separate alternatives.
|
||||
; Instead of BNF's `|`, ABNF uses `/` to separate alternatives.
|
||||
; Repetition prefixes have precedence over juxtapositions,
|
||||
; which have precedence over /.
|
||||
; which have precedence over `/`.
|
||||
; Round brackets group things and override the aforementioned precedence rules,
|
||||
; e.g. *(WSP / CRLF WSP) denotes sequences of terminals
|
||||
; e.g. `*(WSP / CRLF WSP)` denotes sequences of terminals
|
||||
; obtained by repeating, zero or more times,
|
||||
; either (i) a WSP or (ii) a CRLF followed by a WSP.
|
||||
; either (i) a `WSP` or (ii) a `CRLF` followed by a `WSP`.
|
||||
; Square brackets also group things but make them optional,
|
||||
; e.g. [":" port] is equivalent to 0*1(":" port).
|
||||
; e.g. `[":" port]` is equivalent to `0*1(":" port)`.
|
||||
|
||||
; Instead of BNF's ::=, ABNF uses = to define nonterminals,
|
||||
; and =/ to incrementally add alternatives
|
||||
; Instead of BNF's `::=`, ABNF uses `=` to define nonterminals,
|
||||
; and `=/` to incrementally add alternatives
|
||||
; to previously defined nonterminals.
|
||||
; For example, the rule BIT = "0" / "1"
|
||||
; is equivalent to BIT = "0" followed by BIT =/ "1".
|
||||
; For example, the rule `BIT = "0" / "1"`
|
||||
; is equivalent to `BIT = "0"` followed by `BIT =/ "1"`.
|
||||
|
||||
; The syntax of ABNF itself is formally specified in ABNF
|
||||
; (in Section 4 of the aforementioned RFC 5234,
|
||||
@ -133,7 +125,7 @@
|
||||
; This ABNF grammar consists of two (sub-)grammars:
|
||||
; (i) a lexical grammar that describes how
|
||||
; sequence of characters are parsed into tokens, and
|
||||
; (ii) a syntactic grammar that described how
|
||||
; (ii) a syntactic grammar that describes how
|
||||
; tokens are parsed into expressions, statements, etc.
|
||||
; The adjectives 'lexical' and 'syntactic' are
|
||||
; the same ones used in the Java language reference,
|
||||
@ -188,8 +180,8 @@
|
||||
; / additive-expression "-" multiplicative-expression
|
||||
;
|
||||
; These rules tell us
|
||||
; that the additive operators '+' and '-' have lower precedence
|
||||
; than the multiplicative operators '*' and '/',
|
||||
; that the additive operators `+` and `-` have lower precedence
|
||||
; than the multiplicative operators `*` and `/`,
|
||||
; and that both the additive and multiplicative operators associate to the left.
|
||||
; This may be best understood via the examples given below.
|
||||
|
||||
@ -247,7 +239,7 @@
|
||||
; This ABNF grammar uses nonterminal names
|
||||
; that consist of complete English words, separated by dashes,
|
||||
; and that describe the construct the way it is in English.
|
||||
; For instance, we use the name 'conditional-statement'
|
||||
; For instance, we use the name `conditional-statement`
|
||||
; to describe conditional statements.
|
||||
|
||||
; At the same time, this grammar establishes
|
||||
@ -292,8 +284,8 @@
|
||||
|
||||
; A Leo file is a finite sequence of Unicode characters,
|
||||
; represented as Unicode code points,
|
||||
; which are numbers in the range form 0 to 10FFFFh.
|
||||
; These are captured by the ABNF rule 'character' below.
|
||||
; which are numbers in the range from 0 to 10FFFFh.
|
||||
; These are captured by the ABNF rule `character` below.
|
||||
|
||||
; The lexical grammar defines how, at least conceptually,
|
||||
; the sequence of characters is turned into
|
||||
@ -301,20 +293,20 @@
|
||||
; these entities are all defined by the grammar rules below.
|
||||
|
||||
; As stated, the lexical grammar alone is ambiguous.
|
||||
; For example, the sequence of characters '**' (i.e. two stars)
|
||||
; could be equally parsed as two '*' symbol tokens or one '**' symbol token
|
||||
; (see rule for 'symbol' below).
|
||||
; As another example, the sequence of characters '<CR><LF>'
|
||||
; For example, the sequence of characters `**` (i.e. two stars)
|
||||
; could be equally parsed as two `*` symbol tokens or one `**` symbol token
|
||||
; (see rule for `symbol` below).
|
||||
; As another example, the sequence of characters `<CR><LF>`
|
||||
; (i.e. carriage return followed by line feed)
|
||||
; could be equally parsed as two line terminators or one
|
||||
; (see rule for 'newline').
|
||||
; (see rule for `newline`).
|
||||
|
||||
; Thus, as often done in language syntax definitions,
|
||||
; the lexical grammar is disambiguated by
|
||||
; the extra-grammatical requirement that
|
||||
; the longest possible sequence of characters is always parsed.
|
||||
; This way, '**' must be parsed as one '**' symbol token,
|
||||
; and '<CR><LF>' must be parsed as one line terminator.
|
||||
; This way, `**` must be parsed as one `**` symbol token,
|
||||
; and `<CR><LF>` must be parsed as one line terminator.
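To make the longest-match requirement concrete, here is a small Rust sketch (purely illustrative, not the actual Leo lexer) in which the longer symbol is always tried first, so `**` is never split into two `*` tokens:

```rust
// Illustrative sketch of the longest-match (maximal munch) rule for `*` vs `**`.
// Trying "**" before "*" implements "the longest possible sequence is parsed".
fn next_star_symbol(input: &str) -> Option<(&str, &str)> {
    for symbol in ["**", "*"] {
        if let Some(rest) = input.strip_prefix(symbol) {
            return Some((symbol, rest)); // (token, remaining input)
        }
    }
    None
}

// next_star_symbol("**x") == Some(("**", "x")), never two separate "*" tokens.
```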
|
||||
|
||||
; As mentioned above, a character is any Unicode code point.
|
||||
; This grammar does not say how those are encoded in files (e.g. UTF-8):
|
||||
@ -326,15 +318,15 @@ character = %x0-10FFFF ; any Unicode code point
|
||||
|
||||
; We give names to certain ASCII characters.
|
||||
|
||||
horizontal-tab = %x9
|
||||
horizontal-tab = %x9 ; <HT>
|
||||
|
||||
line-feed = %xA
|
||||
line-feed = %xA ; <LF>
|
||||
|
||||
carriage-return = %xD
|
||||
carriage-return = %xD ; <CR>
|
||||
|
||||
space = %x20
|
||||
space = %x20 ; <SP>
|
||||
|
||||
double-quote = %x22
|
||||
double-quote = %x22 ; "
|
||||
|
||||
; We give names to complements of certain ASCII characters.
|
||||
; These consist of all the Unicode characters except for one or two.
|
||||
@ -344,9 +336,16 @@ not-double-quote = %x0-22 / %x24-10FFFF ; anything but "
|
||||
not-star = %x0-29 / %x2B-10FFFF ; anything but *
|
||||
|
||||
not-line-feed-or-carriage-return = %x0-9 / %xB-C / %xE-10FFFF
|
||||
; anything but LF or CR
|
||||
; anything but <LF> or <CR>
|
||||
|
||||
not-star-or-slash = %x0-29 / %x2B-2E / %x30-10FFFF ; anything but * or /
|
||||
not-double-quote-or-open-brace = %x0-22 / %x24-7A / %x7C-10FFFF
|
||||
; anything but " or {
|
||||
|
||||
not-double-quote-or-close-brace = %x0-22 / %x24-7C / %x7E-10FFFF
|
||||
; anything but " or }
|
||||
|
||||
not-star-or-slash = %x0-29 / %x2B-2E / %x30-10FFFF
|
||||
; anything but * or /
|
||||
|
||||
; Lines in Leo may be terminated via
|
||||
; a single carriage return,
|
||||
@ -363,12 +362,12 @@ newline = line-feed / carriage-return / carriage-return line-feed
|
||||
whitespace = space / horizontal-tab / newline
|
||||
|
||||
; There are two kinds of comments in Leo, as in other languages.
|
||||
; One is block comments of the form '/* ... */',
|
||||
; and the other is end-of-line comments of the form '// ...'.
|
||||
; The first kind start at '/*' and end at the first '*/',
|
||||
; One is block comments of the form `/* ... */`,
|
||||
; and the other is end-of-line comments of the form `// ...`.
|
||||
; The first kind start at `/*` and end at the first `*/`,
|
||||
; possibly spanning multiple (partial) lines;
|
||||
; these do not nest.
|
||||
; The second kind start at '//' and extend till the end of the line.
|
||||
; The second kind start at `//` and extend till the end of the line.
|
||||
; The rules about comments given below are similar to
|
||||
; the ones used in the Java language reference.
|
||||
|
||||
@ -436,9 +435,12 @@ letter = uppercase-letter / lowercase-letter
|
||||
|
||||
; An identifier is a non-empty sequence of letters, digits, and underscores,
|
||||
; starting with a letter.
|
||||
; It must not be a keyword: this is an extra-grammatical constraint.
|
||||
; It must not be a keyword: this is an extra-grammatical requirement.
|
||||
; It must also not be or start with `aleo1`,
|
||||
; because that is used for address literals:
|
||||
; this is another extra-grammatical requirement.
|
||||
|
||||
identifier = letter *( letter / digit / "_" ) ; but not a keyword
|
||||
identifier = letter *( letter / digit / "_" ) ; but not a keyword or aleo1...
|
||||
|
||||
; A package name consists of one or more segments separated by single dashes,
|
||||
; where each segment is a non-empty sequence of lowercase letters and digits.
|
||||
@ -448,57 +450,30 @@ package-name = 1*( lowercase-letter / digit )
|
||||
|
||||
; A format string is a sequence of characters, other than double quote,
|
||||
; surrounded by double quotes.
|
||||
; Within a format string, sub-strings '{}' are distinguished as containers
|
||||
; Within a format string, sub-strings `{}` are distinguished as containers
|
||||
; (these are the ones that may be matched with values
|
||||
; whose textual representation replaces the containers
|
||||
; in the printed string).
|
||||
; There is an implicit extra-grammatical requirement that
|
||||
; the explicit 'format-string-container' instances include
|
||||
; all the occurrences of '{}' in the parsed character sequence:
|
||||
; that is, there may not be two contiguous 'not-double-quote' instances
|
||||
; that are '{' and '}'.
|
||||
|
||||
format-string-container = "{}"
|
||||
|
||||
not-double-quote-or-open-brace = %x0-22 / %x24-7A / %x7C-10FFFF
|
||||
|
||||
not-double-quote-or-close-brace = %x0-22 / %x24-7C / %x7E-10FFFF
|
||||
|
||||
format-string-element = not-double-quote-or-open-brace
|
||||
/ "{" not-double-quote-or-close-brace
|
||||
/ format-string-container
|
||||
|
||||
format-string = double-quote *format-string-element double-quote
|
||||
|
||||
; Here is (as part of this ABNF comment),
|
||||
; an alternative way to specify format strings,
|
||||
; which captures the extra-grammatical requirement above in the grammar,
|
||||
; but is more complicated:
|
||||
;
|
||||
; not-double-quote-or-open-brace = %x0-22 / %x24-7A / %x7C-10FFFF
|
||||
;
|
||||
; not-double-quote-or-close-brace = %x0-22 / %x24-7C / %x7E-10FFFF
|
||||
;
|
||||
; format-string-element = not-double-quote-or-open-brace
|
||||
; / "{" not-double-quote-or-close-brace
|
||||
; / format-string-container
|
||||
;
|
||||
; format-string = double-quote *format-string-element double-quote
|
||||
;
|
||||
; It is not immediately clear which approach is better; there are tradeoffs.
|
||||
; We may choose to adopt this one in future revisions of the grammar.
|
||||
|
||||
; Annotations have names, which are identifiers immediately preceded by '@'.
|
||||
; Annotations have names, which are identifiers immediately preceded by `@`.
|
||||
|
||||
annotation-name = "@" identifier
|
||||
|
||||
; A natural (number) is a sequence of one or more digits.
|
||||
; We allow leading zeros, e.g. '007'.
|
||||
; We allow leading zeros, e.g. `007`.
|
||||
|
||||
natural = 1*digit
|
||||
|
||||
; An integer (number) is either a natural or its negation.
|
||||
; We allow leading zeros also in negative numbers, e.g. '-007'.
|
||||
; We allow leading zeros also in negative numbers, e.g. `-007`.
|
||||
|
||||
integer = [ "-" ] natural
|
||||
|
||||
@ -530,7 +505,7 @@ product-group-literal = integer %s"group"
|
||||
|
||||
boolean-literal = %s"true" / %s"false"
|
||||
|
||||
; An address literal starts with 'aleo1'
|
||||
; An address literal starts with `aleo1`
|
||||
; and continues with exactly 58 lowercase letters and digits.
|
||||
; Thus an address always consists of 63 characters.
|
||||
|
||||
@ -549,34 +524,18 @@ atomic-literal = untyped-literal
|
||||
/ address-literal
|
||||
|
||||
; After defining the (mostly) alphanumeric tokens above,
|
||||
; it remains to define tokens for non-alphanumeric symbols such as "+" and "(".
|
||||
; it remains to define tokens for non-alphanumeric symbols such as `+` and `(`.
|
||||
; Different programming languages use different terminologies for these,
|
||||
; e.g. operators, separators, punctuators, etc.
|
||||
; Here we use 'symbol', for all of them.
|
||||
; Here we use `symbol`, for all of them.
|
||||
; We also include a token consisting of
|
||||
; a closing parenthesis immediately followed by 'group':
|
||||
; a closing parenthesis `)` immediately followed by `group`:
|
||||
; as defined in the syntactic grammar,
|
||||
; this is the final part of an affine group literal;
|
||||
; even though it includes letters,
|
||||
; it seems appropriate to still consider it a symbol,
|
||||
; particularly since it starts with a proper symbol.
|
||||
|
||||
; We could give names to all of these symbols,
|
||||
; via rules such as
|
||||
;
|
||||
; equality-operator = "=="
|
||||
;
|
||||
; and defining 'symbol' in terms of those
|
||||
;
|
||||
; symbol = ... / equality-operator / ...
|
||||
;
|
||||
; This may or may not make the grammar more readable,
|
||||
; but it would help establish a terminology in the grammar,
|
||||
; namely the exact names of some of these token.
|
||||
; On the other hand, at least some of them are perhaps simple enough
|
||||
; that they could be just described in terms of their symbols,
|
||||
; e.g. 'double dot', 'question mark', etc.
|
||||
|
||||
symbol = "!" / "&&" / "||"
|
||||
/ "==" / "!="
|
||||
/ "<" / "<=" / ">" / ">="
|
||||
@ -640,7 +599,7 @@ address-type = %s"address"
|
||||
|
||||
scalar-type = boolean-type / arithmetic-type / address-type
|
||||
|
||||
; Circuit types are denoted by identifiers and the keyword 'Self'.
|
||||
; Circuit types are denoted by identifiers and the keyword `Self`.
|
||||
; The latter is only allowed inside a circuit definition,
|
||||
; to denote the circuit being defined.
|
||||
|
||||
@ -691,7 +650,7 @@ affine-group-literal = "(" group-coordinate "," group-coordinate %s")group"
|
||||
literal = atomic-literal / affine-group-literal
|
||||
|
||||
; The following rule is not directly referenced in the rules for expressions
|
||||
; (which reference 'literal' instead),
|
||||
; (which reference `literal` instead),
|
||||
; but it is useful to establish terminology:
|
||||
; a group literal is either a product group literal or an affine group literal.
|
||||
|
||||
@ -704,7 +663,7 @@ group-literal = product-group-literal / affine-group-literal
|
||||
; and the (left or right) associativity of binary operators.
|
||||
|
||||
; The primary expressions are self-contained in a way,
|
||||
; i.e. they have clear deliminations:
|
||||
; i.e. they have clear delimitations:
|
||||
; Some consist of single tokens,
|
||||
; while others have explicit endings.
|
||||
; Primary expressions also include parenthesized expressions,
|
||||
@ -730,7 +689,7 @@ tuple-expression = tuple-construction
|
||||
; Array expressions construct arrays.
|
||||
; There are two kinds:
|
||||
; one lists the element expressions (at least one),
|
||||
; including spreads (via '...') which are arrays being spliced in;
|
||||
; including spreads (via `...`) which are arrays being spliced in;
|
||||
; the other repeats (the value of) a single expression
|
||||
; across one or more dimensions.
|
||||
|
||||
@ -757,7 +716,8 @@ array-expression = array-construction
|
||||
; so they are syntactically identical but semantically different.
|
||||
|
||||
circuit-construction = circuit-type "{"
|
||||
circuit-inline-element *( "," circuit-inline-element ) [ "," ]
|
||||
circuit-inline-element
|
||||
*( "," circuit-inline-element ) [ "," ]
|
||||
"}"
|
||||
|
||||
circuit-inline-element = identifier ":" expression / identifier
|
||||
@ -805,8 +765,8 @@ unary-expression = postfix-expression
|
||||
|
||||
; Next in the operator precedence is exponentiation,
|
||||
; following mathematical practice.
|
||||
; The current rule below makes exponentiation left-associative,
|
||||
; i.e. 'a ** b ** c' must be parsed as '(a ** b) ** c'.
|
||||
; The current rule below makes exponentiation right-associative,
|
||||
; i.e. `a ** b ** c` must be parsed as `a ** (b ** c)`.
|
||||
|
||||
exponential-expression = unary-expression
|
||||
/ unary-expression "**" exponential-expression
|
||||
@ -869,7 +829,8 @@ expression = conditional-expression
|
||||
|
||||
statement = expression-statement
|
||||
/ return-statement
|
||||
/ variable-definition-statement
|
||||
/ variable-declaration
|
||||
/ constant-declaration
|
||||
/ conditional-statement
|
||||
/ loop-statement
|
||||
/ assignment-statement
|
||||
@ -883,20 +844,23 @@ block = "{" *statement "}"
|
||||
|
||||
expression-statement = expression ";"
|
||||
|
||||
; A return statement always takes an expression,
|
||||
; and does not end with a semicolon.
|
||||
; A return statement always takes an expression, and ends with a semicolon.
|
||||
|
||||
return-statement = %s"return" expression ";"
|
||||
|
||||
; There are two kinds of variable definition statements,
|
||||
; There are variable declarations and constant declarations,
|
||||
; which only differ in the starting keyword.
|
||||
; The variables are either a single one or a tuple of two or more;
|
||||
; These declarations are also statements.
|
||||
; The names of the variables or constants are
|
||||
; either a single one or a tuple of two or more;
|
||||
; in all cases, there is just one optional type
|
||||
; and just one initializing expression.
|
||||
|
||||
variable-definition-statement = ( %s"let" / %s"const" )
|
||||
identifier-or-identifiers
|
||||
[ ":" type ] "=" expression ";"
|
||||
variable-declaration = %s"let" identifier-or-identifiers [ ":" type ]
|
||||
"=" expression ";"
|
||||
|
||||
constant-declaration = %s"const" identifier-or-identifiers [ ":" type ]
|
||||
"=" expression ";"
|
||||
|
||||
identifier-or-identifiers = identifier
|
||||
/ "(" identifier 1*( "," identifier ) ")"
|
||||
@ -920,20 +884,20 @@ conditional-statement = branch
|
||||
loop-statement = %s"for" identifier %s"in" expression ".." expression block
|
||||
|
||||
; An assignment statement is straightforward.
|
||||
; Based on the operator, the assignment may be simple (i.e. '=')
|
||||
; Based on the operator, the assignment may be simple (i.e. `=`)
|
||||
; or compound (i.e. combining assignment with an arithmetic operation).
|
||||
|
||||
assignment-operator = "=" / "+=" / "-=" / "*=" / "/=" / "**="
|
||||
|
||||
assignment-statement = expression assignment-operator expression ";"
|
||||
|
||||
; Console statements start with the 'console' keyword,
|
||||
; Console statements start with the `console` keyword,
|
||||
; followed by a console function call.
|
||||
; The call may be an assertion or a print command.
|
||||
; The former takes an expression (which must be boolean) as argument.
|
||||
; The latter takes either no argument,
|
||||
; or a format string followed by expressions,
|
||||
; whose number must match the number of containers '{}' in the format string.
|
||||
; whose number must match the number of containers `{}` in the format string.
|
||||
; Note that the console function names are identifiers, not keywords.
|
||||
; There are three kinds of print commands.
|
||||
|
||||
@ -950,7 +914,7 @@ print-arguments = "(" [ format-string *( "," expression ) ] ")"
|
||||
|
||||
print-call = print-function print-arguments
|
||||
|
||||
; An annotation consists of an annotation name (which starts with '@')
|
||||
; An annotation consists of an annotation name (which starts with `@`)
|
||||
; with optional annotation arguments, which are identifiers.
|
||||
; Note that no parentheses are used if there are no arguments.
|
||||
|
||||
@ -962,8 +926,7 @@ annotation = annotation-name
|
||||
; In general, a function input consists of an identifier and a type,
|
||||
; with an optional 'const' modifier.
|
||||
; Additionally, functions inside circuits
|
||||
; may start with a 'mut self' or 'const self' or 'self' parameter.
|
||||
; Furthermore, any function may end with an 'input' parameter.
|
||||
; may start with a `mut self` or `const self` or `self` parameter.
|
||||
|
||||
function-declaration = *annotation %s"function" identifier
|
||||
"(" [ function-parameters ] ")" [ "->" type ]
|
||||
@ -979,8 +942,6 @@ function-inputs = function-input *( "," function-input )
|
||||
|
||||
function-input = [ %s"const" ] identifier ":" type
|
||||
|
||||
input-parameter = %s"input"
|
||||
|
||||
; A circuit member variable declaration consists of an identifier and a type.
|
||||
; A circuit member function declaration consists of a function declaration.
|
||||
|
||||
@ -997,7 +958,7 @@ member-function-declaration = function-declaration
|
||||
circuit-declaration = *annotation %s"circuit" identifier
|
||||
"{" member-declaration *( "," member-declaration ) "}"
|
||||
|
||||
; An import declaration consists of the 'import' keyword
|
||||
; An import declaration consists of the `import` keyword
|
||||
; followed by a package path, which may be one of the following:
|
||||
; a single wildcard;
|
||||
; an identifier, optionally followed by a local renamer;
|
||||
@ -1007,7 +968,7 @@ circuit-declaration = *annotation %s"circuit" identifier
|
||||
; Note that we allow the last element of the parenthesized list
|
||||
; to be followed by a comma, for convenience.
|
||||
|
||||
import-declaration = %s"import" package-path
|
||||
import-declaration = %s"import" package-path ";"
|
||||
|
||||
package-path = "*"
|
||||
/ identifier [ %s"as" identifier ]
|
||||
@ -1015,9 +976,34 @@ package-path = "*"
|
||||
/ "(" package-path *( "," package-path ) [","] ")"
|
||||
|
||||
; Finally, we define a file as a sequence of zero or more declarations.
|
||||
; We allow constant declarations at the top level, for global constants.
|
||||
; Currently variable declarations are disallowed at the top level.
|
||||
|
||||
declaration = import-declaration
|
||||
/ function-declaration
|
||||
/ circuit-declaration
|
||||
/ constant-declaration
|
||||
|
||||
file = *declaration
|
||||
|
||||
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
|
||||
; Format Note
|
||||
; -----------
|
||||
|
||||
; The ABNF standard requires grammars
|
||||
; to consist of lines terminated by `<CR><LF>`
|
||||
; (i.e. carriage return followed by line feed, DOS/Windows-style),
|
||||
; as explained in the background on ABNF earlier in this file.
|
||||
; This file's lines are therefore terminated by `<CR><LF>`.
|
||||
; To avoid losing this requirement across systems,
|
||||
; this file is marked as `text eol=crlf` in `.gitattributes`:
|
||||
; this means that the file is textual, enabling visual diffs,
|
||||
; but its lines will always be terminated by `<CR><LF>` on any system.
|
||||
|
||||
; Note that this `<CR><LF>` requirement only applies
|
||||
; to the grammar files themselves.
|
||||
; It does not apply to the lines of the languages described by the grammar.
|
||||
; ABNF grammars may describe any kind of languages,
|
||||
; with any kind of line terminators,
|
||||
; or even without line terminators at all (e.g. for "binary" languages).
|
||||
|
@ -18,7 +18,7 @@ use crate::errors::ImportParserError;
|
||||
use leo_asg::{AsgContext, AsgConvertError, ImportResolver, Program, Span};
|
||||
|
||||
use indexmap::{IndexMap, IndexSet};
|
||||
use std::env::current_dir;
|
||||
use std::path::PathBuf;
|
||||
|
||||
/// Stores imported packages.
|
||||
///
|
||||
@ -26,11 +26,21 @@ use std::env::current_dir;
|
||||
/// directory, foreign in the imports directory, or part of the core package list.
|
||||
#[derive(Clone, Default)]
|
||||
pub struct ImportParser<'a> {
|
||||
program_path: PathBuf,
|
||||
partial_imports: IndexSet<String>,
|
||||
imports: IndexMap<String, Program<'a>>,
|
||||
}
|
||||
|
||||
//todo: handle relative imports relative to file...
|
||||
impl<'a> ImportParser<'a> {
|
||||
pub fn new(program_path: PathBuf) -> Self {
|
||||
ImportParser {
|
||||
program_path,
|
||||
partial_imports: Default::default(),
|
||||
imports: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> ImportResolver<'a> for ImportParser<'a> {
|
||||
fn resolve_package(
|
||||
&mut self,
|
||||
@ -46,8 +56,7 @@ impl<'a> ImportResolver<'a> for ImportParser<'a> {
|
||||
return Ok(Some(program.clone()));
|
||||
}
|
||||
let mut imports = Self::default();
|
||||
let path =
|
||||
current_dir().map_err(|x| -> AsgConvertError { ImportParserError::current_directory_error(x).into() })?;
|
||||
let path = self.program_path.clone();
|
||||
|
||||
self.partial_imports.insert(full_path.clone());
|
||||
let program = imports
|
||||
|
leo/api.rs
@ -16,33 +16,49 @@
|
||||
|
||||
use anyhow::{anyhow, Error, Result};
|
||||
use reqwest::{
|
||||
blocking::{Client, Response},
|
||||
blocking::{multipart::Form, Client, Response},
|
||||
Method,
|
||||
StatusCode,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{collections::HashMap, path::PathBuf};
|
||||
|
||||
/// Trait describes API Routes and Request bodies, struct which implements
|
||||
/// Route MUST also support Serialize to be usable in Api::run_route(r: Route)
|
||||
/// Format to use.
|
||||
/// Default is JSON, but publish route uses FormData
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum ContentType {
|
||||
Json,
|
||||
FormData,
|
||||
}
|
||||
|
||||
/// API Routes and Request bodies.
|
||||
/// Structs that implement Route MUST also support Serialize to be usable in Api::run_route(r: Route)
|
||||
pub trait Route {
|
||||
/// Whether to use bearer auth or not. Some routes may have additional
|
||||
/// features for logged-in users, so authorization token should be sent
|
||||
/// if it is created of course
|
||||
/// [`true`] if a route supports bearer authentication.
|
||||
/// For example, the login route.
|
||||
const AUTH: bool;
|
||||
|
||||
/// HTTP method to use when requesting
|
||||
/// The HTTP method to use when requesting.
|
||||
const METHOD: Method;
|
||||
|
||||
/// URL path without first forward slash (e.g. v1/package/fetch)
|
||||
/// The URL path without the first forward slash (e.g. v1/package/fetch)
|
||||
const PATH: &'static str;
|
||||
|
||||
/// Output type for this route. For login it is simple - String
|
||||
/// Content type: JSON or Multipart/FormData. Only usable in POST/PUT queries.
|
||||
const CONTENT_TYPE: ContentType;
|
||||
|
||||
/// The output type for this route. For example, the login route output is [`String`].
|
||||
/// But for other routes it may be more complex.
|
||||
type Output;
|
||||
|
||||
/// Process reqwest Response and turn it into Output
|
||||
/// Process the reqwest Response and turn it into an Output.
|
||||
fn process(&self, res: Response) -> Result<Self::Output>;
|
||||
|
||||
/// Represent self as a form data for multipart (ContentType::FormData) requests.
|
||||
fn to_form(&self) -> Option<Form> {
|
||||
None
|
||||
}
|
||||
|
||||
/// Transform specific status codes into correct errors for this route.
|
||||
/// For example 404 on package fetch should mean that 'Package is not found'
|
||||
fn status_to_err(&self, _status: StatusCode) -> Error {
|
||||
@ -50,18 +66,18 @@ pub trait Route {
|
||||
}
|
||||
}
|
||||
|
||||
/// REST API handler with reqwest::blocking inside
|
||||
/// REST API handler with reqwest::blocking inside.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Api {
|
||||
host: String,
|
||||
client: Client,
|
||||
/// Authorization token for API requests
|
||||
/// Authorization token for API requests.
|
||||
auth_token: Option<String>,
|
||||
}
|
||||
|
||||
impl Api {
|
||||
/// Create new instance of API, set host and Client is going to be
|
||||
/// created and set automatically
|
||||
/// Returns a new instance of API.
|
||||
/// The host is stored and the inner HTTP client is created automatically.
|
||||
pub fn new(host: String, auth_token: Option<String>) -> Api {
|
||||
Api {
|
||||
client: Client::new(),
|
||||
@ -70,18 +86,23 @@ impl Api {
|
||||
}
|
||||
}
|
||||
|
||||
/// Get token for bearer auth, should be passed into Api through Context
|
||||
pub fn host(&self) -> &str {
|
||||
&*self.host
|
||||
}
|
||||
|
||||
/// Returns the token for bearer auth, otherwise None.
|
||||
/// The [`auth_token`] should be passed into the Api through Context.
|
||||
pub fn auth_token(&self) -> Option<String> {
|
||||
self.auth_token.clone()
|
||||
}
|
||||
|
||||
/// Set authorization token for future requests
|
||||
/// Set the authorization token for future requests.
|
||||
pub fn set_auth_token(&mut self, token: String) {
|
||||
self.auth_token = Some(token);
|
||||
}
|
||||
|
||||
/// Run specific route struct. Turn struct into request body
|
||||
/// and use type constants and Route implementation to get request params
|
||||
/// and use type constants and Route implementation to get request params.
|
||||
pub fn run_route<T>(&self, route: T) -> Result<T::Output>
|
||||
where
|
||||
T: Route,
|
||||
@ -91,7 +112,16 @@ impl Api {
|
||||
|
||||
// add body for POST and PUT requests
|
||||
if T::METHOD == Method::POST || T::METHOD == Method::PUT {
|
||||
res = res.json(&route);
|
||||
res = match T::CONTENT_TYPE {
|
||||
ContentType::Json => res.json(&route),
|
||||
ContentType::FormData => {
|
||||
let form = route
|
||||
.to_form()
|
||||
.unwrap_or_else(|| unimplemented!("to_form is not implemented for this route"));
|
||||
|
||||
res.multipart(form)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// if Route::Auth is true and token is present - pass it
|
||||
@ -100,7 +130,9 @@ impl Api {
|
||||
};
|
||||
|
||||
// only one error is possible here
|
||||
let res = res.send().map_err(|_| anyhow!("Unable to connect to Aleo PM"))?;
|
||||
let res = res.send().map_err(|_| {
|
||||
anyhow!("Unable to connect to Aleo PM. If you specified custom API endpoint, then check the URL for errors")
|
||||
})?;
|
||||
|
||||
// where magic begins
|
||||
route.process(res)
|
||||
@ -125,6 +157,7 @@ impl Route for Fetch {
|
||||
type Output = Response;
|
||||
|
||||
const AUTH: bool = true;
|
||||
const CONTENT_TYPE: ContentType = ContentType::Json;
|
||||
const METHOD: Method = Method::POST;
|
||||
const PATH: &'static str = "api/package/fetch";
|
||||
|
||||
@ -143,7 +176,7 @@ impl Route for Fetch {
|
||||
// TODO: we should return 404 on not found author/package
|
||||
// and return BAD_REQUEST if data format is incorrect or some of the arguments
|
||||
// were not passed
|
||||
StatusCode::NOT_FOUND => anyhow!("Package is hidden"),
|
||||
StatusCode::NOT_FOUND => anyhow!("Package not found"),
|
||||
_ => anyhow!("Unknown API error: {}", status),
|
||||
}
|
||||
}
|
||||
@ -161,6 +194,7 @@ impl Route for Login {
|
||||
type Output = Response;
|
||||
|
||||
const AUTH: bool = false;
|
||||
const CONTENT_TYPE: ContentType = ContentType::Json;
|
||||
const METHOD: Method = Method::POST;
|
||||
const PATH: &'static str = "api/account/authenticate";
|
||||
|
||||
@ -182,8 +216,59 @@ impl Route for Login {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct Publish {
|
||||
pub name: String,
|
||||
pub remote: String,
|
||||
pub version: String,
|
||||
pub file: PathBuf,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct PublishResponse {
|
||||
package_id: String,
|
||||
}
|
||||
|
||||
impl Route for Publish {
|
||||
type Output = String;
|
||||
|
||||
const AUTH: bool = true;
|
||||
const CONTENT_TYPE: ContentType = ContentType::FormData;
|
||||
const METHOD: Method = Method::POST;
|
||||
const PATH: &'static str = "api/package/publish";
|
||||
|
||||
fn to_form(&self) -> Option<Form> {
|
||||
Form::new()
|
||||
.text("name", self.name.clone())
|
||||
.text("remote", self.remote.clone())
|
||||
.text("version", self.version.clone())
|
||||
.file("file", self.file.clone())
|
||||
.ok()
|
||||
}
|
||||
|
||||
fn process(&self, res: Response) -> Result<Self::Output> {
|
||||
let status = res.status();
|
||||
|
||||
if status == StatusCode::OK {
|
||||
let body: PublishResponse = res.json()?;
|
||||
Ok(body.package_id)
|
||||
} else {
|
||||
let res: HashMap<String, String> = res.json()?;
|
||||
Err(match status {
|
||||
StatusCode::BAD_REQUEST => anyhow!("{}", res.get("message").unwrap()),
|
||||
StatusCode::UNAUTHORIZED => anyhow!("You are not logged in. Please use `leo login` to login"),
|
||||
StatusCode::FAILED_DEPENDENCY => anyhow!("This package version is already published"),
|
||||
StatusCode::INTERNAL_SERVER_ERROR => {
|
||||
anyhow!("Server error, please contact us at https://github.com/AleoHQ/leo/issues")
|
||||
}
|
||||
_ => anyhow!("Unknown status code"),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Handler for 'my_profile' route. Meant to be used to get profile details but
|
||||
/// in current application is used to check if user is logged in. Any non-200 response
|
||||
/// in the current application it is used to check if the user is logged in. Any non-200 response
|
||||
/// is treated as Unauthorized.
|
||||
#[derive(Serialize)]
|
||||
pub struct Profile {}
|
||||
@ -198,6 +283,7 @@ impl Route for Profile {
|
||||
type Output = Option<String>;
|
||||
|
||||
const AUTH: bool = true;
|
||||
const CONTENT_TYPE: ContentType = ContentType::Json;
|
||||
const METHOD: Method = Method::GET;
|
||||
const PATH: &'static str = "api/account/my_profile";
|
||||
|
||||
|
@ -18,28 +18,72 @@ use crate::{commands::Command, context::Context};
|
||||
use leo_compiler::{
|
||||
compiler::{thread_leaked_context, Compiler},
|
||||
group::targets::edwards_bls12::EdwardsGroupType,
|
||||
CompilerOptions,
|
||||
};
|
||||
use leo_package::{
|
||||
inputs::*,
|
||||
outputs::{ChecksumFile, CircuitFile, OutputsDirectory, OUTPUTS_DIRECTORY_NAME},
|
||||
source::{LibraryFile, MainFile, LIBRARY_FILENAME, MAIN_FILENAME, SOURCE_DIRECTORY_NAME},
|
||||
source::{MainFile, MAIN_FILENAME, SOURCE_DIRECTORY_NAME},
|
||||
};
|
||||
use leo_synthesizer::{CircuitSynthesizer, SerializedCircuit};
|
||||
|
||||
use anyhow::Result;
|
||||
use anyhow::{anyhow, Result};
|
||||
use snarkvm_curves::{bls12_377::Bls12_377, edwards_bls12::Fq};
|
||||
use snarkvm_r1cs::ConstraintSystem;
|
||||
use structopt::StructOpt;
|
||||
use tracing::span::Span;
|
||||
|
||||
/// Compile and build program command
|
||||
/// Compiler Options wrapper for Build command. Also used by other commands which
|
||||
/// require Build command output as their input.
|
||||
#[derive(StructOpt, Clone, Debug)]
|
||||
pub struct BuildOptions {
|
||||
#[structopt(long, help = "Disable constant folding compiler optimization")]
|
||||
pub disable_constant_folding: bool,
|
||||
#[structopt(long, help = "Disable dead code elimination compiler optimization")]
|
||||
pub disable_code_elimination: bool,
|
||||
#[structopt(long, help = "Disable all compiler optimizations")]
|
||||
pub disable_all_optimizations: bool,
|
||||
}
|
||||
|
||||
impl Default for BuildOptions {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
disable_constant_folding: true,
|
||||
disable_code_elimination: true,
|
||||
disable_all_optimizations: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<BuildOptions> for CompilerOptions {
|
||||
fn from(options: BuildOptions) -> Self {
|
||||
if !options.disable_all_optimizations {
|
||||
CompilerOptions {
|
||||
canonicalization_enabled: true,
|
||||
constant_folding_enabled: true,
|
||||
dead_code_elimination_enabled: true,
|
||||
}
|
||||
} else {
|
||||
CompilerOptions {
|
||||
canonicalization_enabled: true,
|
||||
constant_folding_enabled: !options.disable_constant_folding,
|
||||
dead_code_elimination_enabled: !options.disable_code_elimination,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Compile and build program command.
|
||||
#[derive(StructOpt, Debug)]
|
||||
#[structopt(setting = structopt::clap::AppSettings::ColoredHelp)]
|
||||
pub struct Build {}
|
||||
pub struct Build {
|
||||
#[structopt(flatten)]
|
||||
pub(crate) compiler_options: BuildOptions,
|
||||
}
|
||||
|
||||
impl Command for Build {
|
||||
type Input = ();
|
||||
type Output = Option<(Compiler<'static, Fq, EdwardsGroupType>, bool)>;
|
||||
type Output = (Compiler<'static, Fq, EdwardsGroupType>, bool);
|
||||
|
||||
fn log_span(&self) -> Span {
|
||||
tracing::span!(tracing::Level::INFO, "Build")
|
||||
@ -53,127 +97,108 @@ impl Command for Build {
|
||||
let path = context.dir()?;
|
||||
let package_name = context.manifest()?.get_package_name();
|
||||
|
||||
// Sanitize the package path to the root directory
|
||||
// Sanitize the package path to the root directory.
|
||||
let mut package_path = path.clone();
|
||||
if package_path.is_file() {
|
||||
package_path.pop();
|
||||
}
|
||||
|
||||
// Construct the path to the output directory
|
||||
// Construct the path to the output directory.
|
||||
let mut output_directory = package_path.clone();
|
||||
output_directory.push(OUTPUTS_DIRECTORY_NAME);
|
||||
|
||||
tracing::info!("Starting...");
|
||||
|
||||
// Compile the package starting with the lib.leo file
|
||||
if LibraryFile::exists_at(&package_path) {
|
||||
// Construct the path to the library file in the source directory
|
||||
let mut lib_file_path = package_path.clone();
|
||||
lib_file_path.push(SOURCE_DIRECTORY_NAME);
|
||||
lib_file_path.push(LIBRARY_FILENAME);
|
||||
|
||||
// Log compilation of library file to console
|
||||
tracing::info!("Compiling library... ({:?})", lib_file_path);
|
||||
|
||||
// Compile the library file but do not output
|
||||
let _program = Compiler::<Fq, EdwardsGroupType>::parse_program_without_input(
|
||||
package_name.clone(),
|
||||
lib_file_path,
|
||||
output_directory.clone(),
|
||||
thread_leaked_context(),
|
||||
)?;
|
||||
tracing::info!("Complete");
|
||||
};
|
||||
|
||||
// Compile the main.leo file along with constraints
|
||||
if MainFile::exists_at(&package_path) {
|
||||
// Create the output directory
|
||||
OutputsDirectory::create(&package_path)?;
|
||||
|
||||
// Construct the path to the main file in the source directory
|
||||
let mut main_file_path = package_path.clone();
|
||||
main_file_path.push(SOURCE_DIRECTORY_NAME);
|
||||
main_file_path.push(MAIN_FILENAME);
|
||||
|
||||
// Load the input file at `package_name.in`
|
||||
let (input_string, input_path) = InputFile::new(&package_name).read_from(&path)?;
|
||||
|
||||
// Load the state file at `package_name.in`
|
||||
let (state_string, state_path) = StateFile::new(&package_name).read_from(&path)?;
|
||||
|
||||
// Log compilation of files to console
|
||||
tracing::info!("Compiling main program... ({:?})", main_file_path);
|
||||
|
||||
// Load the program at `main_file_path`
|
||||
let program = Compiler::<Fq, EdwardsGroupType>::parse_program_with_input(
|
||||
package_name.clone(),
|
||||
main_file_path,
|
||||
output_directory,
|
||||
&input_string,
|
||||
&input_path,
|
||||
&state_string,
|
||||
&state_path,
|
||||
thread_leaked_context(),
|
||||
)?;
|
||||
|
||||
// Compute the current program checksum
|
||||
let program_checksum = program.checksum()?;
|
||||
|
||||
// Generate the program on the constraint system and verify correctness
|
||||
{
|
||||
let mut cs = CircuitSynthesizer::<Bls12_377> {
|
||||
constraints: Default::default(),
|
||||
public_variables: Default::default(),
|
||||
private_variables: Default::default(),
|
||||
namespaces: Default::default(),
|
||||
};
|
||||
let temporary_program = program.clone();
|
||||
let output = temporary_program.compile_constraints(&mut cs)?;
|
||||
|
||||
tracing::debug!("Compiled output - {:#?}", output);
|
||||
tracing::info!("Number of constraints - {:#?}", cs.num_constraints());
|
||||
|
||||
// Serialize the circuit
|
||||
let circuit_object = SerializedCircuit::from(cs);
|
||||
let json = circuit_object.to_json_string().unwrap();
|
||||
// println!("json: {}", json);
|
||||
|
||||
// Write serialized circuit to circuit `.json` file.
|
||||
let circuit_file = CircuitFile::new(&package_name);
|
||||
circuit_file.write_to(&path, json)?;
|
||||
|
||||
// Check that we can read the serialized circuit file
|
||||
// let serialized = circuit_file.read_from(&package_path)?;
|
||||
|
||||
// Deserialize the circuit
|
||||
// let deserialized = SerializedCircuit::from_json_string(&serialized).unwrap();
|
||||
// let _circuit_synthesizer = CircuitSynthesizer::<Bls12_377>::try_from(deserialized).unwrap();
|
||||
// println!("deserialized {:?}", circuit_synthesizer.num_constraints());
|
||||
}
|
||||
|
||||
// If a checksum file exists, check if it differs from the new checksum
|
||||
let checksum_file = ChecksumFile::new(&package_name);
|
||||
let checksum_differs = if checksum_file.exists_at(&package_path) {
|
||||
let previous_checksum = checksum_file.read_from(&package_path)?;
|
||||
program_checksum != previous_checksum
|
||||
} else {
|
||||
// By default, the checksum differs if there is no checksum to compare against
|
||||
true
|
||||
};
|
||||
|
||||
// If checksum differs, compile the program
|
||||
if checksum_differs {
|
||||
// Write the new checksum to the output directory
|
||||
checksum_file.write_to(&path, program_checksum)?;
|
||||
|
||||
tracing::debug!("Checksum saved ({:?})", path);
|
            }

            tracing::info!("Complete");

            return Ok(Some((program, checksum_differs)));
        if !MainFile::exists_at(&package_path) {
            return Err(anyhow!("File main.leo not found in src/ directory"));
        }

        Ok(None)
        // Create the output directory
        OutputsDirectory::create(&package_path)?;

        // Construct the path to the main file in the source directory
        let mut main_file_path = package_path.clone();
        main_file_path.push(SOURCE_DIRECTORY_NAME);
        main_file_path.push(MAIN_FILENAME);

        // Load the input file at `package_name.in`
        let (input_string, input_path) = InputFile::new(&package_name).read_from(&path)?;

        // Load the state file at `package_name.in`
        let (state_string, state_path) = StateFile::new(&package_name).read_from(&path)?;

        // Log compilation of files to console
        tracing::info!("Compiling main program... ({:?})", main_file_path);

        // Load the program at `main_file_path`
        let program = Compiler::<Fq, EdwardsGroupType>::parse_program_with_input(
            package_name.clone(),
            main_file_path,
            output_directory,
            &input_string,
            &input_path,
            &state_string,
            &state_path,
            thread_leaked_context(),
            Some(self.compiler_options.into()),
        )?;

        // Compute the current program checksum
        let program_checksum = program.checksum()?;

        // Generate the program on the constraint system and verify correctness
        {
            let mut cs = CircuitSynthesizer::<Bls12_377> {
                constraints: Default::default(),
                public_variables: Default::default(),
                private_variables: Default::default(),
                namespaces: Default::default(),
            };
            let temporary_program = program.clone();
            let output = temporary_program.compile_constraints(&mut cs)?;

            tracing::debug!("Compiled output - {:#?}", output);
            tracing::info!("Number of constraints - {:#?}", cs.num_constraints());

            // Serialize the circuit
            let circuit_object = SerializedCircuit::from(cs);
            let json = circuit_object.to_json_string().unwrap();
            // println!("json: {}", json);

            // Write serialized circuit to circuit `.json` file.
            let circuit_file = CircuitFile::new(&package_name);
            circuit_file.write_to(&path, json)?;

            // Check that we can read the serialized circuit file
            // let serialized = circuit_file.read_from(&package_path)?;

            // Deserialize the circuit
            // let deserialized = SerializedCircuit::from_json_string(&serialized).unwrap();
            // let _circuit_synthesizer = CircuitSynthesizer::<Bls12_377>::try_from(deserialized).unwrap();
            // println!("deserialized {:?}", circuit_synthesizer.num_constraints());
        }

        // If a checksum file exists, check if it differs from the new checksum
        let checksum_file = ChecksumFile::new(&package_name);
        let checksum_differs = if checksum_file.exists_at(&package_path) {
            let previous_checksum = checksum_file.read_from(&package_path)?;
            program_checksum != previous_checksum
        } else {
            // By default, the checksum differs if there is no checksum to compare against
            true
        };

        // If checksum differs, compile the program
        if checksum_differs {
            // Write the new checksum to the output directory
            checksum_file.write_to(&path, program_checksum)?;

            tracing::debug!("Checksum saved ({:?})", path);
        }

        tracing::info!("Complete");

        Ok((program, checksum_differs))
    }
}
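The build step above follows a simple checksum-gated rebuild rule: compute a checksum of the compiled program, compare it with the stored one, and treat a missing stored checksum as a mismatch. Below is a minimal, self-contained sketch of that rule; the output path and the DefaultHasher-based checksum are illustrative assumptions, not Leo's actual checksum format.

// Sketch only (not the Leo implementation): rebuild when the stored checksum
// is missing or differs from the freshly computed one.
use std::collections::hash_map::DefaultHasher;
use std::fs;
use std::hash::{Hash, Hasher};
use std::path::Path;

fn source_checksum(source: &str) -> String {
    // Illustrative checksum; Leo derives its checksum from the compiler itself.
    let mut hasher = DefaultHasher::new();
    source.hash(&mut hasher);
    format!("{:x}", hasher.finish())
}

fn checksum_differs(checksum_path: &Path, new_checksum: &str) -> bool {
    match fs::read_to_string(checksum_path) {
        Ok(previous) => previous.trim() != new_checksum,
        // No stored checksum yet: treat it as different so the program gets built.
        Err(_) => true,
    }
}

fn main() -> std::io::Result<()> {
    let checksum_path = Path::new("outputs/example.sum"); // illustrative path
    let new_checksum = source_checksum("function main() {}");

    if checksum_differs(checksum_path, &new_checksum) {
        fs::create_dir_all("outputs")?;
        fs::write(checksum_path, &new_checksum)?; // save for the next run
        println!("checksum changed, rebuilding program");
    } else {
        println!("checksum unchanged, skipping rebuild");
    }
    Ok(())
}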
@ -18,7 +18,6 @@ use crate::{commands::Command, config::*, context::Context};
|
||||
use leo_package::LeoPackage;
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use std::env::current_dir;
|
||||
use structopt::StructOpt;
|
||||
use tracing::span::Span;
|
||||
|
||||
@ -39,9 +38,9 @@ impl Command for Init {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn apply(self, _: Context, _: Self::Input) -> Result<Self::Output> {
|
||||
fn apply(self, context: Context, _: Self::Input) -> Result<Self::Output> {
|
||||
// Derive the package directory path.
|
||||
let path = current_dir()?;
|
||||
let path = context.dir()?;
|
||||
|
||||
// Check that the current package directory path exists.
|
||||
if !path.exists() {
|
||||
@ -55,12 +54,12 @@ impl Command for Init {
|
||||
.to_string_lossy()
|
||||
.to_string();
|
||||
if !LeoPackage::is_package_name_valid(&package_name) {
|
||||
return Err(anyhow!("Invalid Leo project name"));
|
||||
return Err(anyhow!("Invalid Leo project name: {}", package_name));
|
||||
}
|
||||
|
||||
let username = read_username().ok();
|
||||
|
||||
LeoPackage::initialize(&package_name, false, &path, username)?;
|
||||
LeoPackage::initialize(&package_name, &path, username)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@ -18,7 +18,7 @@ use crate::{commands::Command, config::*, context::Context};
|
||||
use leo_package::LeoPackage;
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use std::{env::current_dir, fs};
|
||||
use std::fs;
|
||||
use structopt::StructOpt;
|
||||
use tracing::span::Span;
|
||||
|
||||
@ -42,7 +42,7 @@ impl Command for New {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn apply(self, _: Context, _: Self::Input) -> Result<Self::Output> {
|
||||
fn apply(self, context: Context, _: Self::Input) -> Result<Self::Output> {
|
||||
// Check that the given package name is valid.
|
||||
let package_name = self.name;
|
||||
if !LeoPackage::is_package_name_valid(&package_name) {
|
||||
@ -52,7 +52,7 @@ impl Command for New {
|
||||
let username = read_username().ok();
|
||||
|
||||
// Derive the package directory path.
|
||||
let mut path = current_dir()?;
|
||||
let mut path = context.dir()?;
|
||||
path.push(&package_name);
|
||||
|
||||
// Verify the package directory path does not exist yet.
|
||||
@ -63,7 +63,7 @@ impl Command for New {
|
||||
// Create the package directory
|
||||
fs::create_dir_all(&path).map_err(|err| anyhow!("Could not create directory {}", err))?;
|
||||
|
||||
LeoPackage::initialize(&package_name, false, &path, username)?;
|
||||
LeoPackage::initialize(&package_name, &path, username)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@ -15,30 +15,15 @@
|
||||
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
use super::build::Build;
|
||||
use crate::{
|
||||
commands::Command,
|
||||
context::{Context, PACKAGE_MANAGER_URL},
|
||||
};
|
||||
use crate::{api::Publish as PublishRoute, commands::Command, context::Context};
|
||||
use leo_package::{
|
||||
outputs::OutputsDirectory,
|
||||
root::{ZipFile, AUTHOR_PLACEHOLDER},
|
||||
};
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use reqwest::{
|
||||
blocking::{multipart::Form, Client},
|
||||
header::{HeaderMap, HeaderValue},
|
||||
};
|
||||
use serde::Deserialize;
|
||||
use structopt::StructOpt;
|
||||
|
||||
pub const PUBLISH_URL: &str = "v1/package/publish";
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct ResponseJson {
|
||||
package_id: String,
|
||||
}
|
||||
|
||||
/// Publish package to Aleo Package Manager
|
||||
#[derive(StructOpt, Debug)]
|
||||
#[structopt(setting = structopt::clap::AppSettings::ColoredHelp)]
|
||||
@ -46,11 +31,14 @@ pub struct Publish {}
|
||||
|
||||
impl Command for Publish {
|
||||
type Input = <Build as Command>::Output;
|
||||
type Output = Option<String>;
|
||||
type Output = String;
|
||||
|
||||
/// Build program before publishing
|
||||
fn prelude(&self, context: Context) -> Result<Self::Input> {
|
||||
(Build {}).execute(context)
|
||||
(Build {
|
||||
compiler_options: Default::default(),
|
||||
})
|
||||
.execute(context)
|
||||
}
|
||||
|
||||
fn apply(self, context: Context, _input: Self::Input) -> Result<Self::Output> {
|
||||
@ -90,58 +78,19 @@ impl Command for Publish {
|
||||
if zip_file.exists_at(&path) {
|
||||
tracing::debug!("Existing package zip file found. Clearing it to regenerate.");
|
||||
// Remove the existing package zip file
|
||||
ZipFile::new(&package_name).remove(&path)?;
|
||||
zip_file.remove(&path)?;
|
||||
}
|
||||
|
||||
zip_file.write(&path)?;
|
||||
|
||||
let form_data = Form::new()
|
||||
.text("name", package_name.clone())
|
||||
.text("remote", format!("{}/{}", package_remote.author, package_name))
|
||||
.text("version", package_version)
|
||||
.file("file", zip_file.get_file_path(&path))?;
|
||||
// Make an API request with zip file and package data.
|
||||
let package_id = context.api.run_route(PublishRoute {
|
||||
name: package_name.clone(),
|
||||
remote: format!("{}/{}", package_remote.author, package_name),
|
||||
version: package_version,
|
||||
file: zip_file.get_file_path(&path).into(),
|
||||
})?;
|
||||
|
||||
// Client for make POST request
|
||||
let client = Client::new();
|
||||
|
||||
let token = context
|
||||
.api
|
||||
.auth_token()
|
||||
.ok_or_else(|| anyhow!("Login before publishing package: try leo login --help"))?;
|
||||
|
||||
// Headers for request to publish package
|
||||
let mut headers = HeaderMap::new();
|
||||
headers.insert(
|
||||
"Authorization",
|
||||
HeaderValue::from_str(&format!("{} {}", "Bearer", token)).unwrap(),
|
||||
);
|
||||
|
||||
// Make a request to publish a package
|
||||
let response = client
|
||||
.post(format!("{}{}", PACKAGE_MANAGER_URL, PUBLISH_URL).as_str())
|
||||
.headers(headers)
|
||||
.multipart(form_data)
|
||||
.send();
|
||||
|
||||
// Get a response result
|
||||
let result: ResponseJson = match response {
|
||||
Ok(json_result) => {
|
||||
let text = json_result.text()?;
|
||||
|
||||
match serde_json::from_str(&text) {
|
||||
Ok(json) => json,
|
||||
Err(_) => {
|
||||
return Err(anyhow!("Package not published: {}", text));
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(error) => {
|
||||
tracing::warn!("{:?}", error);
|
||||
return Err(anyhow!("Connection unavailable"));
|
||||
}
|
||||
};
|
||||
|
||||
tracing::info!("Package published successfully with id: {}", result.package_id);
|
||||
Ok(Some(result.package_id))
|
||||
tracing::info!("Package published successfully with id: {}", &package_id);
|
||||
Ok(package_id)
|
||||
}
|
||||
}
|
||||
|
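The Publish change above replaces the hand-rolled reqwest call with a typed route executed by the shared API client (context.api.run_route(PublishRoute { .. })). The following is a minimal, self-contained sketch of that "typed route" pattern; the trait, struct, and URL below are illustrative assumptions, not Leo's actual Api types.

// Sketch only: a typed route carries its own path and payload, and one client
// helper knows how to execute any such route.
trait Route {
    const PATH: &'static str;
    fn fields(&self) -> Vec<(&'static str, String)>;
}

struct PublishRoute {
    name: String,
    remote: String,
    version: String,
}

impl Route for PublishRoute {
    const PATH: &'static str = "v1/package/publish";
    fn fields(&self) -> Vec<(&'static str, String)> {
        vec![
            ("name", self.name.clone()),
            ("remote", self.remote.clone()),
            ("version", self.version.clone()),
        ]
    }
}

// Stand-in for Api::run_route: the request is assembled in one place instead of
// building headers and multipart forms at every call site.
fn run_route<R: Route>(host: &str, route: R) -> String {
    format!("POST {}{} {:?}", host, R::PATH, route.fields())
}

fn main() {
    let request = run_route(
        "https://api.aleo.pm/", // assumed host, for illustration only
        PublishRoute {
            name: "u8u32".to_string(),
            remote: "justice-league/u8u32".to_string(),
            version: "0.1.0".to_string(),
        },
    );
    println!("{}", request);
}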
@ -14,7 +14,7 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.

use super::setup::Setup;
use super::{build::BuildOptions, setup::Setup};
use crate::{commands::Command, context::Context};
use leo_package::outputs::ProofFile;
use snarkvm_algorithms::{
@ -35,6 +35,9 @@ use tracing::span::Span;
pub struct Prove {
    #[structopt(long = "skip-key-check", help = "Skip key verification on Setup stage")]
    pub(crate) skip_key_check: bool,

    #[structopt(flatten)]
    pub(crate) compiler_options: BuildOptions,
}

impl Command for Prove {
@ -46,8 +49,11 @@ impl Command for Prove {
    }

    fn prelude(&self, context: Context) -> Result<Self::Input> {
        let skip_key_check = self.skip_key_check;
        (Setup { skip_key_check }).execute(context)
        (Setup {
            skip_key_check: self.skip_key_check,
            compiler_options: self.compiler_options.clone(),
        })
        .execute(context)
    }

    fn apply(self, context: Context, input: Self::Input) -> Result<Self::Output> {
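Prove, Run, Setup, Test, and Watch all gain the same flattened BuildOptions field above, so build flags typed on any subcommand are forwarded down the prelude chain. Below is a standalone sketch of the #[structopt(flatten)] pattern involved; the struct shapes are simplified and the flag shown is only one of the real options.

// Sketch of sharing flags across subcommands via #[structopt(flatten)].
use structopt::StructOpt;

#[derive(StructOpt, Debug, Clone, Default)]
struct BuildOptions {
    #[structopt(long)]
    disable_all_optimizations: bool,
}

#[derive(StructOpt, Debug)]
struct Prove {
    #[structopt(long = "skip-key-check")]
    skip_key_check: bool,

    // Flattened: BuildOptions' flags appear directly on `prove` and can be
    // cloned into the Setup/Build stages this command wraps.
    #[structopt(flatten)]
    compiler_options: BuildOptions,
}

fn main() {
    let prove = Prove::from_iter(vec!["prove", "--skip-key-check", "--disable-all-optimizations"]);
    println!("{:?}", prove.compiler_options);
}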
@ -14,7 +14,7 @@
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
use super::prove::Prove;
|
||||
use super::{build::BuildOptions, prove::Prove};
|
||||
use crate::{commands::Command, context::Context};
|
||||
use leo_compiler::{compiler::Compiler, group::targets::edwards_bls12::EdwardsGroupType};
|
||||
|
||||
@ -30,6 +30,9 @@ use tracing::span::Span;
|
||||
pub struct Run {
|
||||
#[structopt(long = "skip-key-check", help = "Skip key verification on Setup stage")]
|
||||
pub(crate) skip_key_check: bool,
|
||||
|
||||
#[structopt(flatten)]
|
||||
pub(crate) compiler_options: BuildOptions,
|
||||
}
|
||||
|
||||
impl Command for Run {
|
||||
@ -41,8 +44,11 @@ impl Command for Run {
|
||||
}
|
||||
|
||||
fn prelude(&self, context: Context) -> Result<Self::Input> {
|
||||
let skip_key_check = self.skip_key_check;
|
||||
(Prove { skip_key_check }).execute(context)
|
||||
(Prove {
|
||||
skip_key_check: self.skip_key_check,
|
||||
compiler_options: self.compiler_options.clone(),
|
||||
})
|
||||
.execute(context)
|
||||
}
|
||||
|
||||
fn apply(self, _context: Context, input: Self::Input) -> Result<Self::Output> {
|
||||
|
@ -14,7 +14,7 @@
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
use super::build::Build;
|
||||
use super::build::{Build, BuildOptions};
|
||||
use crate::{commands::Command, context::Context};
|
||||
use leo_compiler::{compiler::Compiler, group::targets::edwards_bls12::EdwardsGroupType};
|
||||
use leo_package::outputs::{ProvingKeyFile, VerificationKeyFile};
|
||||
@ -35,6 +35,9 @@ use tracing::span::Span;
|
||||
pub struct Setup {
|
||||
#[structopt(long = "skip-key-check", help = "Skip key verification")]
|
||||
pub(crate) skip_key_check: bool,
|
||||
|
||||
#[structopt(flatten)]
|
||||
pub(crate) compiler_options: BuildOptions,
|
||||
}
|
||||
|
||||
impl Command for Setup {
|
||||
@ -50,7 +53,10 @@ impl Command for Setup {
|
||||
}
|
||||
|
||||
fn prelude(&self, context: Context) -> Result<Self::Input> {
|
||||
(Build {}).execute(context)
|
||||
(Build {
|
||||
compiler_options: self.compiler_options.clone(),
|
||||
})
|
||||
.execute(context)
|
||||
}
|
||||
|
||||
fn apply(self, context: Context, input: Self::Input) -> Result<Self::Output> {
|
||||
@ -58,8 +64,7 @@ impl Command for Setup {
|
||||
let package_name = context.manifest()?.get_package_name();
|
||||
|
||||
// Check if leo build failed
|
||||
let (program, checksum_differs) =
|
||||
input.ok_or_else(|| anyhow!("Unable to build, check that main file exists"))?;
|
||||
let (program, checksum_differs) = input;
|
||||
|
||||
// Check if a proving key and verification key already exists
|
||||
let keys_exist = ProvingKeyFile::new(&package_name).exists_at(&path)
|
||||
|
@ -14,6 +14,7 @@
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
use super::build::BuildOptions;
|
||||
use crate::{commands::Command, context::Context};
|
||||
use leo_compiler::{
|
||||
compiler::{thread_leaked_context, Compiler},
|
||||
@ -37,6 +38,9 @@ use tracing::span::Span;
|
||||
pub struct Test {
|
||||
#[structopt(short = "f", long = "file", name = "file")]
|
||||
pub(crate) files: Vec<PathBuf>,
|
||||
|
||||
#[structopt(flatten)]
|
||||
pub(crate) compiler_options: BuildOptions,
|
||||
}
|
||||
|
||||
impl Command for Test {
|
||||
@ -107,6 +111,7 @@ impl Command for Test {
|
||||
file_path,
|
||||
output_directory.clone(),
|
||||
thread_leaked_context(),
|
||||
Some(self.compiler_options.clone().into()),
|
||||
)?;
|
||||
|
||||
let temporary_program = program;
|
||||
|
@ -21,7 +21,7 @@ use structopt::StructOpt;
|
||||
use tracing::span::Span;
|
||||
|
||||
/// Setting for automatic updates of Leo
|
||||
#[derive(Debug, StructOpt, PartialEq)]
|
||||
#[derive(Debug, StructOpt)]
|
||||
pub enum Automatic {
|
||||
Automatic {
|
||||
#[structopt(name = "bool", help = "Boolean value: true or false", parse(try_from_str))]
|
||||
|
@ -14,7 +14,7 @@
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
use super::build::Build;
|
||||
use super::build::{Build, BuildOptions};
|
||||
use crate::{commands::Command, context::Context};
|
||||
|
||||
use std::{sync::mpsc::channel, time::Duration};
|
||||
@ -33,6 +33,9 @@ pub struct Watch {
|
||||
/// Set up watch interval
|
||||
#[structopt(short, long, default_value = "3")]
|
||||
interval: u64,
|
||||
|
||||
#[structopt(flatten)]
|
||||
compiler_options: BuildOptions,
|
||||
}
|
||||
|
||||
impl Command for Watch {
|
||||
@ -64,14 +67,13 @@ impl Command for Watch {
|
||||
match rx.recv() {
|
||||
// See changes on the write event
|
||||
Ok(DebouncedEvent::Write(_write)) => {
|
||||
match (Build {}).execute(context.clone()) {
|
||||
Ok(_output) => {
|
||||
tracing::info!("Built successfully");
|
||||
}
|
||||
Err(e) => {
|
||||
// Syntax error
|
||||
tracing::error!("Error {:?}", e);
|
||||
}
|
||||
match (Build {
|
||||
compiler_options: self.compiler_options.clone(),
|
||||
})
|
||||
.execute(context.clone())
|
||||
{
|
||||
Ok(_output) => tracing::info!("Built successfully"),
|
||||
Err(e) => tracing::error!("Error {:?}", e),
|
||||
};
|
||||
}
|
||||
// Other events
|
||||
|
@ -48,19 +48,19 @@ impl Context {
    }

    /// Create a new context for the current directory.
    pub fn create_context(path: PathBuf) -> Result<Context> {
    pub fn create_context(path: PathBuf, api_url: Option<String>) -> Result<Context> {
        let token = config::read_token().ok();

        let api = Api::new(PACKAGE_MANAGER_URL.to_string(), token);
        let api = Api::new(api_url.unwrap_or_else(|| PACKAGE_MANAGER_URL.to_string()), token);

        Ok(Context { api, path: Some(path) })
    }

    /// Returns project context.
    pub fn get_context() -> Result<Context> {
    pub fn get_context(api_url: Option<String>) -> Result<Context> {
        let token = config::read_token().ok();

        let api = Api::new(PACKAGE_MANAGER_URL.to_string(), token);
        let api = Api::new(api_url.unwrap_or_else(|| PACKAGE_MANAGER_URL.to_string()), token);

        Ok(Context { api, path: None })
    }
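Both context constructors now accept an optional API URL (fed from the new `APM_URL`-backed CLI option) and fall back to the built-in package manager URL when it is absent. A tiny sketch of that fallback follows; the default URL shown is illustrative, not necessarily Leo's real constant.

// Sketch of the optional-override pattern used by create_context/get_context.
const PACKAGE_MANAGER_URL: &str = "https://api.aleo.pm/"; // illustrative default

fn resolve_api_url(api_url: Option<String>) -> String {
    api_url.unwrap_or_else(|| PACKAGE_MANAGER_URL.to_string())
}

fn main() {
    // No override: fall back to the default.
    assert_eq!(resolve_api_url(None), PACKAGE_MANAGER_URL);
    // Override, e.g. a local Aleo PM instance used in tests (hypothetical URL).
    assert_eq!(
        resolve_api_url(Some("http://localhost:8080/".to_string())),
        "http://localhost:8080/"
    );
    println!("url resolution behaves as expected");
}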
@ -14,7 +14,7 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.

use std::fmt;
use std::{fmt, sync::Once};

use colored::Colorize;
use tracing::{event::Event, subscriber::Subscriber};
@ -24,6 +24,8 @@ use tracing_subscriber::{
    FmtSubscriber,
};

static START: Once = Once::new();

#[derive(Debug, Clone)]
pub struct Format<F = Full, T = SystemTime> {
    format: F,
@ -220,5 +222,8 @@ pub fn init_logger(_app_name: &'static str, verbosity: usize) {
        .event_format(Format::default())
        .finish();

    tracing::subscriber::set_global_default(subscriber).expect("setting default subscriber failed");
    // Call this line only once per process; needed for tests that run on the same thread.
    START.call_once(|| {
        tracing::subscriber::set_global_default(subscriber).expect("setting default subscriber failed");
    });
}
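The logger change above wraps the global subscriber installation in a std::sync::Once so repeated calls from tests no longer panic. A minimal standalone sketch of that idiom, separate from the Leo code:

// The closure passed to call_once runs exactly once per process, even when
// several callers race to perform the same global initialization.
use std::sync::Once;
use std::thread;

static START: Once = Once::new();

fn init_logger_once(id: usize) {
    START.call_once(|| {
        // In Leo this is where the global tracing subscriber would be installed.
        println!("global init performed by caller {}", id);
    });
}

fn main() {
    let handles: Vec<_> = (0..4)
        .map(|id| thread::spawn(move || init_logger_once(id)))
        .collect();
    for handle in handles {
        handle.join().unwrap();
    }
    // Exactly one "global init performed" line is printed, regardless of callers.
}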
184
leo/main.rs
@ -55,6 +55,9 @@ struct Opt {
|
||||
#[structopt(subcommand)]
|
||||
command: CommandOpts,
|
||||
|
||||
#[structopt(help = "Custom Aleo PM backend URL", env = "APM_URL")]
|
||||
api: Option<String>,
|
||||
|
||||
#[structopt(
|
||||
long,
|
||||
global = true,
|
||||
@ -178,9 +181,11 @@ enum CommandOpts {
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// Read command line arguments.
|
||||
let opt = Opt::from_args();
|
||||
handle_error(run_with_args(Opt::from_args()))
|
||||
}
|
||||
|
||||
/// Run command with custom build arguments.
|
||||
fn run_with_args(opt: Opt) -> Result<(), Error> {
|
||||
if !opt.quiet {
|
||||
// Init logger with optional debug flag.
|
||||
logger::init_logger("leo", match opt.debug {
|
||||
@ -192,11 +197,11 @@ fn main() {
|
||||
// Get custom root folder and create context for it.
|
||||
// If not specified, default context will be created in cwd.
|
||||
let context = handle_error(match opt.path {
|
||||
Some(path) => context::create_context(path),
|
||||
None => context::get_context(),
|
||||
Some(path) => context::create_context(path, opt.api),
|
||||
None => context::get_context(opt.api),
|
||||
});
|
||||
|
||||
handle_error(match opt.command {
|
||||
match opt.command {
|
||||
CommandOpts::Init { command } => command.try_execute(context),
|
||||
CommandOpts::New { command } => command.try_execute(context),
|
||||
CommandOpts::Build { command } => command.try_execute(context),
|
||||
@ -217,7 +222,7 @@ fn main() {
|
||||
|
||||
CommandOpts::Lint { command } => command.try_execute(context),
|
||||
CommandOpts::Deploy { command } => command.try_execute(context),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_error<T>(res: Result<T, Error>) -> T {
|
||||
@ -229,3 +234,170 @@ fn handle_error<T>(res: Result<T, Error>) -> T {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod cli_tests {
|
||||
use crate::{run_with_args, Opt};
|
||||
|
||||
use anyhow::Error;
|
||||
use std::path::PathBuf;
|
||||
use structopt::StructOpt;
|
||||
use test_dir::{DirBuilder, FileType, TestDir};
|
||||
|
||||
// Runs a command from a cmd-like argument string, e.g. "leo run --arg1 --arg2".
|
||||
fn run_cmd(args: &str, path: &Option<PathBuf>) -> Result<(), Error> {
|
||||
let args = args.split(' ').collect::<Vec<&str>>();
|
||||
let mut opts = Opt::from_iter_safe(args)?;
|
||||
|
||||
if path.is_some() {
|
||||
opts.path = path.clone();
|
||||
}
|
||||
|
||||
if !opts.debug {
|
||||
// turn off tracing for all tests
|
||||
opts.quiet = true;
|
||||
}
|
||||
|
||||
run_with_args(opts)
|
||||
}
|
||||
|
||||
// Creates a test directory with the given name.
|
||||
fn testdir(name: &str) -> TestDir {
|
||||
TestDir::temp().create(name, FileType::Dir)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn global_options() {
|
||||
let path = Some(PathBuf::from("examples/pedersen-hash"));
|
||||
|
||||
assert!(run_cmd("leo build", &path).is_ok());
|
||||
assert!(run_cmd("leo -q build", &path).is_ok());
|
||||
|
||||
assert!(run_cmd("leo --path ../../examples/no-directory-there build", &None).is_err());
|
||||
assert!(run_cmd("leo -v build", &None).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn global_options_fail() {
|
||||
assert!(run_cmd("leo --path ../../examples/no-directory-there build", &None).is_err());
|
||||
assert!(run_cmd("leo -v build", &None).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn init() {
|
||||
let dir = testdir("test");
|
||||
let path = Some(dir.path("test"));
|
||||
|
||||
assert!(run_cmd("leo init", &path).is_ok());
|
||||
assert!(run_cmd("leo init", &path).is_err()); // 2nd time
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn init_fail() {
|
||||
let dir = testdir("incorrect_name");
|
||||
let path = Some(dir.path("incorrect_name"));
|
||||
let fake = Some(PathBuf::from("no_such_directory"));
|
||||
|
||||
assert!(run_cmd("leo init", &fake).is_err());
|
||||
assert!(run_cmd("leo init", &path).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn new() {
|
||||
let dir = testdir("new");
|
||||
let path = Some(dir.path("new"));
|
||||
|
||||
assert!(run_cmd("leo new test", &path).is_ok());
|
||||
assert!(run_cmd("leo new test", &path).is_err()); // 2nd time
|
||||
assert!(run_cmd("leo new wrong_name", &path).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn unimplemented() {
|
||||
assert!(run_cmd("leo lint", &None).is_err());
|
||||
assert!(run_cmd("leo deploy", &None).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn clean() {
|
||||
let path = &Some(PathBuf::from("examples/pedersen-hash"));
|
||||
|
||||
assert!(run_cmd("leo build", path).is_ok());
|
||||
assert!(run_cmd("leo clean", path).is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn build_optimizations() {
|
||||
let dir = testdir("build-test");
|
||||
let path = dir.path("build-test");
|
||||
|
||||
assert!(run_cmd("leo new setup-test", &Some(path.clone())).is_ok());
|
||||
|
||||
let build_path = &Some(path.join("setup-test"));
|
||||
|
||||
assert!(run_cmd("leo build --disable-all-optimizations", build_path).is_ok());
|
||||
assert!(run_cmd("leo build --disable-code-elimination", build_path).is_ok());
|
||||
assert!(run_cmd("leo build --disable-constant-folding", build_path).is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn setup_prove_run_clean() {
|
||||
let dir = testdir("test");
|
||||
let path = dir.path("test");
|
||||
|
||||
assert!(run_cmd("leo new setup-test", &Some(path.clone())).is_ok());
|
||||
|
||||
let setup_path = &Some(path.join("setup-test"));
|
||||
|
||||
assert!(run_cmd("leo setup", setup_path).is_ok());
|
||||
assert!(run_cmd("leo setup", setup_path).is_ok());
|
||||
assert!(run_cmd("leo setup --skip-key-check", setup_path).is_ok());
|
||||
assert!(run_cmd("leo prove --skip-key-check", setup_path).is_ok());
|
||||
assert!(run_cmd("leo run --skip-key-check", setup_path).is_ok());
|
||||
assert!(run_cmd("leo clean", setup_path).is_ok());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_import() {
|
||||
let dir = testdir("test");
|
||||
let path = dir.path("test");
|
||||
|
||||
assert!(run_cmd("leo new import", &Some(path.clone())).is_ok());
|
||||
|
||||
let import_path = &Some(path.join("import"));
|
||||
|
||||
assert!(run_cmd("leo add no-package/definitely-no", import_path).is_err());
|
||||
assert!(run_cmd("leo add justice-league/u8u32", import_path).is_ok());
|
||||
assert!(run_cmd("leo remove u8u32", import_path).is_ok());
|
||||
assert!(run_cmd("leo add --author justice-league --package u8u32", import_path).is_ok());
|
||||
assert!(run_cmd("leo remove u8u32", import_path).is_ok());
|
||||
assert!(run_cmd("leo remove u8u32", import_path).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_missing_file() {
|
||||
let dir = testdir("test");
|
||||
let path = dir.path("test");
|
||||
|
||||
assert!(run_cmd("leo new test-file-missing", &Some(path.clone())).is_ok());
|
||||
|
||||
let path = path.join("test-file-missing");
|
||||
let file = path.join("src/main.leo");
|
||||
let path = Some(path);
|
||||
|
||||
assert!(run_cmd("leo test", &path).is_ok());
|
||||
std::fs::remove_file(&file).unwrap();
|
||||
assert!(run_cmd("leo test", &path).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_sudoku() {
|
||||
let path = &Some(PathBuf::from("examples/silly-sudoku"));
|
||||
|
||||
assert!(run_cmd("leo build", path).is_ok());
|
||||
assert!(run_cmd("leo test", path).is_ok());
|
||||
assert!(run_cmd("leo test -f examples/silly-sudoku/src/lib.leo", path).is_ok());
|
||||
assert!(run_cmd("leo test -f examples/silly-sudoku/src/main.leo", path).is_ok());
|
||||
}
|
||||
}
|
||||
|
@ -14,9 +14,8 @@
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
use std::path::PathBuf;
|
||||
|
||||
use anyhow::Result;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::{
|
||||
commands::{
|
||||
@ -39,34 +38,82 @@ const PEDERSEN_HASH_PATH: &str = "./examples/pedersen-hash/";
|
||||
|
||||
#[test]
|
||||
pub fn build_pedersen_hash() -> Result<()> {
|
||||
(Build {}).apply(context()?, ())?;
|
||||
(Build {
|
||||
compiler_options: Default::default(),
|
||||
})
|
||||
.apply(context()?, ())?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn setup_pedersen_hash() -> Result<()> {
|
||||
let build = (Build {}).apply(context()?, ())?;
|
||||
(Setup { skip_key_check: false }).apply(context()?, build.clone())?;
|
||||
(Setup { skip_key_check: true }).apply(context()?, build)?;
|
||||
let build = (Build {
|
||||
compiler_options: Default::default(),
|
||||
})
|
||||
.apply(context()?, ())?;
|
||||
(Setup {
|
||||
skip_key_check: false,
|
||||
compiler_options: Default::default(),
|
||||
})
|
||||
.apply(context()?, build.clone())?;
|
||||
(Setup {
|
||||
skip_key_check: true,
|
||||
compiler_options: Default::default(),
|
||||
})
|
||||
.apply(context()?, build)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn prove_pedersen_hash() -> Result<()> {
|
||||
let build = (Build {}).apply(context()?, ())?;
|
||||
let setup = (Setup { skip_key_check: false }).apply(context()?, build)?;
|
||||
(Prove { skip_key_check: false }).apply(context()?, setup.clone())?;
|
||||
(Prove { skip_key_check: true }).apply(context()?, setup)?;
|
||||
let build = (Build {
|
||||
compiler_options: Default::default(),
|
||||
})
|
||||
.apply(context()?, ())?;
|
||||
let setup = (Setup {
|
||||
skip_key_check: false,
|
||||
compiler_options: Default::default(),
|
||||
})
|
||||
.apply(context()?, build)?;
|
||||
(Prove {
|
||||
skip_key_check: false,
|
||||
compiler_options: Default::default(),
|
||||
})
|
||||
.apply(context()?, setup.clone())?;
|
||||
(Prove {
|
||||
skip_key_check: true,
|
||||
compiler_options: Default::default(),
|
||||
})
|
||||
.apply(context()?, setup)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
pub fn run_pedersen_hash() -> Result<()> {
|
||||
let build = (Build {}).apply(context()?, ())?;
|
||||
let setup = (Setup { skip_key_check: false }).apply(context()?, build)?;
|
||||
let prove = (Prove { skip_key_check: false }).apply(context()?, setup)?;
|
||||
(Run { skip_key_check: false }).apply(context()?, prove.clone())?;
|
||||
(Run { skip_key_check: true }).apply(context()?, prove)?;
|
||||
let build = (Build {
|
||||
compiler_options: Default::default(),
|
||||
})
|
||||
.apply(context()?, ())?;
|
||||
let setup = (Setup {
|
||||
skip_key_check: false,
|
||||
compiler_options: Default::default(),
|
||||
})
|
||||
.apply(context()?, build)?;
|
||||
let prove = (Prove {
|
||||
skip_key_check: false,
|
||||
compiler_options: Default::default(),
|
||||
})
|
||||
.apply(context()?, setup)?;
|
||||
(Run {
|
||||
skip_key_check: false,
|
||||
compiler_options: Default::default(),
|
||||
})
|
||||
.apply(context()?, prove.clone())?;
|
||||
(Run {
|
||||
skip_key_check: true,
|
||||
compiler_options: Default::default(),
|
||||
})
|
||||
.apply(context()?, prove)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@ -75,8 +122,16 @@ pub fn test_pedersen_hash() -> Result<()> {
|
||||
let mut main_file = PathBuf::from(PEDERSEN_HASH_PATH);
|
||||
main_file.push("src/main.leo");
|
||||
|
||||
(Test { files: vec![] }).apply(context()?, ())?;
|
||||
(Test { files: vec![main_file] }).apply(context()?, ())?;
|
||||
(Test {
|
||||
files: vec![],
|
||||
compiler_options: Default::default(),
|
||||
})
|
||||
.apply(context()?, ())?;
|
||||
(Test {
|
||||
files: vec![main_file],
|
||||
compiler_options: Default::default(),
|
||||
})
|
||||
.apply(context()?, ())?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@ -90,6 +145,8 @@ pub fn test_logout() -> Result<()> {
|
||||
// So this test only tells that error cases are errors
|
||||
#[test]
|
||||
pub fn login_incorrect_credentials_or_token() -> Result<()> {
|
||||
test_logout()?;
|
||||
|
||||
// no credentials passed
|
||||
let login = Login::new(None, None, None).apply(context()?, ());
|
||||
assert!(login.is_err());
|
||||
@ -152,7 +209,7 @@ pub fn leo_update_and_update_automatic() -> Result<()> {
|
||||
/// Create context for Pedersen Hash example
|
||||
fn context() -> Result<Context> {
|
||||
let path = PathBuf::from(&PEDERSEN_HASH_PATH);
|
||||
let context = create_context(path)?;
|
||||
let context = create_context(path, None)?;
|
||||
|
||||
Ok(context)
|
||||
}
|
||||
|
@ -33,13 +33,8 @@ pub struct LeoPackage;
|
||||
|
||||
impl LeoPackage {
|
||||
/// Initializes a Leo package at the given path.
|
||||
pub fn initialize(
|
||||
package_name: &str,
|
||||
is_lib: bool,
|
||||
path: &Path,
|
||||
author: Option<String>,
|
||||
) -> Result<(), PackageError> {
|
||||
package::Package::initialize(package_name, is_lib, path, author)
|
||||
pub fn initialize(package_name: &str, path: &Path, author: Option<String>) -> Result<(), PackageError> {
|
||||
package::Package::initialize(package_name, path, author)
|
||||
}
|
||||
|
||||
/// Returns `true` if the given Leo package name is valid.
|
||||
|
@ -19,7 +19,7 @@ use crate::{
|
||||
imports::ImportsDirectory,
|
||||
inputs::{InputFile, InputsDirectory, StateFile},
|
||||
root::{Gitignore, Manifest, README},
|
||||
source::{LibraryFile, MainFile, SourceDirectory},
|
||||
source::{MainFile, SourceDirectory},
|
||||
};
|
||||
|
||||
use serde::Deserialize;
|
||||
@ -107,7 +107,7 @@ impl Package {
|
||||
}
|
||||
|
||||
/// Returns `true` if a package can be initialized at a given path.
|
||||
pub fn can_initialize(package_name: &str, is_lib: bool, path: &Path) -> bool {
|
||||
pub fn can_initialize(package_name: &str, path: &Path) -> bool {
|
||||
// Check that the package name is valid.
|
||||
if !Self::is_package_name_valid(package_name) {
|
||||
return false;
|
||||
@ -122,32 +122,24 @@ impl Package {
|
||||
result = false;
|
||||
}
|
||||
|
||||
if is_lib {
|
||||
// Check if the library file already exists.
|
||||
if LibraryFile::exists_at(path) {
|
||||
existing_files.push(LibraryFile::filename());
|
||||
result = false;
|
||||
}
|
||||
} else {
|
||||
// Check if the input file already exists.
|
||||
let input_file = InputFile::new(&package_name);
|
||||
if input_file.exists_at(path) {
|
||||
existing_files.push(input_file.filename());
|
||||
result = false;
|
||||
}
|
||||
// Check if the input file already exists.
|
||||
let input_file = InputFile::new(&package_name);
|
||||
if input_file.exists_at(path) {
|
||||
existing_files.push(input_file.filename());
|
||||
result = false;
|
||||
}
|
||||
|
||||
// Check if the state file already exists.
|
||||
let state_file = StateFile::new(&package_name);
|
||||
if state_file.exists_at(path) {
|
||||
existing_files.push(state_file.filename());
|
||||
result = false;
|
||||
}
|
||||
// Check if the state file already exists.
|
||||
let state_file = StateFile::new(&package_name);
|
||||
if state_file.exists_at(path) {
|
||||
existing_files.push(state_file.filename());
|
||||
result = false;
|
||||
}
|
||||
|
||||
// Check if the main file already exists.
|
||||
if MainFile::exists_at(path) {
|
||||
existing_files.push(MainFile::filename());
|
||||
result = false;
|
||||
}
|
||||
// Check if the main file already exists.
|
||||
if MainFile::exists_at(path) {
|
||||
existing_files.push(MainFile::filename());
|
||||
result = false;
|
||||
}
|
||||
|
||||
if !existing_files.is_empty() {
|
||||
@ -158,7 +150,7 @@ impl Package {
|
||||
}
|
||||
|
||||
/// Returns `true` if a package is initialized at the given path
|
||||
pub fn is_initialized(package_name: &str, is_lib: bool, path: &Path) -> bool {
|
||||
pub fn is_initialized(package_name: &str, path: &Path) -> bool {
|
||||
// Check that the package name is valid.
|
||||
if !Self::is_package_name_valid(package_name) {
|
||||
return false;
|
||||
@ -169,43 +161,31 @@ impl Package {
|
||||
return false;
|
||||
}
|
||||
|
||||
if is_lib {
|
||||
// Check if the library file exists.
|
||||
if !LibraryFile::exists_at(&path) {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
// Check if the input file exists.
|
||||
let input_file = InputFile::new(&package_name);
|
||||
if !input_file.exists_at(&path) {
|
||||
return false;
|
||||
}
|
||||
// Check if the input file exists.
|
||||
let input_file = InputFile::new(&package_name);
|
||||
if !input_file.exists_at(&path) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check if the state file exists.
|
||||
let state_file = StateFile::new(&package_name);
|
||||
if !state_file.exists_at(&path) {
|
||||
return false;
|
||||
}
|
||||
// Check if the state file exists.
|
||||
let state_file = StateFile::new(&package_name);
|
||||
if !state_file.exists_at(&path) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check if the main file exists.
|
||||
if !MainFile::exists_at(&path) {
|
||||
return false;
|
||||
}
|
||||
// Check if the main file exists.
|
||||
if !MainFile::exists_at(&path) {
|
||||
return false;
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
/// Creates a package at the given path
|
||||
pub fn initialize(
|
||||
package_name: &str,
|
||||
is_lib: bool,
|
||||
path: &Path,
|
||||
author: Option<String>,
|
||||
) -> Result<(), PackageError> {
|
||||
pub fn initialize(package_name: &str, path: &Path, author: Option<String>) -> Result<(), PackageError> {
|
||||
// First, verify that this directory is not already initialized as a Leo package.
|
||||
{
|
||||
if !Self::can_initialize(package_name, is_lib, path) {
|
||||
if !Self::can_initialize(package_name, path) {
|
||||
return Err(PackageError::FailedToInitialize(
|
||||
package_name.to_owned(),
|
||||
path.as_os_str().to_owned(),
|
||||
@ -232,27 +212,21 @@ impl Package {
|
||||
// Create the source directory.
|
||||
SourceDirectory::create(&path)?;
|
||||
|
||||
// Create a new library or binary file.
|
||||
if is_lib {
|
||||
// Create the library file in the source directory.
|
||||
LibraryFile::new(&package_name).write_to(&path)?;
|
||||
} else {
|
||||
// Create the input directory.
|
||||
InputsDirectory::create(&path)?;
|
||||
// Create the input directory.
|
||||
InputsDirectory::create(&path)?;
|
||||
|
||||
// Create the input file in the inputs directory.
|
||||
InputFile::new(&package_name).write_to(&path)?;
|
||||
// Create the input file in the inputs directory.
|
||||
InputFile::new(&package_name).write_to(&path)?;
|
||||
|
||||
// Create the state file in the inputs directory.
|
||||
StateFile::new(&package_name).write_to(&path)?;
|
||||
// Create the state file in the inputs directory.
|
||||
StateFile::new(&package_name).write_to(&path)?;
|
||||
|
||||
// Create the main file in the source directory.
|
||||
MainFile::new(&package_name).write_to(&path)?;
|
||||
}
|
||||
// Create the main file in the source directory.
|
||||
MainFile::new(&package_name).write_to(&path)?;
|
||||
}
|
||||
// Next, verify that a valid Leo package has been initialized in this directory
|
||||
{
|
||||
if !Self::is_initialized(package_name, is_lib, path) {
|
||||
if !Self::is_initialized(package_name, path) {
|
||||
return Err(PackageError::FailedToInitialize(
|
||||
package_name.to_owned(),
|
||||
path.as_os_str().to_owned(),
|
||||
|
@ -1,76 +0,0 @@
|
||||
// Copyright (C) 2019-2021 Aleo Systems Inc.
|
||||
// This file is part of the Leo library.
|
||||
|
||||
// The Leo library is free software: you can redistribute it and/or modify
|
||||
// it under the terms of the GNU General Public License as published by
|
||||
// the Free Software Foundation, either version 3 of the License, or
|
||||
// (at your option) any later version.
|
||||
|
||||
// The Leo library is distributed in the hope that it will be useful,
|
||||
// but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
// GNU General Public License for more details.
|
||||
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
//! The `lib.leo` file.
|
||||
|
||||
use crate::{errors::LibraryFileError, source::directory::SOURCE_DIRECTORY_NAME};
|
||||
|
||||
use serde::Deserialize;
|
||||
use std::{borrow::Cow, fs::File, io::Write, path::Path};
|
||||
|
||||
pub static LIBRARY_FILENAME: &str = "lib.leo";
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct LibraryFile {
|
||||
pub package_name: String,
|
||||
}
|
||||
|
||||
impl LibraryFile {
|
||||
pub fn new(package_name: &str) -> Self {
|
||||
Self {
|
||||
package_name: package_name.to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn filename() -> String {
|
||||
format!("{}{}", SOURCE_DIRECTORY_NAME, LIBRARY_FILENAME)
|
||||
}
|
||||
|
||||
pub fn exists_at(path: &Path) -> bool {
|
||||
let mut path = Cow::from(path);
|
||||
if path.is_dir() {
|
||||
if !path.ends_with(SOURCE_DIRECTORY_NAME) {
|
||||
path.to_mut().push(SOURCE_DIRECTORY_NAME);
|
||||
}
|
||||
path.to_mut().push(LIBRARY_FILENAME);
|
||||
}
|
||||
path.exists()
|
||||
}
|
||||
|
||||
pub fn write_to(self, path: &Path) -> Result<(), LibraryFileError> {
|
||||
let mut path = Cow::from(path);
|
||||
if path.is_dir() {
|
||||
if !path.ends_with(SOURCE_DIRECTORY_NAME) {
|
||||
path.to_mut().push(SOURCE_DIRECTORY_NAME);
|
||||
}
|
||||
path.to_mut().push(LIBRARY_FILENAME);
|
||||
}
|
||||
|
||||
let mut file = File::create(&path)?;
|
||||
Ok(file.write_all(self.template().as_bytes())?)
|
||||
}
|
||||
|
||||
fn template(&self) -> String {
|
||||
format!(
|
||||
r#"// The '{}' library circuit.
|
||||
circuit Foo {{
|
||||
a: field
|
||||
}}
|
||||
"#,
|
||||
self.package_name
|
||||
)
|
||||
}
|
||||
}
|
@ -17,8 +17,5 @@
|
||||
pub mod directory;
|
||||
pub use directory::*;
|
||||
|
||||
pub mod library;
|
||||
pub use library::*;
|
||||
|
||||
pub mod main;
|
||||
pub use main::*;
|
||||
|
@ -19,7 +19,7 @@ use leo_package::{
|
||||
inputs::{InputFile, InputsDirectory, StateFile},
|
||||
package::Package,
|
||||
root::Manifest,
|
||||
source::{LibraryFile, MainFile, SourceDirectory},
|
||||
source::{MainFile, SourceDirectory},
|
||||
};
|
||||
|
||||
const TEST_PACKAGE_NAME: &str = "test-package";
|
||||
@ -29,13 +29,13 @@ fn initialize_valid_package() {
|
||||
let test_directory = test_dir();
|
||||
|
||||
// Ensure a package can be initialized at the `test_directory`
|
||||
assert!(Package::can_initialize(TEST_PACKAGE_NAME, false, &test_directory));
|
||||
assert!(Package::can_initialize(TEST_PACKAGE_NAME, &test_directory));
|
||||
|
||||
// Initialize a package at the `test_directory`
|
||||
assert!(Package::initialize(TEST_PACKAGE_NAME, false, &test_directory, None).is_ok());
|
||||
assert!(Package::initialize(TEST_PACKAGE_NAME, &test_directory, None).is_ok());
|
||||
|
||||
// Ensure a package is initialized at the `test_directory`
|
||||
assert!(Package::is_initialized(TEST_PACKAGE_NAME, false, &test_directory));
|
||||
assert!(Package::is_initialized(TEST_PACKAGE_NAME, &test_directory));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -43,21 +43,13 @@ fn initialize_valid_package_with_author() {
|
||||
let test_directory = test_dir();
|
||||
|
||||
// Ensure a package can be initialized at the `test_directory`
|
||||
assert!(Package::can_initialize(TEST_PACKAGE_NAME, false, &test_directory));
|
||||
assert!(Package::can_initialize(TEST_PACKAGE_NAME, &test_directory));
|
||||
|
||||
// Initialize a package at the `test_directory`
|
||||
assert!(
|
||||
Package::initialize(
|
||||
TEST_PACKAGE_NAME,
|
||||
false,
|
||||
&test_directory,
|
||||
Some(String::from("test_user"))
|
||||
)
|
||||
.is_ok()
|
||||
);
|
||||
assert!(Package::initialize(TEST_PACKAGE_NAME, &test_directory, Some(String::from("test_user"))).is_ok());
|
||||
|
||||
// Ensure a package is initialized at the `test_directory`
|
||||
assert!(Package::is_initialized(TEST_PACKAGE_NAME, false, &test_directory));
|
||||
assert!(Package::is_initialized(TEST_PACKAGE_NAME, &test_directory));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -71,7 +63,7 @@ fn initialize_fails_with_existing_manifest() {
|
||||
let test_directory = test_dir();
|
||||
|
||||
// Ensure a package can be initialized at the `test_directory`
|
||||
assert!(Package::can_initialize(TEST_PACKAGE_NAME, false, &test_directory));
|
||||
assert!(Package::can_initialize(TEST_PACKAGE_NAME, &test_directory));
|
||||
|
||||
// Manually add a manifest file to the `test_directory`
|
||||
Manifest::new(TEST_PACKAGE_NAME, None)
|
||||
@ -80,28 +72,10 @@ fn initialize_fails_with_existing_manifest() {
|
||||
.unwrap();
|
||||
|
||||
// Attempt to initialize a package at the `test_directory`
|
||||
assert!(Package::initialize(TEST_PACKAGE_NAME, false, &test_directory, None).is_err());
|
||||
assert!(Package::initialize(TEST_PACKAGE_NAME, &test_directory, None).is_err());
|
||||
|
||||
// Ensure package is not initialized at the `test_directory`
|
||||
assert!(!Package::is_initialized(TEST_PACKAGE_NAME, false, &test_directory));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn initialize_fails_with_existing_library_file() {
|
||||
let test_directory = test_dir();
|
||||
|
||||
// Ensure a package can be initialized at the `test_directory`
|
||||
assert!(Package::can_initialize(TEST_PACKAGE_NAME, true, &test_directory));
|
||||
|
||||
// Manually add a source directory and a library file to the `test_directory`
|
||||
SourceDirectory::create(&test_directory).unwrap();
|
||||
LibraryFile::new(TEST_PACKAGE_NAME).write_to(&test_directory).unwrap();
|
||||
|
||||
// Attempt to initialize a package at the `test_directory`
|
||||
assert!(Package::initialize(TEST_PACKAGE_NAME, true, &test_directory, None).is_err());
|
||||
|
||||
// Ensure package is not initialized at the `test_directory`
|
||||
assert!(!Package::is_initialized(TEST_PACKAGE_NAME, true, &test_directory));
|
||||
assert!(!Package::is_initialized(TEST_PACKAGE_NAME, &test_directory));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -109,25 +83,17 @@ fn initialize_fails_with_existing_input_file() {
|
||||
let test_directory = test_dir();
|
||||
|
||||
// Ensure a package can be initialized at the `test_directory`
|
||||
assert!(Package::can_initialize(TEST_PACKAGE_NAME, false, &test_directory));
|
||||
assert!(Package::can_initialize(TEST_PACKAGE_NAME, &test_directory));
|
||||
|
||||
// Manually add an inputs directory and an input file to the `test_directory`
|
||||
InputsDirectory::create(&test_directory).unwrap();
|
||||
InputFile::new(TEST_PACKAGE_NAME).write_to(&test_directory).unwrap();
|
||||
|
||||
// Attempt to initialize a package at the `test_directory`
|
||||
assert!(
|
||||
Package::initialize(
|
||||
TEST_PACKAGE_NAME,
|
||||
false,
|
||||
&test_directory,
|
||||
Some(String::from("test_user"))
|
||||
)
|
||||
.is_err()
|
||||
);
|
||||
assert!(Package::initialize(TEST_PACKAGE_NAME, &test_directory, Some(String::from("test_user"))).is_err());
|
||||
|
||||
// Ensure package is not initialized at the `test_directory`
|
||||
assert!(!Package::is_initialized(TEST_PACKAGE_NAME, false, &test_directory));
|
||||
assert!(!Package::is_initialized(TEST_PACKAGE_NAME, &test_directory));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -135,17 +101,17 @@ fn initialize_fails_with_existing_state_file() {
|
||||
let test_directory = test_dir();
|
||||
|
||||
// Ensure a package can be initialized at the `test_directory`
|
||||
assert!(Package::can_initialize(TEST_PACKAGE_NAME, false, &test_directory));
|
||||
assert!(Package::can_initialize(TEST_PACKAGE_NAME, &test_directory));
|
||||
|
||||
// Manually add an inputs directory and a state file to the `test_directory`
|
||||
InputsDirectory::create(&test_directory).unwrap();
|
||||
StateFile::new(TEST_PACKAGE_NAME).write_to(&test_directory).unwrap();
|
||||
|
||||
// Attempt to initialize a package at the `test_directory`
|
||||
assert!(Package::initialize(TEST_PACKAGE_NAME, false, &test_directory, None).is_err());
|
||||
assert!(Package::initialize(TEST_PACKAGE_NAME, &test_directory, None).is_err());
|
||||
|
||||
// Ensure package is not initialized at the `test_directory`
|
||||
assert!(!Package::is_initialized(TEST_PACKAGE_NAME, false, &test_directory));
|
||||
assert!(!Package::is_initialized(TEST_PACKAGE_NAME, &test_directory));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -153,15 +119,15 @@ fn initialize_fails_with_existing_main_file() {
|
||||
let test_directory = test_dir();
|
||||
|
||||
// Ensure a package can be initialized at the `test_directory`
|
||||
assert!(Package::can_initialize(TEST_PACKAGE_NAME, false, &test_directory));
|
||||
assert!(Package::can_initialize(TEST_PACKAGE_NAME, &test_directory));
|
||||
|
||||
// Manually add a source directory and a main file to the `test_directory`
|
||||
SourceDirectory::create(&test_directory).unwrap();
|
||||
MainFile::new(TEST_PACKAGE_NAME).write_to(&test_directory).unwrap();
|
||||
|
||||
// Attempt to initialize a package at the `test_directory`
|
||||
assert!(Package::initialize(TEST_PACKAGE_NAME, false, &test_directory, None).is_err());
|
||||
assert!(Package::initialize(TEST_PACKAGE_NAME, &test_directory, None).is_err());
|
||||
|
||||
// Ensure package is not initialized at the `test_directory`
|
||||
assert!(!Package::is_initialized(TEST_PACKAGE_NAME, false, &test_directory));
|
||||
assert!(!Package::is_initialized(TEST_PACKAGE_NAME, &test_directory));
|
||||
}
|
||||
|
@ -28,6 +28,7 @@ impl ParserContext {
|
||||
let mut imports = Vec::new();
|
||||
let mut circuits = IndexMap::new();
|
||||
let mut functions = IndexMap::new();
|
||||
let mut global_consts = IndexMap::new();
|
||||
// let mut tests = IndexMap::new();
|
||||
|
||||
while self.has_next() {
|
||||
@ -55,6 +56,10 @@ impl ParserContext {
|
||||
// input_file: None,
|
||||
// });
|
||||
}
|
||||
Token::Const => {
|
||||
let (name, global_const) = self.parse_global_const_declaration()?;
|
||||
global_consts.insert(name, global_const);
|
||||
}
|
||||
_ => {
|
||||
return Err(SyntaxError::unexpected(
|
||||
&token.token,
|
||||
@ -76,6 +81,7 @@ impl ParserContext {
|
||||
imports,
|
||||
circuits,
|
||||
functions,
|
||||
global_consts,
|
||||
})
|
||||
}
|
||||
|
||||
@ -391,4 +397,20 @@ impl ParserContext {
|
||||
block,
|
||||
}))
|
||||
}
|
||||
|
||||
///
|
||||
/// Returns an [`(String, DefinitionStatement)`] AST node if the next tokens represent a global
|
||||
/// const definition statement and assignment.
|
||||
///
|
||||
pub fn parse_global_const_declaration(&mut self) -> SyntaxResult<(String, DefinitionStatement)> {
|
||||
let statement = self.parse_definition_statement()?;
|
||||
let variable_names = statement
|
||||
.variable_names
|
||||
.iter()
|
||||
.map(|variable_name| variable_name.identifier.name.to_string())
|
||||
.collect::<Vec<String>>()
|
||||
.join(",");
|
||||
|
||||
Ok((variable_names, statement))
|
||||
}
|
||||
}
|
||||
|
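The new parse_global_const_declaration above keys a global const that binds several names by the comma-joined list of its identifiers, so a multi-name definition is stored under a single entry in global_consts. A tiny sketch of that keying scheme (illustrative helper, not the parser itself):

fn global_const_key(variable_names: &[&str]) -> String {
    // Mirrors the join(",") in the parser code above.
    variable_names
        .iter()
        .map(|name| name.to_string())
        .collect::<Vec<String>>()
        .join(",")
}

fn main() {
    assert_eq!(global_const_key(&["x"]), "x");
    assert_eq!(global_const_key(&["a", "b"]), "a,b");
    println!("multi-name consts are keyed as \"{}\"", global_const_key(&["a", "b"]));
}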
@ -3,6 +3,7 @@
|
||||
"expected_input": [],
|
||||
"imports": [],
|
||||
"circuits": {},
|
||||
"global_consts": {},
|
||||
"functions": {
|
||||
"{\"name\":\"main\",\"span\":\"{\\\"line_start\\\":1,\\\"line_stop\\\":1,\\\"col_start\\\":10,\\\"col_stop\\\":14,\\\"path\\\":\\\"test\\\",\\\"content\\\":\\\"function main() {\\\"}\"}": {
|
||||
"annotations": [],
|
||||
|
@ -14,6 +14,14 @@
|
||||
// You should have received a copy of the GNU General Public License
|
||||
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
//! The test framework to run integration tests with Leo code text.
|
||||
//!
|
||||
//! This module contains the [`run_tests()`] method which runs all integration tests in the
|
||||
//! root [`tests/`] directory.
|
||||
//!
|
||||
//! To regenerate the tests after a syntax change or failing test, delete the [`tests/expectations/`]
|
||||
//! directory and run the [`parser_tests()`] test in [`parser/src/test.rs`].
|
||||
|
||||
pub mod error;
|
||||
|
||||
pub mod fetch;
|
||||
|
@ -49,4 +49,5 @@ outputs:
|
||||
col_stop: 6
|
||||
path: test
|
||||
content: " function x() -> Self {\n...\n }"
|
||||
global_consts: {}
|
||||
functions: {}
|
||||
|
@ -9,4 +9,5 @@ outputs:
|
||||
"{\"name\":\"X\",\"span\":\"{\\\"line_start\\\":3,\\\"line_stop\\\":3,\\\"col_start\\\":9,\\\"col_stop\\\":10,\\\"path\\\":\\\"test\\\",\\\"content\\\":\\\"circuit X {\\\"}\"}":
|
||||
circuit_name: "{\"name\":\"X\",\"span\":\"{\\\"line_start\\\":3,\\\"line_stop\\\":3,\\\"col_start\\\":9,\\\"col_stop\\\":10,\\\"path\\\":\\\"test\\\",\\\"content\\\":\\\"circuit X {\\\"}\"}"
|
||||
members: []
|
||||
global_consts: {}
|
||||
functions: {}
|
||||
|
@ -93,4 +93,5 @@ outputs:
|
||||
col_stop: 6
|
||||
path: test
|
||||
content: " function y() {\n...\n }"
|
||||
global_consts: {}
|
||||
functions: {}
|
||||
|
@ -15,4 +15,5 @@ outputs:
|
||||
- CircuitVariable:
|
||||
- "{\"name\":\"y\",\"span\":\"{\\\"line_start\\\":5,\\\"line_stop\\\":5,\\\"col_start\\\":5,\\\"col_stop\\\":6,\\\"path\\\":\\\"test\\\",\\\"content\\\":\\\" y: u32,\\\"}\"}"
|
||||
- IntegerType: U32
|
||||
global_consts: {}
|
||||
functions: {}
|
||||
|
@ -87,4 +87,5 @@ outputs:
|
||||
col_stop: 6
|
||||
path: test
|
||||
content: " function y() {\n...\n }"
|
||||
global_consts: {}
|
||||
functions: {}
|
||||
|
@ -49,4 +49,5 @@ outputs:
|
||||
col_stop: 6
|
||||
path: test
|
||||
content: " function x(mut self) {\n...\n }"
|
||||
global_consts: {}
|
||||
functions: {}
|
||||
|
@ -49,4 +49,5 @@ outputs:
|
||||
col_stop: 6
|
||||
path: test
|
||||
content: " function x(self) {\n...\n }"
|
||||
global_consts: {}
|
||||
functions: {}
|
||||
|
@ -6,6 +6,7 @@ outputs:
|
||||
expected_input: []
|
||||
imports: []
|
||||
circuits: {}
|
||||
global_consts: {}
|
||||
functions:
|
||||
"{\"name\":\"x\",\"span\":\"{\\\"line_start\\\":4,\\\"line_stop\\\":4,\\\"col_start\\\":10,\\\"col_stop\\\":11,\\\"path\\\":\\\"test\\\",\\\"content\\\":\\\"function x() {\\\"}\"}":
|
||||
annotations:
|
||||
|
@ -6,6 +6,7 @@ outputs:
|
||||
expected_input: []
|
||||
imports: []
|
||||
circuits: {}
|
||||
global_consts: {}
|
||||
functions:
|
||||
"{\"name\":\"x\",\"span\":\"{\\\"line_start\\\":4,\\\"line_stop\\\":4,\\\"col_start\\\":10,\\\"col_stop\\\":11,\\\"path\\\":\\\"test\\\",\\\"content\\\":\\\"function x() {\\\"}\"}":
|
||||
annotations:
|
||||
|
@ -6,6 +6,7 @@ outputs:
|
||||
expected_input: []
|
||||
imports: []
|
||||
circuits: {}
|
||||
global_consts: {}
|
||||
functions:
|
||||
"{\"name\":\"x\",\"span\":\"{\\\"line_start\\\":4,\\\"line_stop\\\":4,\\\"col_start\\\":10,\\\"col_stop\\\":11,\\\"path\\\":\\\"test\\\",\\\"content\\\":\\\"function x() {\\\"}\"}":
|
||||
annotations:
|
||||
|
@ -6,6 +6,7 @@ outputs:
|
||||
expected_input: []
|
||||
imports: []
|
||||
circuits: {}
|
||||
global_consts: {}
|
||||
functions:
|
||||
"{\"name\":\"x\",\"span\":\"{\\\"line_start\\\":3,\\\"line_stop\\\":3,\\\"col_start\\\":10,\\\"col_stop\\\":11,\\\"path\\\":\\\"test\\\",\\\"content\\\":\\\"function x(x: u32, const y: i32) {\\\"}\"}":
|
||||
annotations: []
|
||||
|
@ -6,6 +6,7 @@ outputs:
|
||||
expected_input: []
|
||||
imports: []
|
||||
circuits: {}
|
||||
global_consts: {}
|
||||
functions:
|
||||
"{\"name\":\"x\",\"span\":\"{\\\"line_start\\\":3,\\\"line_stop\\\":3,\\\"col_start\\\":10,\\\"col_stop\\\":11,\\\"path\\\":\\\"test\\\",\\\"content\\\":\\\"function x(const self) {\\\"}\"}":
|
||||
annotations: []
|
||||
|
@ -6,6 +6,7 @@ outputs:
|
||||
expected_input: []
|
||||
imports: []
|
||||
circuits: {}
|
||||
global_consts: {}
|
||||
functions:
|
||||
"{\"name\":\"x\",\"span\":\"{\\\"line_start\\\":3,\\\"line_stop\\\":3,\\\"col_start\\\":10,\\\"col_stop\\\":11,\\\"path\\\":\\\"test\\\",\\\"content\\\":\\\"function x() {\\\"}\"}":
|
||||
annotations: []
|
||||
|
@ -6,6 +6,7 @@ outputs:
|
||||
expected_input: []
|
||||
imports: []
|
||||
circuits: {}
|
||||
global_consts: {}
|
||||
functions:
|
||||
"{\"name\":\"x\",\"span\":\"{\\\"line_start\\\":3,\\\"line_stop\\\":3,\\\"col_start\\\":10,\\\"col_stop\\\":11,\\\"path\\\":\\\"test\\\",\\\"content\\\":\\\"function x() {}\\\"}\"}":
|
||||
annotations: []
|
||||
|
Some files were not shown because too many files have changed in this diff.