Merge branch 'master' into leo-manifest-dependencies

This commit is contained in:
damirka 2021-07-05 19:59:25 +03:00
commit 4c1298d179
36 changed files with 1906 additions and 520 deletions

8
Cargo.lock generated
View File

@ -865,9 +865,9 @@ checksum = "62aca2aba2d62b4a7f5b33f3712cb1b0692779a56fb510499d5c0aa594daeaf3"
[[package]]
name = "hashbrown"
version = "0.9.1"
version = "0.11.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7afe4a420e3fe79967a00898cc1f4db7c8a49a9333a29f8a4bd76a253d5cd04"
checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
[[package]]
name = "heck"
@ -977,9 +977,9 @@ dependencies = [
[[package]]
name = "indexmap"
version = "1.6.2"
version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "824845a0bf897a9042383849b02c1bc219c2383772efcd5c6f9766fa4b81aef3"
checksum = "bc633605454125dec4b66843673f01c7df2b89479b32e0ed634e43a91cff62a5"
dependencies = [
"autocfg",
"hashbrown",

View File

@ -24,7 +24,7 @@ version = "1.0"
version = "1.0"
[dependencies.indexmap]
version = "1.6"
version = "1.7"
[dependencies.thiserror]
version = "1.0"

View File

@ -54,8 +54,8 @@ impl<'a> ExpressionNode<'a> for ArrayInitExpression<'a> {
}
fn const_value(&self) -> Option<ConstValue> {
// not implemented due to performance concerns
None
let element = self.element.get().const_value()?;
Some(ConstValue::Array(vec![element; self.len]))
}
fn is_consty(&self) -> bool {

View File

@ -22,7 +22,7 @@ path = "../input"
version = "1.5.2"
[dependencies.indexmap]
version = "1.6.2"
version = "1.7.0"
features = [ "serde-1" ]
[dependencies.pest]

40
ast/src/errors/ast.rs Normal file
View File

@ -0,0 +1,40 @@
// Copyright (C) 2019-2021 Aleo Systems Inc.
// This file is part of the Leo library.
// The Leo library is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// The Leo library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use crate::{FormattedError, LeoError, ReducerError, Span};
#[derive(Debug, Error)]
pub enum AstError {
#[error("{}", _0)]
Error(#[from] FormattedError),
#[error("{}", _0)]
IOError(#[from] std::io::Error),
#[error("{}", _0)]
ReducerError(#[from] ReducerError),
#[error("{}", _0)]
SerdeJsonError(#[from] ::serde_json::Error),
}
impl LeoError for AstError {}
impl AstError {
fn _new_from_span(message: String, span: &Span) -> Self {
AstError::Error(FormattedError::new_from_span(message, span))
}
}

View File

@ -34,10 +34,4 @@ impl CombinerError {
Self::new_from_span(message, span)
}
pub fn illegal_compound_array_range(span: &Span) -> Self {
let message = "Illegal compound assignement with array range".to_string();
Self::new_from_span(message, span)
}
}

View File

@ -14,6 +14,9 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
pub mod ast;
pub use ast::*;
pub mod canonicalization;
pub use canonicalization::*;

View File

@ -86,7 +86,7 @@ impl Ast {
}
/// Mutates the program ast by performing canonicalization on it.
pub fn canonicalize(&mut self) -> Result<(), ReducerError> {
pub fn canonicalize(&mut self) -> Result<(), AstError> {
self.ast = ReconstructingDirector::new(Canonicalizer::default()).reduce_program(self.as_repr())?;
Ok(())
}
@ -101,12 +101,20 @@ impl Ast {
}
/// Serializes the ast into a JSON string.
pub fn to_json_string(&self) -> Result<String, serde_json::Error> {
serde_json::to_string_pretty(&self.ast)
pub fn to_json_string(&self) -> Result<String, AstError> {
Ok(serde_json::to_string_pretty(&self.ast)?)
}
pub fn to_json_file(&self, mut path: std::path::PathBuf, file_name: &str) -> Result<(), AstError> {
path.push(file_name);
let file = std::fs::File::create(path)?;
let writer = std::io::BufWriter::new(file);
serde_json::to_writer_pretty(writer, &self.ast)?;
Ok(())
}
/// Deserializes the JSON string into an ast.
pub fn from_json_string(json: &str) -> Result<Self, serde_json::Error> {
pub fn from_json_string(json: &str) -> Result<Self, AstError> {
let ast: Program = serde_json::from_str(json)?;
Ok(Self { ast })
}

View File

@ -55,6 +55,14 @@ impl Canonicalizer {
span: span.clone(),
}));
}
AssigneeAccess::ArrayRange(start, stop) => {
left = Box::new(Expression::ArrayRangeAccess(ArrayRangeAccessExpression {
array: left,
left: start.map(Box::new),
right: stop.map(Box::new),
span: span.clone(),
}));
}
AssigneeAccess::Tuple(positive_number, _) => {
left = Box::new(Expression::TupleAccess(TupleAccessExpression {
tuple: left,
@ -69,7 +77,6 @@ impl Canonicalizer {
span: span.clone(),
}));
}
_ => return Err(ReducerError::from(CombinerError::illegal_compound_array_range(&span))),
}
}
@ -593,19 +600,13 @@ impl ReconstructingReducer for Canonicalizer {
value: Expression,
) -> Result<AssignStatement, ReducerError> {
match value {
Expression::Binary(binary_expr) if assign.operation == AssignOperation::Assign => Ok(AssignStatement {
operation: AssignOperation::Assign,
assignee,
value: Expression::Binary(binary_expr),
span: assign.span.clone(),
}),
Expression::Binary(binary_expr) if assign.operation != AssignOperation::Assign => {
value if assign.operation != AssignOperation::Assign => {
let left = self.canonicalize_accesses(
Expression::Identifier(assignee.identifier.clone()),
&assignee.accesses,
&assign.span,
)?;
let right = Box::new(Expression::Binary(binary_expr));
let right = Box::new(value);
let op = self.compound_operation_converstion(&assign.operation)?;
let new_value = Expression::Binary(BinaryExpression {
@ -622,36 +623,12 @@ impl ReconstructingReducer for Canonicalizer {
span: assign.span.clone(),
})
}
Expression::Value(value_expr) if assign.operation != AssignOperation::Assign => {
let left = self.canonicalize_accesses(
Expression::Identifier(assignee.identifier.clone()),
&assignee.accesses,
&assign.span,
)?;
let right = Box::new(Expression::Value(value_expr));
let op = self.compound_operation_converstion(&assign.operation)?;
let new_value = Expression::Binary(BinaryExpression {
left,
right,
op,
span: assign.span.clone(),
});
Ok(AssignStatement {
operation: AssignOperation::Assign,
assignee,
value: new_value,
span: assign.span.clone(),
})
}
Expression::ArrayInline(_) => Ok(AssignStatement {
value => Ok(AssignStatement {
operation: AssignOperation::Assign,
assignee,
value,
span: assign.span.clone(),
}),
_ => Ok(assign.clone()),
}
}

View File

@ -86,7 +86,7 @@ version = "1.3"
version = "0.4.2"
[dependencies.indexmap]
version = "1.6.2"
version = "1.7.0"
features = [ "serde-1" ]
[dependencies.pest]

View File

@ -15,7 +15,6 @@
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
//! Compiles a Leo program from a file path.
use crate::{
constraints::{generate_constraints, generate_test_constraints},
errors::CompilerError,
@ -23,6 +22,8 @@ use crate::{
GroupType,
Output,
OutputFile,
TheoremOptions,
TypeInferencePhase,
};
pub use leo_asg::{new_context, AsgContext as Context, AsgContext};
use leo_asg::{Asg, AsgPass, FormattedError, Program as AsgProgram};
@ -66,6 +67,7 @@ pub struct Compiler<'a, F: PrimeField, G: GroupType<F>> {
context: AsgContext<'a>,
asg: Option<AsgProgram<'a>>,
options: CompilerOptions,
proof_options: TheoremOptions,
_engine: PhantomData<F>,
_group: PhantomData<G>,
}
@ -80,6 +82,7 @@ impl<'a, F: PrimeField, G: GroupType<F>> Compiler<'a, F, G> {
output_directory: PathBuf,
context: AsgContext<'a>,
options: Option<CompilerOptions>,
proof_options: Option<TheoremOptions>,
) -> Self {
Self {
program_name: package_name.clone(),
@ -90,6 +93,7 @@ impl<'a, F: PrimeField, G: GroupType<F>> Compiler<'a, F, G> {
asg: None,
context,
options: options.unwrap_or_default(),
proof_options: proof_options.unwrap_or_default(),
_engine: PhantomData,
_group: PhantomData,
}
@ -108,8 +112,16 @@ impl<'a, F: PrimeField, G: GroupType<F>> Compiler<'a, F, G> {
output_directory: PathBuf,
context: AsgContext<'a>,
options: Option<CompilerOptions>,
proof_options: Option<TheoremOptions>,
) -> Result<Self, CompilerError> {
let mut compiler = Self::new(package_name, main_file_path, output_directory, context, options);
let mut compiler = Self::new(
package_name,
main_file_path,
output_directory,
context,
options,
proof_options,
);
compiler.parse_program()?;
@ -139,8 +151,16 @@ impl<'a, F: PrimeField, G: GroupType<F>> Compiler<'a, F, G> {
state_path: &Path,
context: AsgContext<'a>,
options: Option<CompilerOptions>,
proof_options: Option<TheoremOptions>,
) -> Result<Self, CompilerError> {
let mut compiler = Self::new(package_name, main_file_path, output_directory, context, options);
let mut compiler = Self::new(
package_name,
main_file_path,
output_directory,
context,
options,
proof_options,
);
compiler.parse_input(input_string, input_path, state_string, state_path)?;
@ -218,11 +238,19 @@ impl<'a, F: PrimeField, G: GroupType<F>> Compiler<'a, F, G> {
pub fn parse_program_from_string(&mut self, program_string: &str) -> Result<(), CompilerError> {
// Use the parser to construct the abstract syntax tree (ast).
let mut ast = parse_ast(self.main_file_path.to_str().unwrap_or_default(), program_string)?;
let mut ast: leo_ast::Ast = parse_ast(self.main_file_path.to_str().unwrap_or_default(), program_string)?;
if self.proof_options.initial {
ast.to_json_file(self.output_directory.clone(), "inital_ast.json")?;
}
// Preform compiler optimization via canonicalizing AST if its enabled.
if self.options.canonicalization_enabled {
ast.canonicalize()?;
if self.proof_options.canonicalized {
ast.to_json_file(self.output_directory.clone(), "canonicalization_ast.json")?;
}
}
// Store the main program file.
@ -238,6 +266,13 @@ impl<'a, F: PrimeField, G: GroupType<F>> Compiler<'a, F, G> {
&mut leo_imports::ImportParser::new(self.main_file_path.clone()),
)?;
if self.proof_options.type_inferenced {
let new_ast = TypeInferencePhase::default()
.phase_ast(&self.program, &asg.clone().into_repr())
.expect("Failed to produce type inference ast.");
new_ast.to_json_file(self.output_directory.clone(), "type_inferenced_ast.json")?;
}
tracing::debug!("ASG generation complete");
// Store the ASG.

View File

@ -16,7 +16,7 @@
use crate::errors::{ExpressionError, FunctionError, ImportError, StatementError};
use leo_asg::{AsgConvertError, FormattedError};
use leo_ast::{LeoError, ReducerError};
use leo_ast::{AstError, LeoError};
use leo_input::InputParserError;
use leo_parser::SyntaxError;
use leo_state::LocalDataVerificationError;
@ -62,7 +62,7 @@ pub enum CompilerError {
AsgConvertError(#[from] AsgConvertError),
#[error("{}", _0)]
ReducerError(#[from] ReducerError),
AstError(#[from] AstError),
#[error("{}", _0)]
StatementError(#[from] StatementError),

View File

@ -36,3 +36,20 @@ impl Default for CompilerOptions {
}
}
}
#[derive(Clone)]
pub struct TheoremOptions {
pub initial: bool,
pub canonicalized: bool,
pub type_inferenced: bool,
}
impl Default for TheoremOptions {
fn default() -> Self {
Self {
initial: false,
canonicalized: false,
type_inferenced: false,
}
}
}

View File

@ -40,85 +40,171 @@ impl<'a, F: PrimeField, G: GroupType<F>> ConstrainedProgram<'a, F, G> {
mut context: ResolverContext<'a, 'b, F, G>,
index: &'a Expression<'a>,
) -> Result<(), StatementError> {
if context.input.len() != 1 {
return Err(StatementError::array_assign_interior_index(&context.span));
}
let input = match context.input.remove(0) {
ConstrainedValue::Array(old) => old,
_ => return Err(StatementError::array_assign_index(&context.span)),
};
let input_len = context.input.len();
let index_resolved = self.enforce_index(cs, index, &context.span)?;
if let Some(index) = index_resolved.to_usize() {
if index >= input.len() {
Err(StatementError::array_assign_index_bounds(
index,
input.len(),
&context.span,
))
} else {
let target = input.get_mut(index).unwrap();
if !context.from_range && input_len == 1 {
match context.input.remove(0) {
ConstrainedValue::Array(input) => {
if let Some(index) = index_resolved.to_usize() {
if index >= input.len() {
Err(StatementError::array_assign_index_bounds(
index,
input.len(),
&context.span,
))
} else {
let target = input.get_mut(index).unwrap();
if context.remaining_accesses.is_empty() {
self.enforce_assign_context(cs, &context, target)
} else {
context.input = vec![target];
self.resolve_target_access(cs, context)
}
}
} else {
let span = index.span().cloned().unwrap_or_default();
{
let array_len: u32 = input
.len()
.try_into()
.map_err(|_| ExpressionError::array_length_out_of_bounds(&span))?;
self.array_bounds_check(cs, &&index_resolved, array_len, &span)?;
}
for (i, item) in input.iter_mut().enumerate() {
let namespace_string = format!(
"evaluate dyn array assignment eq {} {}:{}",
i, span.line_start, span.col_start
);
let eq_namespace = cs.ns(|| namespace_string);
let index_bounded = i
.try_into()
.map_err(|_| ExpressionError::array_index_out_of_legal_bounds(&span))?;
let const_index = ConstInt::U32(index_bounded).cast_to(&index_resolved.get_type());
let index_comparison = index_resolved
.evaluate_equal(eq_namespace, &Integer::new(&const_index))
.map_err(|_| ExpressionError::cannot_evaluate("==".to_string(), &span))?;
let mut unique_namespace = cs.ns(|| {
format!(
"select array dyn assignment {} {}:{}",
i, span.line_start, span.col_start
)
});
let temp_item = {
let mut item = item.clone();
let mut new_context = ResolverContext {
input: vec![&mut item],
span: context.span.clone(),
target_value: context.target_value.clone(),
remaining_accesses: context.remaining_accesses,
indicator: context.indicator,
operation: context.operation,
from_range: false,
};
if context.remaining_accesses.is_empty() {
let item = new_context.input.remove(0);
self.enforce_assign_context(&mut unique_namespace, &new_context, item)?;
} else {
self.resolve_target_access(&mut unique_namespace, new_context)?;
}
item
};
let value = ConstrainedValue::conditionally_select(
unique_namespace,
&index_comparison,
&temp_item,
&item,
)
.map_err(|e| ExpressionError::cannot_enforce("conditional select".to_string(), e, &span))?;
*item = value;
}
Ok(())
}
}
_ => Err(StatementError::array_assign_interior_index(&context.span)),
}
} else if context.from_range && input_len != 0 {
context.from_range = false;
if let Some(index) = index_resolved.to_usize() {
if index >= input_len {
return Err(StatementError::array_assign_index_bounds(
index,
input_len,
&context.span,
));
}
let target = context.input.remove(index);
if context.remaining_accesses.is_empty() {
self.enforce_assign_context(cs, &context, target)
} else {
context.input = vec![target];
self.resolve_target_access(cs, context)
}
} else {
// index is input variable
let span = index.span().cloned().unwrap_or_default();
{
let array_len: u32 = context
.input
.len()
.try_into()
.map_err(|_| ExpressionError::array_length_out_of_bounds(&span))?;
self.array_bounds_check(cs, &&index_resolved, array_len, &span)?;
}
for (i, item) in context.input.iter_mut().enumerate() {
let namespace_string = format!(
"evaluate dyn array assignment eq {} {}:{}",
i, span.line_start, span.col_start
);
let eq_namespace = cs.ns(|| namespace_string);
let index_bounded = i
.try_into()
.map_err(|_| ExpressionError::array_index_out_of_legal_bounds(&span))?;
let const_index = ConstInt::U32(index_bounded).cast_to(&index_resolved.get_type());
let index_comparison = index_resolved
.evaluate_equal(eq_namespace, &Integer::new(&const_index))
.map_err(|_| ExpressionError::cannot_evaluate("==".to_string(), &span))?;
let mut unique_namespace = cs.ns(|| {
format!(
"select array dyn assignment {} {}:{}",
i, span.line_start, span.col_start
)
});
let temp_item = {
let mut item = item.clone();
let mut new_context = ResolverContext {
input: vec![&mut item],
span: context.span.clone(),
target_value: context.target_value.clone(),
remaining_accesses: context.remaining_accesses,
indicator: context.indicator,
operation: context.operation,
from_range: false,
};
if context.remaining_accesses.is_empty() {
let item = new_context.input.remove(0);
self.enforce_assign_context(&mut unique_namespace, &new_context, item)?;
} else {
self.resolve_target_access(&mut unique_namespace, new_context)?;
}
item
};
let value =
ConstrainedValue::conditionally_select(unique_namespace, &index_comparison, &temp_item, &item)
.map_err(|e| ExpressionError::cannot_enforce("conditional select".to_string(), e, &span))?;
**item = value;
}
Ok(())
}
} else {
let span = index.span().cloned().unwrap_or_default();
{
let array_len: u32 = input
.len()
.try_into()
.map_err(|_| ExpressionError::array_length_out_of_bounds(&span))?;
self.array_bounds_check(cs, &&index_resolved, array_len, &span)?;
}
for (i, item) in input.iter_mut().enumerate() {
let namespace_string = format!(
"evaluate dyn array assignment eq {} {}:{}",
i, span.line_start, span.col_start
);
let eq_namespace = cs.ns(|| namespace_string);
let index_bounded = i
.try_into()
.map_err(|_| ExpressionError::array_index_out_of_legal_bounds(&span))?;
let const_index = ConstInt::U32(index_bounded).cast_to(&index_resolved.get_type());
let index_comparison = index_resolved
.evaluate_equal(eq_namespace, &Integer::new(&const_index))
.map_err(|_| ExpressionError::cannot_evaluate("==".to_string(), &span))?;
let mut unique_namespace = cs.ns(|| {
format!(
"select array dyn assignment {} {}:{}",
i, span.line_start, span.col_start
)
});
let temp_item = {
let mut item = item.clone();
let mut new_context = ResolverContext {
input: vec![&mut item],
span: context.span.clone(),
target_value: context.target_value.clone(),
remaining_accesses: context.remaining_accesses,
indicator: context.indicator,
operation: context.operation,
};
if context.remaining_accesses.is_empty() {
let item = new_context.input.remove(0);
self.enforce_assign_context(&mut unique_namespace, &new_context, item)?;
} else {
self.resolve_target_access(&mut unique_namespace, new_context)?;
}
item
};
let value =
ConstrainedValue::conditionally_select(unique_namespace, &index_comparison, &temp_item, &item)
.map_err(|e| ExpressionError::cannot_enforce("conditional select".to_string(), e, &span))?;
*item = value;
}
Ok(())
Err(StatementError::array_assign_interior_index(&context.span))
}
}
}

View File

@ -50,8 +50,9 @@ impl<'a, F: PrimeField, G: GroupType<F>> ConstrainedProgram<'a, F, G> {
.transpose()?;
let start_index = start_index.unwrap_or(0);
if context.input.len() == 1 {
if !context.from_range {
// not a range of a range
context.from_range = true;
match context.input.remove(0) {
ConstrainedValue::Array(old) => {
let stop_index = stop_index.unwrap_or(old.len());

View File

@ -35,6 +35,7 @@ struct ResolverContext<'a, 'b, F: PrimeField, G: GroupType<F>> {
remaining_accesses: &'b [&'b AssignAccess<'a>],
indicator: &'b Boolean,
operation: AssignOperation,
from_range: bool,
}
impl<'a, F: PrimeField, G: GroupType<F>> ConstrainedProgram<'a, F, G> {
@ -68,8 +69,10 @@ impl<'a, F: PrimeField, G: GroupType<F>> ConstrainedProgram<'a, F, G> {
self.enforce_assign_context(cs, &context, input)?;
return Ok(());
}
let access = context.remaining_accesses[context.remaining_accesses.len() - 1];
context.remaining_accesses = &context.remaining_accesses[..context.remaining_accesses.len() - 1];
match access {
AssignAccess::ArrayRange(start, stop) => {
self.resolve_target_access_array_range(cs, context, start.get(), stop.get())
@ -99,6 +102,7 @@ impl<'a, F: PrimeField, G: GroupType<F>> ConstrainedProgram<'a, F, G> {
remaining_accesses: &accesses[..],
indicator,
operation: assignee.operation,
from_range: false,
})?;
*self.get_mut(variable.id).unwrap() = target;
Ok(())

File diff suppressed because it is too large Load Diff

View File

@ -12,9 +12,15 @@ function main() {
x += 20;
console.assert(x == 30u32);
let y = [1u8, 2u8];
let w = 3u32;
w += x;
console.assert(w == 33u32);
let y = [1u8, 2u8, 3, 4];
y[0] += 3u8;
y[0..3][1] *= 3;
console.assert(y[0] == 4u8);
console.assert(y[1] == 6u8);
let z = (1u8, 2u8);
z.1 += 3u8;

View File

@ -718,11 +718,450 @@
"content": " x = \"test1\" == \"test2\";"
}
}
},
{
"Definition": {
"declaration_type": "Let",
"variable_names": [
{
"mutable": true,
"identifier": "{\"name\":\"z\",\"span\":\"{\\\"line_start\\\":6,\\\"line_stop\\\":6,\\\"col_start\\\":9,\\\"col_stop\\\":10,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" let z = [1u8, 2u8, 3u8, 4u8];\\\"}\"}",
"span": {
"line_start": 6,
"line_stop": 6,
"col_start": 9,
"col_stop": 10,
"path": "",
"content": " let z = [1u8, 2u8, 3u8, 4u8];"
}
}
],
"type_": null,
"value": {
"ArrayInline": {
"elements": [
{
"Expression": {
"Value": {
"Integer": [
"U8",
"1",
{
"line_start": 6,
"line_stop": 6,
"col_start": 14,
"col_stop": 17,
"path": "",
"content": " let z = [1u8, 2u8, 3u8, 4u8];"
}
]
}
}
},
{
"Expression": {
"Value": {
"Integer": [
"U8",
"2",
{
"line_start": 6,
"line_stop": 6,
"col_start": 19,
"col_stop": 22,
"path": "",
"content": " let z = [1u8, 2u8, 3u8, 4u8];"
}
]
}
}
},
{
"Expression": {
"Value": {
"Integer": [
"U8",
"3",
{
"line_start": 6,
"line_stop": 6,
"col_start": 24,
"col_stop": 27,
"path": "",
"content": " let z = [1u8, 2u8, 3u8, 4u8];"
}
]
}
}
},
{
"Expression": {
"Value": {
"Integer": [
"U8",
"4",
{
"line_start": 6,
"line_stop": 6,
"col_start": 29,
"col_stop": 32,
"path": "",
"content": " let z = [1u8, 2u8, 3u8, 4u8];"
}
]
}
}
}
],
"span": {
"line_start": 6,
"line_stop": 6,
"col_start": 13,
"col_stop": 33,
"path": "",
"content": " let z = [1u8, 2u8, 3u8, 4u8];"
}
}
},
"span": {
"line_start": 6,
"line_stop": 6,
"col_start": 5,
"col_stop": 33,
"path": "",
"content": " let z = [1u8, 2u8, 3u8, 4u8];"
}
}
},
{
"Assign": {
"operation": "Assign",
"assignee": {
"identifier": "{\"name\":\"z\",\"span\":\"{\\\"line_start\\\":7,\\\"line_stop\\\":7,\\\"col_start\\\":5,\\\"col_stop\\\":6,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" z[0..\\\\\\\"test\\\\\\\" == \\\\\\\"test\\\\\\\"? 2 : 2] = x[0..2];\\\"}\"}",
"accesses": [
{
"ArrayRange": [
{
"Value": {
"Implicit": [
"0",
{
"line_start": 7,
"line_stop": 7,
"col_start": 7,
"col_stop": 8,
"path": "",
"content": " z[0..\"test\" == \"test\"? 2 : 2] = x[0..2];"
}
]
}
},
{
"Ternary": {
"condition": {
"Binary": {
"left": {
"ArrayInline": {
"elements": [
{
"Expression": {
"Value": {
"Char": {
"character": {
"Scalar": 116
},
"span": {
"line_start": 7,
"line_stop": 7,
"col_start": 11,
"col_stop": 12,
"path": "",
"content": " z[0..\"test\" == \"test\"? 2 : 2] = x[0..2];"
}
}
}
}
},
{
"Expression": {
"Value": {
"Char": {
"character": {
"Scalar": 101
},
"span": {
"line_start": 7,
"line_stop": 7,
"col_start": 12,
"col_stop": 13,
"path": "",
"content": " z[0..\"test\" == \"test\"? 2 : 2] = x[0..2];"
}
}
}
}
},
{
"Expression": {
"Value": {
"Char": {
"character": {
"Scalar": 115
},
"span": {
"line_start": 7,
"line_stop": 7,
"col_start": 13,
"col_stop": 14,
"path": "",
"content": " z[0..\"test\" == \"test\"? 2 : 2] = x[0..2];"
}
}
}
}
},
{
"Expression": {
"Value": {
"Char": {
"character": {
"Scalar": 116
},
"span": {
"line_start": 7,
"line_stop": 7,
"col_start": 14,
"col_stop": 15,
"path": "",
"content": " z[0..\"test\" == \"test\"? 2 : 2] = x[0..2];"
}
}
}
}
}
],
"span": {
"line_start": 7,
"line_stop": 7,
"col_start": 10,
"col_stop": 16,
"path": "",
"content": " z[0..\"test\" == \"test\"? 2 : 2] = x[0..2];"
}
}
},
"right": {
"ArrayInline": {
"elements": [
{
"Expression": {
"Value": {
"Char": {
"character": {
"Scalar": 116
},
"span": {
"line_start": 7,
"line_stop": 7,
"col_start": 21,
"col_stop": 22,
"path": "",
"content": " z[0..\"test\" == \"test\"? 2 : 2] = x[0..2];"
}
}
}
}
},
{
"Expression": {
"Value": {
"Char": {
"character": {
"Scalar": 101
},
"span": {
"line_start": 7,
"line_stop": 7,
"col_start": 22,
"col_stop": 23,
"path": "",
"content": " z[0..\"test\" == \"test\"? 2 : 2] = x[0..2];"
}
}
}
}
},
{
"Expression": {
"Value": {
"Char": {
"character": {
"Scalar": 115
},
"span": {
"line_start": 7,
"line_stop": 7,
"col_start": 23,
"col_stop": 24,
"path": "",
"content": " z[0..\"test\" == \"test\"? 2 : 2] = x[0..2];"
}
}
}
}
},
{
"Expression": {
"Value": {
"Char": {
"character": {
"Scalar": 116
},
"span": {
"line_start": 7,
"line_stop": 7,
"col_start": 24,
"col_stop": 25,
"path": "",
"content": " z[0..\"test\" == \"test\"? 2 : 2] = x[0..2];"
}
}
}
}
}
],
"span": {
"line_start": 7,
"line_stop": 7,
"col_start": 20,
"col_stop": 26,
"path": "",
"content": " z[0..\"test\" == \"test\"? 2 : 2] = x[0..2];"
}
}
},
"op": "Eq",
"span": {
"line_start": 7,
"line_stop": 7,
"col_start": 10,
"col_stop": 26,
"path": "",
"content": " z[0..\"test\" == \"test\"? 2 : 2] = x[0..2];"
}
}
},
"if_true": {
"Value": {
"Implicit": [
"2",
{
"line_start": 7,
"line_stop": 7,
"col_start": 28,
"col_stop": 29,
"path": "",
"content": " z[0..\"test\" == \"test\"? 2 : 2] = x[0..2];"
}
]
}
},
"if_false": {
"Value": {
"Implicit": [
"2",
{
"line_start": 7,
"line_stop": 7,
"col_start": 32,
"col_stop": 33,
"path": "",
"content": " z[0..\"test\" == \"test\"? 2 : 2] = x[0..2];"
}
]
}
},
"span": {
"line_start": 7,
"line_stop": 7,
"col_start": 10,
"col_stop": 33,
"path": "",
"content": " z[0..\"test\" == \"test\"? 2 : 2] = x[0..2];"
}
}
}
]
}
],
"span": {
"line_start": 7,
"line_stop": 7,
"col_start": 5,
"col_stop": 34,
"path": "",
"content": " z[0..\"test\" == \"test\"? 2 : 2] = x[0..2];"
}
},
"value": {
"ArrayRangeAccess": {
"array": {
"Identifier": "{\"name\":\"x\",\"span\":\"{\\\"line_start\\\":7,\\\"line_stop\\\":7,\\\"col_start\\\":37,\\\"col_stop\\\":38,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" z[0..\\\\\\\"test\\\\\\\" == \\\\\\\"test\\\\\\\"? 2 : 2] = x[0..2];\\\"}\"}"
},
"left": {
"Value": {
"Implicit": [
"0",
{
"line_start": 7,
"line_stop": 7,
"col_start": 39,
"col_stop": 40,
"path": "",
"content": " z[0..\"test\" == \"test\"? 2 : 2] = x[0..2];"
}
]
}
},
"right": {
"Value": {
"Implicit": [
"2",
{
"line_start": 7,
"line_stop": 7,
"col_start": 42,
"col_stop": 43,
"path": "",
"content": " z[0..\"test\" == \"test\"? 2 : 2] = x[0..2];"
}
]
}
},
"span": {
"line_start": 7,
"line_stop": 7,
"col_start": 37,
"col_stop": 44,
"path": "",
"content": " z[0..\"test\" == \"test\"? 2 : 2] = x[0..2];"
}
}
},
"span": {
"line_start": 7,
"line_stop": 7,
"col_start": 5,
"col_stop": 44,
"path": "",
"content": " z[0..\"test\" == \"test\"? 2 : 2] = x[0..2];"
}
}
}
],
"span": {
"line_start": 1,
"line_stop": 6,
"line_stop": 8,
"col_start": 17,
"col_stop": 2,
"path": "",
@ -731,11 +1170,11 @@
},
"span": {
"line_start": 1,
"line_stop": 6,
"line_stop": 8,
"col_start": 1,
"col_stop": 2,
"path": "",
"content": "function main() {\n...\n}\n\n\n"
"content": "function main() {\n...\n}\n\n\n\n\n"
}
}
}

View File

@ -3,4 +3,6 @@ function main() {
s[..2] = "he";
let x = false;
x = "test1" == "test2";
let z = [1u8, 2u8, 3u8, 4u8];
z[0.."test" == "test"? 2 : 2] = x[0..2];
}

View File

@ -52,7 +52,7 @@ fn new_compiler() -> EdwardsTestCompiler {
let path = PathBuf::from("/test/src/main.leo");
let output_dir = PathBuf::from(TEST_OUTPUT_DIRECTORY);
EdwardsTestCompiler::new(program_name, path, output_dir, make_test_context(), None)
EdwardsTestCompiler::new(program_name, path, output_dir, make_test_context(), None, None)
}
pub(crate) fn parse_program(program_string: &str) -> Result<EdwardsTestCompiler, CompilerError> {

View File

@ -40,7 +40,7 @@ fn new_compiler(path: PathBuf) -> EdwardsTestCompiler {
let program_name = "test".to_string();
let output_dir = PathBuf::from("/output/");
EdwardsTestCompiler::new(program_name, path, output_dir, make_test_context(), None)
EdwardsTestCompiler::new(program_name, path, output_dir, make_test_context(), None, None)
}
pub(crate) fn parse_program(program_string: &str) -> Result<EdwardsTestCompiler, CompilerError> {

View File

@ -18,30 +18,62 @@ DRAFT
# Summary
This proposal aims to improve the import management system in Leo programs to
make program environment more reproducible and predictable. To achieve that
we suggest few changes to Leo CLI and Manifest:
make the program environment more reproducible, predictable, and compatible. To achieve
that, we suggest a few changes to the Leo CLI and Manifest:
- add a "dependencies" section to Leo Manifest and add a command to pull those dependencies;
- allow custom names for imports to manually resolve name conflicts;
- store imports as they are called in Leo Manifest;
- add "curve" and "proving system" sections to the Manifest;
- add "include" and "exclude" parameters for "proving system" and "curve";
Later this solution can be improved by adding a lock-file which would lock
imported packages based on both their contents and version.
# Motivation
What problems does it solve? What is the background?
The current design of imports does not provide any guarantees on what's stored
in program imports and published with the program to Aleo Package Manager.
When a dependency is "added," it is stored inside the imports folder, and it is possible
to manually edit and/or add packages in this folder.
Current state:
- imports are published with a program to Aleo PM;
- we treat programs as files with no verification of imports (they can be changed locally and published in that state);
- name collisions cannot be resolved; a new import overwrites existing;
Also, imports are stored under the package name which makes it impossible to import
two different packages with the same name.
TBD
Another important detail in the scope of this proposal is that in future Leo
programs will have the ability to be run with different proving systems
and curves, possibly creating incompatibility between programs written
for different proving systems or curves. To make a foundation for these features,
imports need to be managed with include/exclude lists for allowed (compatible)
proving systems and curves.
# Design
## Leo Manifest
## Leo Manifest - target section
To lay the foundation for the future of the Leo ecosystem and start integrating
information about program compatibility, we suggest adding two new fields to
the new `[target]` section of the Leo Manifest: `proving_system` and `curve`.
Currently, the Leo compiler only supports `Groth16` for the proving system and `Bls12_377`
for the curve, they are meant to be default values in Leo Manifest.
```toml
[project]
name = "first"
version = "0.1.0"
description = "The first package"
license = "MIT"
[target]
curve = "Bls12_377"
proving_system = "Groth16"
```
These fields are meant to be used to determine whether an imported program is
compatible with the original when support for different curves and proving systems
is added.
## Leo Manifest - dependencies
Dependencies section:
@ -49,13 +81,27 @@ Dependencies section:
[dependencies]
name = { author = "author", package = "package", version = "version" }
# alternative way of adding dependency record
[dependencies.name]
author = "author"
package = "package"
version = "1.0"
```
TBD
### Parameters description
The `name` field sets the name of the dependency in Leo code. That way we allow
the developer to resolve collisions in import names manually. So, for example,
if a developer is adding the `howard/silly-sudoku` package to their program, they
might define its in-code name as `sudoku` and import it with that name:
```ts
import sudoku;
```
`package`, `author` and `version` are package name, package author and
version respectively. They are already used as arguments in `leo add`
command, so these fields are already understood by the Leo developers.
## Leo CLI
@ -63,21 +109,62 @@ To support updated Manifest new command should be added to Leo CLI.
```bash
# pull imports
leo pull
leo install
```
Alternatively it can be called `pull`.
```
leo pull
```
## Imports Restructurization
One of the goals of proposed changes is to allow importing packages with the
same name but different authors. To resolve name conflict we suggest storing
imports as they are named in Leo Manifest file (Leo.toml).
same name but different authors. This has to be solved not only on the
language level but also on the level of storing program imports.
We suggest using the set of all 3 possible program identifiers for the import
folder name: `author-package@version`. Later it can be extended to
include a hash for the version, but having the initial set already solves name
collisions.
<!-- The suggested change is soft. It changes only the way imports are organized
with minimal changes to other parts of the language.
So, updated imports would look like:
We can consider implementing imports/username-package storage, but imports
will have to be resolved on a different level in compiler. -->
```
leo-program
├── Leo.toml
├── README.md
├── imports
│ ├── author1-program@0.1.0
│ │ └── ...
│ ├── author2-program2@1.0.4
│ └── ...
├── inputs
│ └── ...
└── src
└── main.leo
```
This change would also affect the way imports are being processed on the ASG
level, and we'd need to add an imports map as an argument to the Leo compiler.
The Leo Manifest's dependencies sections needs to be parsed and passed as
a hashmap to the compiler:
```
first-program => author1-program@0.1.0
second-program => author2-program2@1.0.4
```
## Recursive Dependencies
This improvement introduces recursive dependencies. To solve this case preemptively
Leo CLI needs to check the dependency tree and throw an error when a recursive dependency
is met. We suggest implementing simple dependency tree checking while fetching
imports - if an imported dependency is met at a higher level - abort the execution.
Later this solution can be improved by building a lock file containing all the
information on program dependencies, and the file itself will have enough data
to track and prevent recursion.
# Drawbacks
@ -85,6 +172,8 @@ This change might require the update of already published programs on Aleo PM du
Leo Manifest change. However it is possible to implement it in a backward-compatible
way.
It also introduces the danger of having recursive dependencies, this problem is addressed in the Design section above.
# Effect on Ecosystem
Proposed improvement provides safety inside Leo programs and should not affect

View File

@ -0,0 +1,231 @@
# Leo RFC 004: Integer Type Casts
## Authors
- Max Bruce
- Collin Chin
- Alessandro Coglio
- Eric McCarthy
- Jon Pavlik
- Damir Shamanaev
- Damon Sicore
- Howard Wu
## Status
DRAFT
# Summary
This proposal provides support for casts among integer types in Leo.
The syntax is similar to Rust.
Two possible semantics are discussed:
_value-preserving casts_,
which just serve to change types
but cause errors when values are not representable in the new types;
and _value-changing casts_,
which never cause errors but may change the mathematical values.
# Motivation
Currently the Leo integer types are "siloed":
arithmetic integer operations require operands of the same type
and return results of the same type.
There are no implicit or explicit ways to turn, for example,
a `u8` into a `u16`, even though
every non-negative integer that fits in 8 bits also fits in 16 bits.
However, the ability to convert values between different (integer) types
is a useful feature that is normally found in programming languages.
# Design
## Background
Recall that Leo supports the following _integer types_:
```
u8 u16 u32 u64 u128
i8 i16 i32 i64 i128
```
## Scope
This RFC proposes type casts between any two integer types,
but not between two non-integer types
or between an integer type and a non-integer type.
This RFC does not propose any implicit cast,
even widening casts (i.e. upcasts)
from a type to another type with the same signedness
and with the same or larger size
(e.g. from `u8` to `u16`).
All the type casts must be explicit.
## Syntax and Static Semantics
The proposed syntax is
```
<expression> as <integer-type>
```
where `<expression>` must have an integer type.
The ABNF grammar is modified as follows:
```
; add this rule:
cast-expression = unary-expression
/ cast-expression %s"as" integer-type
; modify this rule:
exponential-expression = cast-expression
/ cast-expression "**" exponential-expression
```
There is no need to modify the `keyword` rule
because it already includes `as` as one of the keywords.
Note the use of `integer-type` in the `cast-expression` rule;
an alternative is to use `type` there
and check post-parsing that the type is in fact an integer one.
The above grammar rules imply that casts bind
tighter than binary operators and looser than unary operators.
For instance,
```
x + - y as u8
```
is like
```
x + ((- y) as u8)
```
This precedence is the same as in Rust:
see [here](https://doc.rust-lang.org/stable/reference/expressions.html#expression-precedence).
## Dynamic Semantics
When the mathematical integer value of the expression
is representable in the type that the expression is cast to,
there is no question that the cast must succeed
and merely change the type of the Leo value,
but not its mathematical integer value.
This is always the case when the cast is to a type
with the same signedness and with the same or larger size.
This is also the case when
the cast is to a type whose range does not cover the range of the source type
but the value in question is in the intersection of the two ranges.
When the mathematical integer value of the expression
is not representable in the type that the expression is cast to,
there are two possible approaches, discussed below.
### Value-Preserving Casts
The first approach is to deem that situation erroneous.
That is, to require casts to always preserve the mathematical integer values.
In this approach, casts only serve to change types, never values.
When values are to be changed, separate (built-in) functions can be used,
e.g. to mask bits and achieve the same effect as
the value-changing casts discussed below.
From a point of view, this approach seems to match Leo's
treatment of potentially erroneous situations like integer overflows:
the principle is that developers should explicitly use
operations that may overflow if that is their intention,
rather than having those situations possibly occur unexpectedly.
A value-preserving cast to a type
whose range does not cover the original type's range
implicitly expresses a developer expectation that the value
is actually in the intersection of the two types' ranges,
in the same way that the use of integer addition
implicitly expresses the expectation that the addition does not overflow.
Consider this somewhat abstract example:
```
... // some computations on u32 values, which could not be done with u16
let r: u32 = ...; // this is the final result of the u32 operations above
let s: u16 = r as u16; // but r is expected to fit in u16, so we cast it here
```
With value-preserving casts, the expectation mentioned above
is checked by the Leo compiler during proof generation,
in the same way as with integer overflow.
In the example above,
if instead the variable `s` is meant to contain the low 16 bits of `r`,
e.g. in a cryptographic computation,
then the value-preserving cast should be preceded by
an explicit operation to obtain the low 16 bits, making the intent clear:
```
... // some computations on u32 values, which could not be done with u16
let r: u32 = ...; // this is the final result of the u32 operations above
let r_low16: u32 = r & 0xFFFF; // assuming we have bitwise ops and hex literals
let s: u16 = r_low16 as u16; // no value change here
```
### Value-Changing Casts
The second approach is the following:
1. `uN` to `uM` with `N < M`: just change type of value.
2. `uN` to `uM` with `N > M`: take low `M` bits of value.
3. `iN` to `iM` with `N < M`: just change type of value.
4. `iN` to `iM` with `N > M`: take low `M` bits of value.
5. `uN` to `iM` with `N < M`: zero-extend to `M` bits and re-interpret as signed.
6. `uN` to `iM` with `N > M`: take low `M` bits and re-interpret as signed.
7. `uN` to `iN`: re-interpret as signed
8. `iN` to `uM` with `N < M`: sign-extend to `M` bits and re-interpret as unsigned.
9. `iN` to `uM` with `N > M`: take low `M` bits and re-interpret as unsigned.
10. `iN` to `uN`: re-interpret as unsigned
Except for the 1st, 3rd, and 5th cases, the value may change.
(In the 5th case, zero-extension of a `uN` value to `M > N` bits always
yields a result within the non-negative range of `iM`, so the value is preserved.)
This approach is common in other programming languages.
However, it should be noted that other programming languages
typically do not check for overflow in integer operations either
(at least, not for production code).
Presumably, the behavior of type casts in those programming languages
is motivated by efficiency of execution, at least in part.
Since in Leo the input data is available at compile time,
considerations that apply to typical programming languages
do not necessarily apply to Leo.
Back to the somewhat abstract example in the section on value-preserving casts,
with value-changing casts, the expectation that the final result fits in `u16`
would have to be checked with explicit code:
```
... // some computations on u32 values, which could not be done with u16
let r: u32 = ...; // this is the final result of the u32 operations above
if (r > 0xFFFF) {
... // error
}
let s: u16 = r as u16; // could change value in principle, but does not here
```
However, it would be easy for a developer to neglect to add the checking code,
and thus have the Leo code silently produce an unexpected result.
## Compilation to R1CS
It should be possible to compile Leo casts to the same R1CS constraints
whether we choose the value-preserving or value-changing semantics.
If the R1CS constraints represent Leo integers as bits,
the bits of the new value can be determined from the bits of the old value,
with additional zero or sign extension bits when needed.
This is clearly the value-changing behavior.
With the value-preserving behavior,
all casts for the known inputs are checked,
and thus the value-changing behavior coincides
with the value-preserving behavior if the checks succeed.
Thus, if the behavior of the R1CS constraints is "don't care"
for Leo program inputs that cause errors (such as cast errors),
the compilation strategy for value-changing casts
should be also adequate for value-preserving casts.
# Drawbacks
This proposal does not appear to bring any drawbacks,
other than making the language and compiler inevitably more complex.
But the benefit of supporting type casts justifies the extra complexity.
# Effect on Ecosystem
This proposal does not appear to have any direct effects on the ecosystem.
# Alternatives
The 'Design' section above already discusses two alternative semantics.
After we settle on one, the other one could be mentioned in this section.

View File

@ -30,7 +30,7 @@ path = "../parser"
version = "1.5.2"
[dependencies.indexmap]
version = "1.6.2"
version = "1.7.0"
features = [ "serde-1" ]
[dependencies.thiserror]

View File

@ -19,6 +19,7 @@ use leo_compiler::{
compiler::{thread_leaked_context, Compiler},
group::targets::edwards_bls12::EdwardsGroupType,
CompilerOptions,
TheoremOptions,
};
use leo_package::{
inputs::*,
@ -43,6 +44,14 @@ pub struct BuildOptions {
pub disable_code_elimination: bool,
#[structopt(long, help = "Disable all compiler optimizations")]
pub disable_all_optimizations: bool,
#[structopt(long, help = "Writes all theorem input AST files.")]
pub enable_all_theorems: bool,
#[structopt(long, help = "Writes AST files needed for the initial theorem before any changes.")]
pub enable_initial_theorem: bool,
#[structopt(long, help = "Writes AST files needed for canonicalization theorem.")]
pub enable_canonicalized_theorem: bool,
#[structopt(long, help = "Writes AST files needed for type inference theorem.")]
pub enable_type_inferenced_theorem: bool,
}
impl Default for BuildOptions {
@ -51,6 +60,10 @@ impl Default for BuildOptions {
disable_constant_folding: true,
disable_code_elimination: true,
disable_all_optimizations: true,
enable_all_theorems: false,
enable_initial_theorem: false,
enable_canonicalized_theorem: false,
enable_type_inferenced_theorem: false,
}
}
}
@ -73,6 +86,24 @@ impl From<BuildOptions> for CompilerOptions {
}
}
impl From<BuildOptions> for TheoremOptions {
fn from(options: BuildOptions) -> Self {
if options.enable_all_theorems {
TheoremOptions {
initial: true,
canonicalized: true,
type_inferenced: true,
}
} else {
TheoremOptions {
initial: options.enable_initial_theorem,
canonicalized: options.enable_canonicalized_theorem,
type_inferenced: options.enable_type_inferenced_theorem,
}
}
}
}
/// Compile and build program command.
#[derive(StructOpt, Debug)]
#[structopt(setting = structopt::clap::AppSettings::ColoredHelp)]
@ -141,6 +172,7 @@ impl Command for Build {
&state_string,
&state_path,
thread_leaked_context(),
Some(self.compiler_options.clone().into()),
Some(self.compiler_options.into()),
)?;

View File

@ -112,6 +112,7 @@ impl Command for Test {
output_directory.clone(),
thread_leaked_context(),
Some(self.compiler_options.clone().into()),
Some(self.compiler_options.clone().into()),
)?;
let temporary_program = program;

View File

@ -316,7 +316,7 @@ mod cli_tests {
let path = Some(dir.path("new"));
assert!(run_cmd("leo new test", &path).is_ok());
assert!(run_cmd("leo new test", &path).is_err()); // 2nd time
assert!(run_cmd("leo new test", &path).is_err()); // 2nd time
assert!(run_cmd("leo new wrong_name", &path).is_err());
}

View File

@ -43,7 +43,7 @@ version = "1.0"
version = "0.1"
[dependencies.indexmap]
version = "1.6"
version = "1.7"
[dependencies.tendril]
version = "0.4"

View File

@ -41,7 +41,7 @@ features = [ "testnet1" ]
version = "0.6.0"
[dependencies.indexmap]
version = "1.6.2"
version = "1.7.0"
features = [ "serde-1" ]
[dependencies.rand]

View File

@ -0,0 +1,30 @@
/*
namespace: Compile
expectation: Pass
input_file:
- input/complex_access.in
*/
function main (a: [u8; 8], b: u32, c: [[u8; 3]; 3], d: [(u8, u32); 1], e: [u8; (3, 4)] ) -> bool {
a[0..3][b] = 93;
a[2..6][1] = 87;
a[2..6][1] *= 2;
a[2..3] = [42u8];
a[6..][0] = 43u8;
a[0..1][0..1] = [200];
c[0..2][0] = [1u8; 3];
c[1..][1][1..2][0] = 126;
c[1..][0] = [42, 43, 44];
d[..1][0].1 = 1;
e[0..][0] = [22; 4];
e[0..][0][0] = 33;
return
a == [200u8, 93, 42, 174, 5, 6, 43, 8]
&& c == [[1u8, 1, 1], [42, 43, 44], [7, 126, 9]]
&& d == [(0u8, 1u32)]
&& e == [[33u8, 22, 22, 22], [0, 0, 0, 0], [0, 0, 0, 0]];
}

View File

@ -0,0 +1,9 @@
[main]
a: [u8; 8] = [1u8, 2, 3, 4, 5, 6, 7, 8];
b: u32 = 1;
c: [[u8; 3]; 3] = [[1u8, 2, 3], [4, 5, 6], [7, 8, 9]];
d: [(u8, u32); 1] = [(0u8, 0u32)];
e: [u8; (3, 4)] = [0u8; (3, 4)];
[registers]
out: bool = true;

View File

@ -0,0 +1,10 @@
/*
namespace: Compile
expectation: Pass
input_file: input/three_ones.in
*/
function main (a: [u8; 3]) -> bool {
let y = a[0..[0u8; 2] == [0u8; 2]? 2u8 : 2u8];
return y == [1u8, 1];
}

View File

@ -0,0 +1,18 @@
---
namespace: Compile
expectation: Pass
outputs:
- circuit:
num_public_variables: 0
num_private_variables: 637
num_constraints: 662
at: 9f1bcfcac007139af57f04172704330d348767a9c51fe8ce15fb60f457fa6337
bt: b6500ddf37d10958fdc014dcb2f54b37a2c684f05b7f0aa4776556331db8c924
ct: a15c712d74487a80767116103c38fa7277066705d0c7f1f9902a6016466706f0
output:
- input_file: input/complex_access.in
output:
registers:
out:
type: bool
value: "true"

View File

@ -0,0 +1,18 @@
---
namespace: Compile
expectation: Pass
outputs:
- circuit:
num_public_variables: 0
num_private_variables: 39
num_constraints: 39
at: 6f3ffff33f4e513211e7a55cc9edcab3bc2d2a146c2b280981308bb69165f86f
bt: adde6ad1b603a50c986ec99b6edcc972138bb6239e58a1b88e931bc0165b2e8e
ct: 867e3f6ee1f26af954e7868633a595d319267d91afc5d7e2074fe641fabde1d6
output:
- input_file: input/three_ones.in
output:
registers:
x:
type: bool
value: "true"

View File

@ -4,11 +4,11 @@ expectation: Pass
outputs:
- circuit:
num_public_variables: 0
num_private_variables: 1087
num_constraints: 1350
at: aae29cb6b4a71a5cf49d3de006b415b9a7818a2e23191819692e8d2ee69d8be2
bt: 11262f31fcaa7950be43eded328287305e7cbcb73c19455808f08f38116cfdd6
ct: 01dfba9e0754ad9890117694c9b03f4646642140aa7b6c393b2e264e701c323e
num_private_variables: 1277
num_constraints: 1604
at: c5d2d5bbb85d0f7ba48170fdb652aff057db6d1babf378d8a548af874e0734ff
bt: 726f3c39d11da5bc8487b8b7228a55cb7819a47a9b674d41f9c756c6927fb55a
ct: 7c4917152b59cb9caa30bad7d5119a3e36c40f7a783fdb8ff602dd5447573cc7
output:
- input_file: input/index1.in
output: