console.log refactor and concat test; still has an ASG or TIPhase reducer bug

This commit is contained in:
gluax 2021-05-22 21:15:07 -04:00
parent bce10cc885
commit 7145a751d9
20 changed files with 417 additions and 447 deletions

View File

@ -19,7 +19,7 @@ use crate::{Identifier, Span};
use serde::{Deserialize, Serialize};
use tendril::StrTendril;
#[derive(Clone, Serialize, Deserialize, PartialEq, Eq)]
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)]
pub struct Annotation {
pub span: Span,
pub name: Identifier,

View File

@ -35,6 +35,12 @@ impl ReducerError {
ReducerError::Error(FormattedError::new_from_span(message, span))
}
pub fn failed_to_convert_tendril_to_char(tendril: String, span: &Span) -> Self {
let message = format!("Failed to convert tendril `{}` to char", tendril);
Self::new_from_span(message, span)
}
pub fn impossible_console_assert_call(span: &Span) -> Self {
let message = "Console::Assert cannot be matched here, its handled in another case.".to_string();

View File

@ -18,14 +18,43 @@ use crate::{Expression, Node, Span};
use serde::{Deserialize, Serialize};
use std::fmt;
use tendril::StrTendril;
#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug)]
pub enum FormatStringPart {
Const(#[serde(with = "crate::common::tendril_json")] StrTendril),
Const(char),
Container,
}
impl FormatStringPart {
pub fn from_string(string: Vec<char>) -> Vec<Self> {
let mut parts = Vec::new();
let mut in_container = false;
let mut i = 0;
while i < string.len() {
let character = string[i];
match character {
'{' if !in_container => in_container = true,
'}' if in_container => {
in_container = false;
parts.push(FormatStringPart::Container);
}
_ if in_container => {
in_container = false;
parts.push(FormatStringPart::Const('{'));
continue;
}
_ => parts.push(FormatStringPart::Const(character)),
}
i += 1;
}
parts
}
}
#[derive(Clone, PartialEq, Eq, Serialize, Deserialize, Debug)]
pub struct FormatString {
pub parts: Vec<FormatStringPart>,
@ -41,8 +70,8 @@ impl fmt::Display for FormatString {
self.parts
.iter()
.map(|x| match x {
FormatStringPart::Const(x) => x,
FormatStringPart::Container => "{}",
FormatStringPart::Const(x) => x.to_string(),
FormatStringPart::Container => "{}".to_string(),
})
.collect::<Vec<_>>()
.join("")

View File

@ -51,8 +51,8 @@ impl<'a, F: PrimeField, G: GroupType<F>> ConstrainedProgram<'a, F, G> {
let mut parameters = executed_containers.iter();
for part in formatted.parts.iter() {
match part {
FormatStringPart::Const(c) => out.push(&**c),
FormatStringPart::Container => out.push(&**parameters.next().unwrap()),
FormatStringPart::Const(c) => out.push(c.to_string()),
FormatStringPart::Container => out.push(parameters.next().unwrap().to_string()),
}
}

View File

@ -116,8 +116,10 @@ impl<R: ReconstructingReducer, O: CombinerOptions> CombineAstAsgDirector<R, O> {
}
pub fn reduce_type(&mut self, ast: &AstType, asg: &AsgType, span: &Span) -> Result<AstType, ReducerError> {
println!("Hellllloooo");
let new = match (ast, asg) {
(AstType::Array(ast_type, ast_dimensions), AsgType::Array(asg_type, asg_dimensions)) => {
println!("astd {}, asgd {}", ast_dimensions, asg_dimensions);
if self.options.type_inference_enabled() {
AstType::Array(
Box::new(self.reduce_type(ast_type, asg_type, span)?),
@ -434,8 +436,12 @@ impl<R: ReconstructingReducer, O: CombinerOptions> CombineAstAsgDirector<R, O> {
ConstValue::Char(_) => {
if let Some(c) = tendril.chars().next() {
new = ValueExpression::Char(c, span.clone());
} else {
return Err(ReducerError::failed_to_convert_tendril_to_char(
tendril.to_string(),
span,
));
}
// TODO RETURN ERR
}
_ => unimplemented!(), // impossible?
}

View File

@ -1,310 +1,309 @@
{
"name": "",
"expected_input": [],
"imports": [],
"circuits": {},
"global_consts": {},
"functions": {
"{\"name\":\"main\",\"span\":\"{\\\"line_start\\\":1,\\\"line_stop\\\":1,\\\"col_start\\\":10,\\\"col_stop\\\":14,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\"function main() {\\\"}\"}": {
"annotations": [],
"identifier": "{\"name\":\"main\",\"span\":\"{\\\"line_start\\\":1,\\\"line_stop\\\":1,\\\"col_start\\\":10,\\\"col_stop\\\":14,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\"function main() {\\\"}\"}",
"input": [],
"output": {
"Tuple": []
},
"block": {
"statements": [
{
"Definition": {
"declaration_type": "Let",
"variable_names": [
{
"mutable": true,
"identifier": "{\"name\":\"s\",\"span\":\"{\\\"line_start\\\":2,\\\"line_stop\\\":2,\\\"col_start\\\":9,\\\"col_stop\\\":10,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" let s = `Hello, World!`;\\\"}\"}",
"span": {
"line_start": 2,
"line_stop": 2,
"col_start": 9,
"col_stop": 10,
"path": "",
"content": " let s = `Hello, World!`;"
}
"name": "",
"expected_input": [],
"imports": [],
"circuits": {},
"global_consts": {},
"functions": {
"{\"name\":\"main\",\"span\":\"{\\\"line_start\\\":1,\\\"line_stop\\\":1,\\\"col_start\\\":10,\\\"col_stop\\\":14,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\"function main() {\\\"}\"}": {
"annotations": [],
"identifier": "{\"name\":\"main\",\"span\":\"{\\\"line_start\\\":1,\\\"line_stop\\\":1,\\\"col_start\\\":10,\\\"col_stop\\\":14,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\"function main() {\\\"}\"}",
"input": [],
"output": {
"Tuple": []
},
"block": {
"statements": [
{
"Definition": {
"declaration_type": "Let",
"variable_names": [
{
"mutable": true,
"identifier": "{\"name\":\"s\",\"span\":\"{\\\"line_start\\\":2,\\\"line_stop\\\":2,\\\"col_start\\\":9,\\\"col_stop\\\":10,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" let s = \\\\\\\"Hello, World!\\\\\\\";\\\"}\"}",
"span": {
"line_start": 2,
"line_stop": 2,
"col_start": 9,
"col_stop": 10,
"path": "",
"content": " let s = \"Hello, World!\";"
}
],
"type_": {
"Array": [
"Char",
[
{
"value": "13"
}
]
]
},
"value": {
"ArrayInline": {
"elements": [
{
"Expression": {
"Value": {
"Char": [
"H",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = `Hello, World!`;"
}
]
}
}
},
{
"Expression": {
"Value": {
"Char": [
"e",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = `Hello, World!`;"
}
]
}
}
},
{
"Expression": {
"Value": {
"Char": [
"l",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = `Hello, World!`;"
}
]
}
}
},
{
"Expression": {
"Value": {
"Char": [
"l",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = `Hello, World!`;"
}
]
}
}
},
{
"Expression": {
"Value": {
"Char": [
"o",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = `Hello, World!`;"
}
]
}
}
},
{
"Expression": {
"Value": {
"Char": [
",",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = `Hello, World!`;"
}
]
}
}
},
{
"Expression": {
"Value": {
"Char": [
" ",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = `Hello, World!`;"
}
]
}
}
},
{
"Expression": {
"Value": {
"Char": [
"W",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = `Hello, World!`;"
}
]
}
}
},
{
"Expression": {
"Value": {
"Char": [
"o",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = `Hello, World!`;"
}
]
}
}
},
{
"Expression": {
"Value": {
"Char": [
"r",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = `Hello, World!`;"
}
]
}
}
},
{
"Expression": {
"Value": {
"Char": [
"l",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = `Hello, World!`;"
}
]
}
}
},
{
"Expression": {
"Value": {
"Char": [
"d",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = `Hello, World!`;"
}
]
}
}
},
{
"Expression": {
"Value": {
"Char": [
"!",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = `Hello, World!`;"
}
]
}
}
}
],
"span": {
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = `Hello, World!`;"
}
}
},
"span": {
"line_start": 2,
"line_stop": 2,
"col_start": 5,
"col_stop": 28,
"path": "",
"content": " let s = `Hello, World!`;"
}
],
"type_": {
"Array": [
"Char",
[
{
"value": "13"
}
]
]
},
"value": {
"ArrayInline": {
"elements": [
{
"Expression": {
"Value": {
"Char": [
"H",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = \"Hello, World!\";"
}
]
}
}
},
{
"Expression": {
"Value": {
"Char": [
"e",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = \"Hello, World!\";"
}
]
}
}
},
{
"Expression": {
"Value": {
"Char": [
"l",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = \"Hello, World!\";"
}
]
}
}
},
{
"Expression": {
"Value": {
"Char": [
"l",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = \"Hello, World!\";"
}
]
}
}
},
{
"Expression": {
"Value": {
"Char": [
"o",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = \"Hello, World!\";"
}
]
}
}
},
{
"Expression": {
"Value": {
"Char": [
",",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = \"Hello, World!\";"
}
]
}
}
},
{
"Expression": {
"Value": {
"Char": [
" ",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = \"Hello, World!\";"
}
]
}
}
},
{
"Expression": {
"Value": {
"Char": [
"W",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = \"Hello, World!\";"
}
]
}
}
},
{
"Expression": {
"Value": {
"Char": [
"o",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = \"Hello, World!\";"
}
]
}
}
},
{
"Expression": {
"Value": {
"Char": [
"r",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = \"Hello, World!\";"
}
]
}
}
},
{
"Expression": {
"Value": {
"Char": [
"l",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = \"Hello, World!\";"
}
]
}
}
},
{
"Expression": {
"Value": {
"Char": [
"d",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = \"Hello, World!\";"
}
]
}
}
},
{
"Expression": {
"Value": {
"Char": [
"!",
{
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = \"Hello, World!\";"
}
]
}
}
}
],
"span": {
"line_start": 2,
"line_stop": 2,
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " let s = \"Hello, World!\";"
}
}
},
"span": {
"line_start": 2,
"line_stop": 2,
"col_start": 5,
"col_stop": 28,
"path": "",
"content": " let s = \"Hello, World!\";"
}
}
],
"span": {
"line_start": 1,
"line_stop": 3,
"col_start": 17,
"col_stop": 2,
"path": "",
"content": "function main() {\n...\n}"
}
},
],
"span": {
"line_start": 1,
"line_stop": 3,
"col_start": 1,
"col_start": 17,
"col_stop": 2,
"path": "",
"content": "function main() {\n...\n}"
}
},
"span": {
"line_start": 1,
"line_stop": 3,
"col_start": 1,
"col_stop": 2,
"path": "",
"content": "function main() {\n...\n}"
}
}
}
}

View File

@ -1,3 +1,3 @@
function main() {
let s = `Hello, World!`;
let s = "Hello, World!";
}

View File

@ -1034,14 +1034,14 @@
"variable_names": [
{
"mutable": false,
"identifier": "{\"name\":\"o\",\"span\":\"{\\\"line_start\\\":24,\\\"line_stop\\\":24,\\\"col_start\\\":9,\\\"col_stop\\\":10,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" const o = `Hello, World!`;\\\"}\"}",
"identifier": "{\"name\":\"o\",\"span\":\"{\\\"line_start\\\":24,\\\"line_stop\\\":24,\\\"col_start\\\":9,\\\"col_stop\\\":10,\\\"path\\\":\\\"\\\",\\\"content\\\":\\\" const o = \\\\\\\"Hello, World!\\\\\\\";\\\"}\"}",
"span": {
"line_start": 24,
"line_stop": 24,
"col_start": 9,
"col_stop": 10,
"path": "",
"content": " const o = `Hello, World!`;"
"content": " const o = \"Hello, World!\";"
}
}
],
@ -1069,7 +1069,7 @@
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " const o = `Hello, World!`;"
"content": " const o = \"Hello, World!\";"
}
]
}
@ -1086,7 +1086,7 @@
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " const o = `Hello, World!`;"
"content": " const o = \"Hello, World!\";"
}
]
}
@ -1103,7 +1103,7 @@
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " const o = `Hello, World!`;"
"content": " const o = \"Hello, World!\";"
}
]
}
@ -1120,7 +1120,7 @@
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " const o = `Hello, World!`;"
"content": " const o = \"Hello, World!\";"
}
]
}
@ -1137,7 +1137,7 @@
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " const o = `Hello, World!`;"
"content": " const o = \"Hello, World!\";"
}
]
}
@ -1154,7 +1154,7 @@
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " const o = `Hello, World!`;"
"content": " const o = \"Hello, World!\";"
}
]
}
@ -1171,7 +1171,7 @@
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " const o = `Hello, World!`;"
"content": " const o = \"Hello, World!\";"
}
]
}
@ -1188,7 +1188,7 @@
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " const o = `Hello, World!`;"
"content": " const o = \"Hello, World!\";"
}
]
}
@ -1205,7 +1205,7 @@
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " const o = `Hello, World!`;"
"content": " const o = \"Hello, World!\";"
}
]
}
@ -1222,7 +1222,7 @@
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " const o = `Hello, World!`;"
"content": " const o = \"Hello, World!\";"
}
]
}
@ -1239,7 +1239,7 @@
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " const o = `Hello, World!`;"
"content": " const o = \"Hello, World!\";"
}
]
}
@ -1256,7 +1256,7 @@
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " const o = `Hello, World!`;"
"content": " const o = \"Hello, World!\";"
}
]
}
@ -1273,7 +1273,7 @@
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " const o = `Hello, World!`;"
"content": " const o = \"Hello, World!\";"
}
]
}
@ -1286,7 +1286,7 @@
"col_start": 13,
"col_stop": 28,
"path": "",
"content": " const o = `Hello, World!`;"
"content": " const o = \"Hello, World!\";"
}
}
},
@ -1296,7 +1296,7 @@
"col_start": 3,
"col_stop": 28,
"path": "",
"content": " const o = `Hello, World!`;"
"content": " const o = \"Hello, World!\";"
}
}
}

View File

@ -21,5 +21,5 @@ function main() {
const l = (1u8, 1u8, true);
const m = Foo {};
const n = 'a';
const o = `Hello, World!`;
const o = "Hello, World!";
}

View File

@ -226,16 +226,19 @@ impl ParserContext {
///
pub fn parse_formatted_string(&mut self) -> SyntaxResult<FormatString> {
let start_span;
let parts = match self.expect_any()? {
let string = match self.expect_any()? {
SpannedToken {
token: Token::FormatString(parts),
token: Token::StringLiteral(chars),
span,
} => {
start_span = span;
parts
chars
}
SpannedToken { token, span } => return Err(SyntaxError::unexpected_str(&token, "formatted string", &span)),
};
let parts = FormatStringPart::from_string(string);
let mut parameters = Vec::new();
while self.eat(Token::Comma).is_some() {
let param = self.parse_expression()?;
@ -243,13 +246,7 @@ impl ParserContext {
}
Ok(FormatString {
parts: parts
.into_iter()
.map(|x| match x {
crate::FormatStringPart::Const(value) => FormatStringPart::Const(value),
crate::FormatStringPart::Container => FormatStringPart::Container,
})
.collect(),
parts,
span: &start_span + parameters.last().map(|x| x.span()).unwrap_or(&start_span),
parameters,
})

View File

@ -14,7 +14,7 @@
// You should have received a copy of the GNU General Public License
// along with the Leo library. If not, see <https://www.gnu.org/licenses/>.
use crate::tokenizer::{FormatStringPart, Token};
use crate::tokenizer::Token;
use leo_ast::Span;
use serde::{Deserialize, Serialize};
use tendril::StrTendril;
@ -168,7 +168,7 @@ impl Token {
let input = input_tendril[..].as_bytes();
match input[0] {
x if x.is_ascii_whitespace() => return (1, None),
b'`' => {
b'"' => {
let mut i = 1;
let mut len: u32 = 1;
let mut start = 1;
@ -181,7 +181,7 @@ impl Token {
while i < input.len() {
if !in_escape {
if input[i] == b'`' {
if input[i] == b'"' {
end = true;
break;
} else if input[i] == b'\\' {
@ -245,51 +245,6 @@ impl Token {
return (i + 1, Some(Token::StringLiteral(string)));
}
b'"' => {
let mut i = 1;
let mut in_escape = false;
let mut start = 1usize;
let mut segments = Vec::new();
while i < input.len() {
if !in_escape {
if input[i] == b'"' {
break;
}
if input[i] == b'\\' {
in_escape = !in_escape;
} else if i < input.len() - 1 && input[i] == b'{' {
if i < input.len() - 2 && input[i + 1] == b'{' {
i += 2;
continue;
} else if input[i + 1] != b'}' {
i += 1;
continue;
}
if start < i {
segments.push(FormatStringPart::Const(
input_tendril.subtendril(start as u32, (i - start) as u32),
));
}
segments.push(FormatStringPart::Container);
start = i + 2;
i = start;
continue;
}
} else {
in_escape = false;
}
i += 1;
}
if i == input.len() {
return (0, None);
}
if start < i {
segments.push(FormatStringPart::Const(
input_tendril.subtendril(start as u32, (i - start) as u32),
));
}
return (i + 1, Some(Token::FormatString(segments)));
}
b'\'' => {
let mut i = 1;
let mut in_escape = false;

View File

@ -18,22 +18,6 @@ use serde::{Deserialize, Serialize};
use std::fmt;
use tendril::StrTendril;
/// Parts of a formatted string for logging to the console.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum FormatStringPart {
Const(#[serde(with = "leo_ast::common::tendril_json")] StrTendril),
Container,
}
impl fmt::Display for FormatStringPart {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
FormatStringPart::Const(c) => write!(f, "{}", c),
FormatStringPart::Container => write!(f, "{{}}"),
}
}
}
/// Represents all valid Leo syntax tokens.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub enum Token {
@ -41,7 +25,6 @@ pub enum Token {
// Literals
CommentLine(#[serde(with = "leo_ast::common::tendril_json")] StrTendril),
CommentBlock(#[serde(with = "leo_ast::common::tendril_json")] StrTendril),
FormatString(Vec<FormatStringPart>),
StringLiteral(Vec<char>),
Ident(#[serde(with = "leo_ast::common::tendril_json")] StrTendril),
Int(#[serde(with = "leo_ast::common::tendril_json")] StrTendril),
@ -208,14 +191,6 @@ impl fmt::Display for Token {
match self {
CommentLine(s) => write!(f, "{}", s),
CommentBlock(s) => write!(f, "{}", s),
FormatString(parts) => {
// todo escapes
write!(f, "\"")?;
for part in parts.iter() {
part.fmt(f)?;
}
write!(f, "\"")
}
StringLiteral(content) => {
write!(f, "\"")?;
for character in content {

View File

@ -21,7 +21,7 @@ const use_another_const = basic + 1;
const foo = Foo { width: 10, height: 20 };
const uno = uno();
const character = 'a';
const hello = `Hello, World!`;
const hello = "Hello, World!";
circuit Foo {
width: u32,
@ -51,5 +51,5 @@ function main(a: u32) -> bool {
&& foo.height == 20u32
&& uno == 1u32 // function test
&& character == 'a' // char test
&& hello == `Hello, World!`;
&& hello == "Hello, World!";
}

View File

@ -10,13 +10,13 @@ circuit Foo {
}
function takes_string(s: [char; 13]) -> bool {
return s == `Hello, World!`;
return s == "Hello, World!";
}
function main(s1: [char; 13]) -> [char; 13] {
let f = Foo { s1 };
let b = takes_string(s1);
let result = f.s1 == `Hello, World!` ? s1 : `abcdefghjklmn`;
let result = f.s1 == "Hello, World!" ? s1 : "abcdefghjklmn";
return result;
}

View File

@ -7,6 +7,9 @@ input_file:
*/
function main(s1: [char; 13], s2: [char; 4]) -> bool {
let hello: [char; 13] = `Hello, World!`;
return hello == s1 && `nope` != s2;
let hello: [char; 13] = "Hello, World!";
let part1 = "Good";
let part2 = " dog!";
let concat: [char; 9] = [...part1, ...part2];
return hello == s1 && "nope" != s2 && "es" == s2[1..3] && concat == "Good dog!";
}

View File

@ -2,5 +2,5 @@
namespace: Token
expectation: Fail
outputs:
- " --> test:1:1\n |\n 1 | ``\n | ^\n |\n = unexpected token: '`'"
- " --> test:1:1\n |\n 1 | `Hello world!\n | ^\n |\n = unexpected token: '`'"
- " --> test:1:1\n |\n 1 | \"\"\n | ^\n |\n = unexpected token: '\"'"
- " --> test:1:1\n |\n 1 | \"Hello world!\n | ^\n |\n = unexpected token: '\"'"

View File

@ -15,7 +15,7 @@ outputs:
col_start: 1
col_stop: 9
path: test
content: "`string`"
content: "\"string\""
- Value:
String:
- - a
@ -41,7 +41,7 @@ outputs:
col_start: 1
col_stop: 21
path: test
content: "`another { } string`"
content: "\"another { } string\""
- Value:
String:
- - "{"
@ -58,7 +58,7 @@ outputs:
col_start: 1
col_stop: 12
path: test
content: "`{ ] [ ; a`"
content: "\"{ ] [ ; a\""
- Value:
String:
- - ࿺
@ -67,7 +67,7 @@ outputs:
col_start: 1
col_stop: 10
path: test
content: "`\\u{FFA}`"
content: "\"\\u{FFA}\""
- Value:
String:
- - 򯫺
@ -76,7 +76,7 @@ outputs:
col_start: 1
col_stop: 12
path: test
content: "`\\u{afafa}`"
content: "\"\\u{afafa}\""
- Value:
String:
- - 꾯
@ -85,7 +85,7 @@ outputs:
col_start: 1
col_stop: 11
path: test
content: "`\\u{afaf}`"
content: "\"\\u{afaf}\""
- Value:
String:
- - ૺ
@ -94,7 +94,7 @@ outputs:
col_start: 1
col_stop: 10
path: test
content: "`\\u{afa}`"
content: "\"\\u{afa}\""
- Value:
String:
- - ¯
@ -103,7 +103,7 @@ outputs:
col_start: 1
col_stop: 9
path: test
content: "`\\u{af}`"
content: "\"\\u{af}\""
- Value:
String:
- - "\n"
@ -112,7 +112,7 @@ outputs:
col_start: 1
col_stop: 8
path: test
content: "`\\u{a}`"
content: "\"\\u{a}\""
- Value:
String:
- - "\n"
@ -121,7 +121,7 @@ outputs:
col_start: 1
col_stop: 7
path: test
content: "`\\x0A`"
content: "\"\\x0A\""
- Value:
String:
- - a
@ -148,4 +148,4 @@ outputs:
col_start: 1
col_stop: 28
path: test
content: "`aa \\\\ \\\" \\n aa \\t \\r \\0`"
content: "\"aa \\\\ \\\" \\n aa \\t \\r \\0\""

View File

@ -3,19 +3,19 @@ namespace: Token
expectation: Pass
*/
`string`
"string"
`another { } string`
"another { } string"
`{ ] [ ; a`
"{ ] [ ; a"
`\u{FFA}`
`\u{afafa}`
`\u{afaf}`
`\u{afa}`
`\u{af}`
`\u{a}`
"\u{FFA}"
"\u{afafa}"
"\u{afaf}"
"\u{afa}"
"\u{af}"
"\u{a}"
`\x0A`
"\x0A"
`aa \\ \" \n aa \t \r \0`
"aa \\ \" \n aa \t \r \0"

View File

@ -3,6 +3,6 @@ namespace: Token
expectation: Fail
*/
``
""
`Hello world!
"Hello world!

View File

@ -3,19 +3,19 @@ namespace: ParseExpression
expectation: Pass
*/
`string`
"string"
`another { } string`
"another { } string"
`{ ] [ ; a`
"{ ] [ ; a"
`\u{FFA}`
`\u{afafa}`
`\u{afaf}`
`\u{afa}`
`\u{af}`
`\u{a}`
"\u{FFA}"
"\u{afafa}"
"\u{afaf}"
"\u{afa}"
"\u{af}"
"\u{a}"
`\x0A`
"\x0A"
`aa \\ \" \n aa \t \r \0`
"aa \\ \" \n aa \t \r \0"