Mirror of https://github.com/roc-lang/roc.git (synced 2024-11-13 09:49:11 +03:00)

Commit c8fd1836c9: "clippy+fmt"
Parent: ec1e2cd1d0
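Almost every hunk below is mechanical `cargo fmt`/clippy output rather than a behavior change. As a rough, self-contained sketch of the two rewrites that dominate the diff (the `Shape` and `describe` names are hypothetical and not taken from the Roc source), rustfmt collapses a signature that fits within the line limit onto one line, and breaks a call that overflows it into one argument per line with a trailing comma:

```rust
// Illustrative sketch only: `Shape` and `describe` are hypothetical names and
// do not come from the Roc codebase.

#[derive(Debug)]
struct Shape {
    label: String,
    newlines_at_end: usize,
}

// rustfmt keeps this signature on one line because it fits within the default
// 100-column width; the old hand-formatted code often split such signatures.
fn describe(label: &str, suffix: &str, newlines_at_end: usize) -> Shape {
    Shape {
        label: format!("{label}{suffix}"),
        newlines_at_end,
    }
}

fn main() {
    // This call does not fit on one line, so rustfmt breaks it into one
    // argument per line with a trailing comma, which is the shape most of the
    // reformatted `add_node(...)` / `common_text_node(...)` call sites take.
    let shape = describe(
        "a deliberately long label string so that the call exceeds the width",
        " (bracket)",
        0,
    );
    println!("{shape:?}");
}
```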
Diff summary, per file (the mirror page preserves hunk headers but not file paths):

@@ -1,6 +1,13 @@ ... @@ -132,5 +148,15 @@ (markup common-node constructors):
- The `use crate::{...}` and `use super::{...}` imports are split one item per line and sorted: `slow_pool::{MarkNodeId, SlowPool}`, `syntax_highlight::HighlightStyle`; `attribute::Attributes`, `nodes::MarkupNode`, `nodes::{self, make_nested_mn}`.
- Signatures that fit on one line are collapsed: `pub fn new_blank_mn_w_nls(nr_of_newlines: usize) -> MarkupNode`, `pub fn new_operator_mn(content: String) -> MarkupNode`, `pub fn new_comments_mn(comment: String, newlines_at_end: usize) -> MarkupNode`.
- Over-long calls are wrapped to one argument per line: the `common_text_node(...)` calls in `new_right_square_mn` and `new_arrow_mn`, and the `make_nested_mn(vec![...], ...)` calls in `new_assign_mn` and `new_if_expr_mn`.
- Multi-parameter signatures that no longer fit are wrapped one parameter per line: `new_assign_mn(val_name_mn_id, equals_mn_id, expr_mark_node_id)` and `new_module_var_mn(module_name_id, dot_id, ident_id)`.
- `common_text_node` gains a trailing comma after its `newlines_at_end: usize` parameter.
@@ -7,8 +7,8 @@ ... @@ -28,7 +32,14 @@ (AST-to-markup entry point, `ast_to_mark_nodes`):
- Inside `use crate::{ markup::{...} }`, `mark_id_ast_id_map::MarkIdAstIdMap` is sorted before `nodes::set_parent_for_all` and its missing trailing comma is added.
- The `vec![header_to_markup(&ast.header, mark_node_pool, &mut mark_id_ast_id_map)]` initializer and the `def2_to_markup(env, def2, def_id, mark_node_pool, &mut mark_id_ast_id_map, interns)?` call are wrapped to one argument per line.
@@ -1,7 +1,9 @@ ... @@ -101,7 +124,12 @@ (`def2_to_markup`):
- The `markup::{...}` import list is split one item per line and sorted: `common_nodes::new_blank_mn_w_nls`, `mark_id_ast_id_map::MarkIdAstIdMap`, `nodes::MarkupNode`, `top_level_def::{assignment_mark_node, tld_w_comments_mark_node}`.
- Long calls to `assignment_mark_node(...)`, the recursive `def2_to_markup(...)` calls in the `CommentsBefore` and `CommentsAfter` arms, and the trailing `add_node(full_mark_node, ...)` calls are wrapped to one argument per line.
- The `Def2::Blank` arm loses its redundant braces and becomes `Def2::Blank => add_node(new_blank_mn_w_nls(2), ast_node_id, mark_node_pool, mark_id_ast_id_map)`, wrapped one argument per line.
@@ -6,9 +6,10 @@ ... @@ -356,8 +414,7 @@ (`expr2_to_markup`):
- `mark_id_ast_id_map::MarkIdAstIdMap` is sorted before the `nodes::{...}` import instead of trailing it.
- The `string_mark_node(...)` calls for `Expr2::Str` and `Expr2::SmallStr`, the recursive `expr2_to_markup(...)` calls in the `Call` arm, and the `add_node(...)` calls for list brackets, commas, accolades, colons, `Expr2::Blank`, `val_name_mn`, `new_equals_mn()`, and the lambda backslash are all wrapped to one argument per line; nested `children_ids.push(add_node(...))` forms are flattened into `children_ids.push(add_node(` blocks.
- `let args_with_commas: Vec<MarkupNode> = join_mark_nodes_commas(arg_mark_nodes);` is joined onto a single line.
@@ -7,8 +7,8 @@ ... @@ -185,7 +274,7 @@ (`header_to_markup`, `add_header_mn_list`, `header_mn`):
- `mark_id_ast_id_map::MarkIdAstIdMap` is sorted before `nodes::{set_parent_for_all, MarkupNode}` in the `use crate::{...}` block.
- `pub fn header_to_markup(app_header: &AppHeader, mark_node_pool: &mut SlowPool, mark_id_ast_id_map: &mut MarkIdAstIdMap) -> MarkNodeId` is wrapped to one parameter per line.
- Every long `header_mn(...)`, `header_val_mn(...)`, and `add_node(...)` call (the app, packages, imports, and provides nodes, the accolades and square brackets, the four `full_*_node` additions, and the final `header_mark_node`) is wrapped to one argument per line, and missing trailing commas after `mark_id_ast_id_map` are added.
- In `add_header_mn_list`, the `vec![provide_str, add_node(new_comma_mn(), ...)]` separator is reformatted with the `add_node` call wrapped; `fn header_mn` gains a trailing comma after `mark_id_ast_id_map: &mut MarkIdAstIdMap`.
@@ -2,14 +2,13 @@ ... @@ -20,15 +19,11 @@ (`MarkIdAstIdMap`):
- `use crate::{slow_pool::MarkNodeId, markup_error::MarkResult};` becomes the sorted `use crate::{markup_error::MarkResult, slow_pool::MarkNodeId};`.
- The struct now derives `Default` (`#[derive(Debug, Default)]`), its field gets normal spacing and a trailing comma (`map: HashMap<MarkNodeId, ASTNodeId>,`), and the now-redundant manual `impl Default for MarkIdAstIdMap` is deleted.
- In `get`, the one-line `MarkNodeIdWithoutCorrespondingASTNodeId { node_id: mn_id, keys_str: format!("{:?}", self.map.keys()) }.fail()` arm is wrapped across lines.
@@ -1,6 +1,6 @@ (markup module root):
`pub mod mark_id_ast_id_map;` moves from the end of the list into alphabetical order, between `pub mod convert;` and `pub mod nodes;`.
@@ -4,7 +4,10 @@ ... @@ -449,9 +452,7 @@ (markup `nodes`):
- The one-line `use super::{attribute::Attributes, common_nodes::new_comma_mn, mark_id_ast_id_map::{MarkIdAstIdMap}, convert::from_def2::add_node};` is reflowed, sorted, and the needless braces around `MarkIdAstIdMap` are removed.
- The `MarkupNode` method taking `mark_id_ast_id_map: &MarkIdAstIdMap` gains its trailing comma.
- `pub fn join_mark_nodes_commas(mark_nodes: Vec<MarkupNode>) -> Vec<MarkupNode>` is collapsed onto one line.
@@ -14,7 +14,9 @@ ... @@ -54,7 +65,7 @@ (top-level-def markup: `assignment_mark_node`, `tld_w_comments_mark_node`):
- `use super::{...}` is reflowed and sorted: `common_nodes::new_assign_mn, convert::from_def2::add_node, mark_id_ast_id_map::MarkIdAstIdMap`.
- The `add_node(new_equals_mn(), ...)` and `Ok(new_assign_mn(val_name_mn_id, equals_mn_id, expr_mark_node_id))` calls are wrapped to one argument per line, and the trailing comma after `mark_id_ast_id_map` in `tld_w_comments_mark_node` is added.
@@ -1,4 +1,4 @@ ... @@ -55,16 +54,14 @@ (`SlowPool`):
- `use crate::markup::{nodes::MarkupNode, mark_id_ast_id_map::MarkIdAstIdMap};` is sorted to `use crate::markup::{mark_id_ast_id_map::MarkIdAstIdMap, nodes::MarkupNode};`.
- In `debug_string`, the over-indented `let ast_node_id_str = match mark_id_ast_id_map.get(mark_node_id) { ... }` and the `ret_str.push_str(&format!( ... ))` call are reflowed to standard rustfmt layout with no behavior change.
@@ -22,7 +22,7 @@ (`HighlightStyle`):
The extra alignment spaces before the `// if, else, when` comment on the `Keyword` variant are removed.
@@ -360,8 +360,7 @@ (`can_annotation_help`):
In the `As(loc_inner, _spaces, alias_header @ TypeHeader { name, vars: loc_vars })` pattern, the `alias_header @ TypeHeader {` binding is pulled onto one line instead of being split around the `@`.
@@ -2,7 +2,7 @@ ... @@ -46,6 +48,8 @@ (`DocsError` conversions):
- `use snafu::{Snafu};` loses its needless braces: `use snafu::Snafu;`.
- The struct-literal returns `Self::WrapSyntaxError { msg: format!("{:?}", syntax_err) }` and `Self::WrapPegParseError { source: peg_parse_err }` are wrapped across lines.
@@ -1,11 +1,7 @@ (docs HTML generation):
`pub fn mark_node_to_html(mark_node: &MarkupNode, mark_node_pool: &SlowPool, buf: &mut String)` is collapsed onto a single line.
@@ -1,14 +1,14 @@ ... @@ -140,14 +136,17 @@ (docs generator: `generate_docs_html`, `syntax_highlight_expr`, `syntax_highlight_top_level_defs`):
- Imports are cleaned up: `use bumpalo::Bump;`, `use docs_error::{DocsError, DocsResult};`, `use roc_highlight::highlight_parser::{highlight_defs, highlight_expr};`, `use roc_region::all::Region;` (needless braces removed, items sorted).
- `syntax_highlight_expr` and `syntax_highlight_top_level_defs` drop their unused `<'a>` lifetime and collapse to `pub fn syntax_highlight_expr(code_str: &str) -> DocsResult<String>` and `pub fn syntax_highlight_top_level_defs(code_str: &str) -> DocsResult<String>`.
- `mark_node_to_html(&root_mark_node, ...)` loses a needless borrow, the `def_mark_nodes` iterator chain is wrapped one call per line, and trailing `},` on match-arm blocks become plain `}`.
@@ -8,13 +8,9 @@ ... @@ -23,9 +19,7 @@ (docs syntax-highlighting tests):
In `expect_html`, the wrapped `syntax_highlight_expr(code_str,)` and `syntax_highlight_top_level_defs(code_str,)` match scrutinees are collapsed to single-line calls, and stray blank lines are removed.
@@ -217,7 +217,12 @@ ... @@ -269,8 +270,11 @@ (editor `GridNodeMap`):
- The returned tuple `Ok((start_pos, end_pos, ed_model.mark_id_ast_id_map.get(node_id)?, node_id))` is wrapped one element per line.
- The split `ed_model.mark_id_ast_id_map.get(prev_pos_node_id)?` and `...get(*next_pos_node_id)?` chains are joined onto single lines.
- The `GridNodeMap::get_top_node_with_expr_id(curr_node_id, &ed_model.mark_node_pool, &ed_model.mark_id_ast_id_map)?` call is wrapped to one argument per line.
@@ -50,9 +50,11 @@ (editor `insert_new_blank`):
The chained `ed_model.grid_node_map.get_def_mark_node_id_before_line(...)` call is reformatted with its three arguments one per line.
@@ -164,7 +164,11 @@ (`EdModel`):
The `parent.get_child_indices(curr_mark_node_id, ast_node_id, &self.mark_id_ast_id_map)?` call is wrapped to one argument per line.
@@ -1003,9 +1003,7 @@ ... @@ -1046,9 +1042,7 @@ (editor input handling, `handle_new_char_expr`):
Three split `ed_model.mark_id_ast_id_map.get(prev_mark_node_id)?` and `...get(mark_parent_id)?` chains are joined onto single lines.
@@ -58,7 +58,6 @@ (`add_blank_child`):
A stray blank line after the opening brace of the `if let Some(parent_id) = parent_id_opt` block is removed.
@@ -46,13 +46,12 @@ (editor debug overlay, `build_debug_graphics`):
The `glyph_brush::OwnedText::new(format!("{}", ed_model.mark_node_pool.debug_string(&ed_model.mark_id_ast_id_map)))` expression is reflowed to rustfmt layout; the `.with_color(...)` and `.with_scale(...)` chain is unchanged.
@@ -1,15 +1,16 @@ ... @@ -138,142 +139,140 @@ (`highlight_parser`):
- Imports are split and sorted (`roc_code_markup::markup::common_nodes::{else_mn, if_mn, new_assign_mn, new_dot_mn, new_equals_mn, new_if_expr_mn, new_module_name_mn_id, new_module_var_mn, then_mn}`, `crate::tokenizer::{full_tokenize, Token, TokenTable}`, `roc_code_markup::slow_pool::{MarkNodeId, SlowPool}`), needless braces around single imports are removed, and extra blank lines around `type T = Token;` are dropped.
- `peg::parser!{` becomes `peg::parser! {`.
- `merge_ids`, `add_new_mn`, `highlight_expr`, and `highlight_defs` get rustfmt signatures and indentation: `highlight_expr` and `highlight_defs` wrap their parameters one per line, `merge_ids(mn_id: MarkNodeId, other_mn_id: Vec<MarkNodeId>)` collapses onto one line, and the bodies are re-indented with no behavior change.
- The `highlight_tests` module is re-indented; its `node_to_string_w_children(...)`, `assert_eq!(...)`, and `test_highlight_expr(...)` calls are collapsed or wrapped per rustfmt, and the commented-out `test_highlight_defs` / `all_highlight_style` block is kept as is.
@@ -1,2 +1,2 @@ (highlight crate root):
`pub mod highlight_parser;` and `pub mod tokenizer;` are put in alphabetical order.
@ -1,8 +1,10 @@
|
||||
use std::cmp::Ordering;
|
||||
|
||||
#[repr(u8)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
/// Tokens are full of very dense information to make checking properties about them
|
||||
/// very fast.
|
||||
/// Some bits have specific meanings:
|
||||
/// Some bits have specific meanings:
|
||||
/// * 0b_001*_****: "Identifier-like" things
|
||||
/// * 0b_01**_****: "Punctuation"
|
||||
/// * 0b_0100_1***: []{}() INDENT/DEDENT
|
||||
@ -10,61 +12,61 @@
|
||||
/// * 0b_0100_1**1 ]})DEDENT
|
||||
/// * 0b_011*_**** Operators
|
||||
pub enum Token {
|
||||
LowercaseIdent = 0b_0010_0000,
|
||||
UppercaseIdent = 0b_0011_0011,
|
||||
MalformedIdent = 0b_0010_0001,
|
||||
LowercaseIdent = 0b_0010_0000,
|
||||
UppercaseIdent = 0b_0011_0011,
|
||||
MalformedIdent = 0b_0010_0001,
|
||||
|
||||
KeywordIf = 0b_0010_0010,
|
||||
KeywordThen = 0b_0010_0011,
|
||||
KeywordElse = 0b_0010_0100,
|
||||
KeywordWhen = 0b_0010_0101,
|
||||
KeywordAs = 0b_0010_0110,
|
||||
KeywordIs = 0b_0010_0111,
|
||||
KeywordExpect = 0b_0010_1000,
|
||||
KeywordApp = 0b_0010_1001,
|
||||
KeywordInterface = 0b_0010_1010,
|
||||
KeywordPackages = 0b_0010_1011,
|
||||
KeywordImports = 0b_0010_1100,
|
||||
KeywordProvides = 0b_0010_1101,
|
||||
KeywordTo = 0b_0010_1110,
|
||||
KeywordExposes = 0b_0010_1111,
|
||||
KeywordEffects = 0b_0011_0000,
|
||||
KeywordPlatform = 0b_0011_0001,
|
||||
KeywordRequires = 0b_0011_0010,
|
||||
KeywordIf = 0b_0010_0010,
|
||||
KeywordThen = 0b_0010_0011,
|
||||
KeywordElse = 0b_0010_0100,
|
||||
KeywordWhen = 0b_0010_0101,
|
||||
KeywordAs = 0b_0010_0110,
|
||||
KeywordIs = 0b_0010_0111,
|
||||
KeywordExpect = 0b_0010_1000,
|
||||
KeywordApp = 0b_0010_1001,
|
||||
KeywordInterface = 0b_0010_1010,
|
||||
KeywordPackages = 0b_0010_1011,
|
||||
KeywordImports = 0b_0010_1100,
|
||||
KeywordProvides = 0b_0010_1101,
|
||||
KeywordTo = 0b_0010_1110,
|
||||
KeywordExposes = 0b_0010_1111,
|
||||
KeywordEffects = 0b_0011_0000,
|
||||
KeywordPlatform = 0b_0011_0001,
|
||||
KeywordRequires = 0b_0011_0010,
|
||||
|
||||
Comma = 0b_0100_0000,
|
||||
Colon = 0b_0100_0001,
|
||||
Comma = 0b_0100_0000,
|
||||
Colon = 0b_0100_0001,
|
||||
|
||||
OpenParen = 0b_0100_1000,
|
||||
CloseParen = 0b_0100_1001,
|
||||
OpenCurly = 0b_0100_1010,
|
||||
CloseCurly = 0b_0100_1011,
|
||||
OpenSquare = 0b_0100_1100,
|
||||
CloseSquare = 0b_0100_1101,
|
||||
OpenIndent = 0b_0100_1110,
|
||||
CloseIndent = 0b_0100_1111,
|
||||
SameIndent = 0b_0101_0000,
|
||||
OpenParen = 0b_0100_1000,
|
||||
CloseParen = 0b_0100_1001,
|
||||
OpenCurly = 0b_0100_1010,
|
||||
CloseCurly = 0b_0100_1011,
|
||||
OpenSquare = 0b_0100_1100,
|
||||
CloseSquare = 0b_0100_1101,
|
||||
OpenIndent = 0b_0100_1110,
|
||||
CloseIndent = 0b_0100_1111,
|
||||
SameIndent = 0b_0101_0000,
|
||||
|
||||
OpPlus = 0b_0110_0000,
|
||||
OpMinus = 0b_0110_0001,
|
||||
OpSlash = 0b_0110_0010,
|
||||
OpPercent = 0b_0110_0011,
|
||||
OpCaret = 0b_0110_0100,
|
||||
OpGreaterThan = 0b_0110_0101,
|
||||
OpLessThan = 0b_0110_0110,
|
||||
OpAssignment = 0b_0110_0111,
|
||||
OpPizza = 0b_0110_1000,
|
||||
OpEquals = 0b_0110_1001,
|
||||
OpNotEquals = 0b_0110_1010,
|
||||
OpGreaterThanOrEq = 0b_0110_1011,
|
||||
OpLessThanOrEq = 0b_0110_1100,
|
||||
OpAnd = 0b_0110_1101,
|
||||
OpOr = 0b_0110_1110,
|
||||
OpDoubleSlash = 0b_0110_1111,
|
||||
OpDoublePercent = 0b_0111_0001,
|
||||
OpBackpassing = 0b_0111_1010,
|
||||
OpPlus = 0b_0110_0000,
|
||||
OpMinus = 0b_0110_0001,
|
||||
OpSlash = 0b_0110_0010,
|
||||
OpPercent = 0b_0110_0011,
|
||||
OpCaret = 0b_0110_0100,
|
||||
OpGreaterThan = 0b_0110_0101,
|
||||
OpLessThan = 0b_0110_0110,
|
||||
OpAssignment = 0b_0110_0111,
|
||||
OpPizza = 0b_0110_1000,
|
||||
OpEquals = 0b_0110_1001,
|
||||
OpNotEquals = 0b_0110_1010,
|
||||
OpGreaterThanOrEq = 0b_0110_1011,
|
||||
OpLessThanOrEq = 0b_0110_1100,
|
||||
OpAnd = 0b_0110_1101,
|
||||
OpOr = 0b_0110_1110,
|
||||
OpDoubleSlash = 0b_0110_1111,
|
||||
OpDoublePercent = 0b_0111_0001,
|
||||
OpBackpassing = 0b_0111_1010,
|
||||
|
||||
TodoNextThing = 0b_1000_0000,
|
||||
TodoNextThing = 0b_1000_0000,
|
||||
|
||||
Malformed,
|
||||
MalformedOperator,
|
||||
@ -98,6 +100,7 @@ pub struct TokenTable {
|
||||
pub lengths: Vec<usize>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct LexState {
|
||||
indents: Vec<usize>,
|
||||
}
|
||||
@ -107,40 +110,32 @@ trait ConsumeToken {
|
||||
}
|

#[derive(Default)]
struct TokenConsumer{
struct TokenConsumer {
    token_table: TokenTable,
}

impl ConsumeToken for TokenConsumer {
    fn token(&mut self, token: Token, offset: usize, length: usize){
    fn token(&mut self, token: Token, offset: usize, length: usize) {
        self.token_table.tokens.push(token);
        self.token_table.offsets.push(offset);
        self.token_table.lengths.push(length);
    }
}

pub fn tokenize(code_str: &str) -> Vec<Token> {
    full_tokenize(code_str).tokens
}

pub fn full_tokenize(code_str: &str) -> TokenTable {
    let mut lex_state = LexState{ indents: Vec::new() };
    let mut lex_state = LexState::default();
    let mut consumer = TokenConsumer::default();

    consume_all_tokens(
        &mut lex_state,
        code_str.as_bytes(),
        &mut consumer
    );
    consume_all_tokens(&mut lex_state, code_str.as_bytes(), &mut consumer);

    consumer.token_table
}

fn consume_all_tokens(
    state: &mut LexState,
    bytes: &[u8],
    consumer: &mut impl ConsumeToken,
) {
fn consume_all_tokens(state: &mut LexState, bytes: &[u8], consumer: &mut impl ConsumeToken) {
    let mut i = 0;

    while i < bytes.len() {
@ -159,36 +154,32 @@ fn consume_all_tokens(
            b'a'..=b'z' => lex_ident(false, bytes),
            b'A'..=b'Z' => lex_ident(true, bytes),
            b'0'..=b'9' => lex_number(bytes),
            b'-' | b':' | b'!' | b'.' | b'*' | b'/' | b'&' |
            b'%' | b'^' | b'+' | b'<' | b'=' | b'>' | b'|' | b'\\' => lex_operator(bytes),
            b' ' => {
                match skip_whitespace(bytes) {
                    SpaceDotOrSpaces::SpacesWSpaceDot(skip) => {
            b'-' | b':' | b'!' | b'.' | b'*' | b'/' | b'&' | b'%' | b'^' | b'+' | b'<' | b'='
            | b'>' | b'|' | b'\\' => lex_operator(bytes),
            b' ' => match skip_whitespace(bytes) {
                SpaceDotOrSpaces::SpacesWSpaceDot(skip) => {
                    i += skip;
                    (Token::SpaceDot, 1)
                },
                SpaceDotOrSpaces::Spaces(skip) => {
                }
                SpaceDotOrSpaces::Spaces(skip) => {
                    i += skip;
                    continue;
                }
            }

            }
            },
            b'\n' => {
                // TODO: add newline to side_table
                let skip_newline_return = skip_newlines_and_comments(bytes);

                match skip_newline_return {
                    SkipNewlineReturn::SkipWIndent(skipped_lines, curr_line_indent) => {
                        add_indents(skipped_lines, curr_line_indent, state, consumer, &mut i);
                        continue;
                    }
                    SkipNewlineReturn::WSpaceDot(skipped_lines, curr_line_indent) => {
                        add_indents(skipped_lines, curr_line_indent, state, consumer, &mut i);
                        (Token::SpaceDot, 1)
                    }
                }

            }
            b'#' => {
                // TODO: add comment to side_table
@ -204,50 +195,53 @@ fn consume_all_tokens(
    }
}

fn add_indents(skipped_lines: usize, curr_line_indent: usize, state: &mut LexState, consumer: &mut impl ConsumeToken, curr_byte_ctr: &mut usize) {
    *curr_byte_ctr += skipped_lines;
fn add_indents(
    skipped_lines: usize,
    curr_line_indent: usize,
    state: &mut LexState,
    consumer: &mut impl ConsumeToken,
    curr_byte_ctr: &mut usize,
) {
    *curr_byte_ctr += skipped_lines;

    if let Some(&prev_indent) = state.indents.last() {
        if curr_line_indent > prev_indent {
            state.indents.push(curr_line_indent);
            consumer.token(Token::OpenIndent, *curr_byte_ctr, 0);
        } else {
            *curr_byte_ctr += curr_line_indent;

            if prev_indent == curr_line_indent {
                consumer.token(Token::SameIndent, *curr_byte_ctr, 0);
            } else if curr_line_indent < prev_indent {
                // safe unwrap because we check first
                while state.indents.last().is_some() && curr_line_indent < *state.indents.last().unwrap() {
                    state.indents.pop();
                    consumer.token(Token::CloseIndent, *curr_byte_ctr, 0);
            match prev_indent.cmp(&curr_line_indent) {
                Ordering::Equal => {
                    consumer.token(Token::SameIndent, *curr_byte_ctr, 0);
                }
                Ordering::Greater => {
                    // safe unwrap because we check first
                    while state.indents.last().is_some()
                        && curr_line_indent < *state.indents.last().unwrap()
                    {
                        state.indents.pop();
                        consumer.token(Token::CloseIndent, *curr_byte_ctr, 0);
                    }
                }
                Ordering::Less => {}
            }
        }
    }

    } else if curr_line_indent > 0 {
        state.indents.push(curr_line_indent);
        consumer.token(Token::OpenIndent, *curr_byte_ctr, 0);
    } else {
        consumer.token(Token::SameIndent, *curr_byte_ctr, 0);
    }
}

impl TokenTable {
    pub fn extract_str<'a>(&self, index: usize, content: &'a str) -> &'a str {
        // TODO remove unwrap
        let len = *self.lengths.get(index).unwrap();
        let offset = *self.offsets.get(index).unwrap();

        &content[offset..(offset + len)]
    }
}

impl LexState {
    pub fn new() -> LexState {
        LexState {
            indents: Vec::new(),
        }
        &content[offset..(offset + len)]
    }
}

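The add_indents rewrite above swaps an `if`/`else if` comparison chain for a single `match` on `std::cmp::Ordering`, which is presumably what clippy's `comparison_chain` lint asked for in this "clippy+fmt" commit. A minimal standalone sketch of that pattern, with names invented for illustration rather than taken from the file:

use std::cmp::Ordering;

fn describe_indent_change(prev: usize, curr: usize) -> &'static str {
    // One comparison instead of an `==` / `<` chain; every case is handled explicitly.
    match prev.cmp(&curr) {
        Ordering::Equal => "same indent",
        Ordering::Greater => "dedent",
        Ordering::Less => "indent",
    }
}

fn main() {
    assert_eq!(describe_indent_change(4, 4), "same indent");
    assert_eq!(describe_indent_change(8, 4), "dedent");
    assert_eq!(describe_indent_change(0, 4), "indent");
}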
@ -256,8 +250,8 @@ fn skip_comment(bytes: &[u8]) -> usize {
    while skip < bytes.len() && bytes[skip] != b'\n' {
        skip += 1;
    }
    if (skip + 1) < bytes.len() && bytes[skip] == b'\n' && bytes[skip+1] == b'#'{
        skip += 1;
    if (skip + 1) < bytes.len() && bytes[skip] == b'\n' && bytes[skip + 1] == b'#' {
        skip += 1;
    }

    skip
@ -267,8 +261,8 @@ fn skip_comment(bytes: &[u8]) -> usize {
struct Indent(usize);

enum SpaceDotOrSpaces {
    SpacesWSpaceDot(usize),
    Spaces(usize)
    SpacesWSpaceDot(usize),
    Spaces(usize),
}

fn skip_whitespace(bytes: &[u8]) -> SpaceDotOrSpaces {
@ -280,15 +274,15 @@ fn skip_whitespace(bytes: &[u8]) -> SpaceDotOrSpaces {
    }

    if skip < bytes.len() && bytes[skip] == b'.' {
        SpaceDotOrSpaces::SpacesWSpaceDot(skip)
    } else {
        SpaceDotOrSpaces::Spaces(skip)
    }
}

enum SkipNewlineReturn {
    SkipWIndent(usize, usize),
    WSpaceDot(usize, usize)
    SkipWIndent(usize, usize),
    WSpaceDot(usize, usize),
}

// also skips lines that contain only whitespace
@ -299,50 +293,64 @@ fn skip_newlines_and_comments(bytes: &[u8]) -> SkipNewlineReturn {
    while skip < bytes.len() && bytes[skip] == b'\n' {
        skip += indent + 1;

        if bytes.len() > skip {
            if bytes[skip] == b' ' {
                let space_dot_or_spaces = skip_whitespace(&bytes[skip..]);

                match space_dot_or_spaces {
                    SpaceDotOrSpaces::SpacesWSpaceDot(spaces) => {
                        return SkipNewlineReturn::WSpaceDot(skip, spaces)
                    }
                    SpaceDotOrSpaces::Spaces(spaces) => {
                        if bytes.len() > (skip + spaces) {
                            if bytes[skip + spaces] == b'\n' {
                                indent = 0;
                                skip += spaces;
                            } else if bytes[skip+spaces] == b'#' {
                                let comment_skip = skip_comment(&bytes[(skip + spaces)..]);

                                indent = 0;
                                skip += spaces + comment_skip;
                            } else {
                                indent = spaces;
                            }
                        } else {
                            indent = spaces;
                match space_dot_or_spaces {
                    SpaceDotOrSpaces::SpacesWSpaceDot(spaces) => {
                        return SkipNewlineReturn::WSpaceDot(skip, spaces)
                    }
                    SpaceDotOrSpaces::Spaces(spaces) => {
                        if bytes.len() > (skip + spaces) {
                            if bytes[skip + spaces] == b'\n' {
                                indent = 0;
                                skip += spaces;
                            } else if bytes[skip + spaces] == b'#' {
                                let comment_skip = skip_comment(&bytes[(skip + spaces)..]);

                                indent = 0;
                                skip += spaces + comment_skip;
                            } else {
                                indent = spaces;
                            }
                        } else {
                            indent = spaces;
                        }
                    }
                }
            } else {
                while bytes[skip] == b'#' {
                    let comment_skip = skip_comment(&bytes[skip..]);

                    indent = 0;
                    skip += comment_skip;
                }
            }
        }
    }

    SkipNewlineReturn::SkipWIndent(skip, indent)
}

fn is_op_continue(ch: u8) -> bool {
    matches!(ch, b'-' | b':' | b'!' | b'.' | b'*' | b'/' | b'&' |
        b'%' | b'^' | b'+' | b'<' | b'=' | b'>' | b'|' | b'\\')
    matches!(
        ch,
        b'-' | b':'
            | b'!'
            | b'.'
            | b'*'
            | b'/'
            | b'&'
            | b'%'
            | b'^'
            | b'+'
            | b'<'
            | b'='
            | b'>'
            | b'|'
            | b'\\'
    )
}

fn lex_operator(bytes: &[u8]) -> (Token, usize) {
@ -429,7 +437,7 @@ fn lex_ident(uppercase: bool, bytes: &[u8]) -> (Token, usize) {
        } else {
            Token::LowercaseIdent
        }
        },
        }
    };
    (tok, i)
}
@ -483,94 +491,161 @@ fn lex_string(bytes: &[u8]) -> (Token, usize) {

#[cfg(test)]
mod tokenizer {
    use crate::tokenizer::tokenize;
    use super::Token;

    use crate::tokenizer::tokenize;

    type T = Token;

    #[test]
    fn test_indent_tokenization_1() {
        let tokens = tokenize(r#"showBool = \b ->
        let tokens = tokenize(
            r#"showBool = \b ->
    when b is
        True ->
            "True""#);

        assert_eq!(
            tokens,
            [T::LowercaseIdent, T::OpAssignment, T::LambdaStart, T::LowercaseIdent, T::Arrow,
            T::OpenIndent, T::KeywordWhen, T::LowercaseIdent, T::KeywordIs,
            T::OpenIndent, T::UppercaseIdent, T::Arrow,
            T::OpenIndent, T::String]
        );
            "True""#,
        );

        assert_eq!(
            tokens,
            [
                T::LowercaseIdent,
                T::OpAssignment,
                T::LambdaStart,
                T::LowercaseIdent,
                T::Arrow,
                T::OpenIndent,
                T::KeywordWhen,
                T::LowercaseIdent,
                T::KeywordIs,
                T::OpenIndent,
                T::UppercaseIdent,
                T::Arrow,
                T::OpenIndent,
                T::String
            ]
        );
    }

    #[test]
    fn test_indent_tokenization_2() {
        let tokens = tokenize(r#"showBool = \b ->
        let tokens = tokenize(
            r#"showBool = \b ->
    when b is
        True ->
            "True"
"#);

        assert_eq!(
            tokens,
            [T::LowercaseIdent, T::OpAssignment, T::LambdaStart, T::LowercaseIdent, T::Arrow,
            T::OpenIndent, T::KeywordWhen, T::LowercaseIdent, T::KeywordIs,
            T::OpenIndent, T::UppercaseIdent, T::Arrow,
            T::OpenIndent, T::String,
            T::CloseIndent, T::CloseIndent, T::CloseIndent]
        );
"#,
        );

        assert_eq!(
            tokens,
            [
                T::LowercaseIdent,
                T::OpAssignment,
                T::LambdaStart,
                T::LowercaseIdent,
                T::Arrow,
                T::OpenIndent,
                T::KeywordWhen,
                T::LowercaseIdent,
                T::KeywordIs,
                T::OpenIndent,
                T::UppercaseIdent,
                T::Arrow,
                T::OpenIndent,
                T::String,
                T::CloseIndent,
                T::CloseIndent,
                T::CloseIndent
            ]
        );
    }

    #[test]
    fn test_tokenization_line_with_only_spaces() {
        let tokens = tokenize(r#"\key ->
        let tokens = tokenize(
            r#"\key ->
    when dict is
        Empty ->
            4

        Node ->
            5"#);
            5"#,
        );

        assert_eq!(
            tokens,
            [T::LambdaStart, T::LowercaseIdent, T::Arrow,
            T::OpenIndent, T::KeywordWhen, T::LowercaseIdent, T::KeywordIs,
            T::OpenIndent, T::UppercaseIdent, T::Arrow,
            T::OpenIndent, T::Number,
            T::CloseIndent,
            T::UppercaseIdent, T::Arrow,
            T::OpenIndent, T::Number]
        );
        assert_eq!(
            tokens,
            [
                T::LambdaStart,
                T::LowercaseIdent,
                T::Arrow,
                T::OpenIndent,
                T::KeywordWhen,
                T::LowercaseIdent,
                T::KeywordIs,
                T::OpenIndent,
                T::UppercaseIdent,
                T::Arrow,
                T::OpenIndent,
                T::Number,
                T::CloseIndent,
                T::UppercaseIdent,
                T::Arrow,
                T::OpenIndent,
                T::Number
            ]
        );
    }

    #[test]
    fn test_tokenization_empty_lines_and_comments() {
        let tokens = tokenize(r#"a = 5
        let tokens = tokenize(
            r#"a = 5

# com1
# com2
b = 6"#);
b = 6"#,
        );

        assert_eq!(
            tokens,[T::LowercaseIdent, T::OpAssignment, T::Number,
            T::SameIndent, T::LowercaseIdent, T::OpAssignment, T::Number]);
        assert_eq!(
            tokens,
            [
                T::LowercaseIdent,
                T::OpAssignment,
                T::Number,
                T::SameIndent,
                T::LowercaseIdent,
                T::OpAssignment,
                T::Number
            ]
        );
    }

    #[test]
    fn test_tokenization_when_branch_comments() {
        let tokens = tokenize(r#"when errorCode is
        let tokens = tokenize(
            r#"when errorCode is
    # A -> Task.fail InvalidCharacter
    # B -> Task.fail IOError
    _ ->
        Task.succeed -1"#);
        Task.succeed -1"#,
        );

        assert_eq!(
            tokens,[T::KeywordWhen, T::LowercaseIdent, T::KeywordIs,
            T::OpenIndent, T::Underscore, T::Arrow, T::OpenIndent, T::UppercaseIdent, T::Dot, T::LowercaseIdent, T::OpMinus, T::Number]);
        assert_eq!(
            tokens,
            [
                T::KeywordWhen,
                T::LowercaseIdent,
                T::KeywordIs,
                T::OpenIndent,
                T::Underscore,
                T::Arrow,
                T::OpenIndent,
                T::UppercaseIdent,
                T::Dot,
                T::LowercaseIdent,
                T::OpMinus,
                T::Number
            ]
        );
    }
}
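As a quick orientation note, here is a hedged sketch of how the tokenizer API in this file can be driven, assuming the code below lives in the same module as the items above and that `Token` derives `Debug` (not visible in these hunks). `full_tokenize` records per-token offsets and lengths, so `TokenTable::extract_str` can recover the source text behind each token, and because `consume_all_tokens` takes any `ConsumeToken` implementation, a custom sink such as the counter below could be used instead of a `TokenTable`.

// Sketch only: a custom ConsumeToken sink that counts tokens instead of storing them.
#[derive(Default)]
struct TokenCounter {
    count: usize,
}

impl ConsumeToken for TokenCounter {
    fn token(&mut self, _token: Token, _offset: usize, _length: usize) {
        self.count += 1;
    }
}

// Sketch only: print each token with the source slice it covers,
// using the offsets and lengths recorded in the TokenTable.
fn dump_tokens(code: &str) {
    let table = full_tokenize(code);

    for index in 0..table.tokens.len() {
        println!("{:?} -> {:?}", table.tokens[index], table.extract_str(index, code));
    }
}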
File diff suppressed because it is too large
@ -367,9 +367,7 @@ fn preprocess_impl(
        Some(section) => {
            let file_offset = match section.compressed_file_range() {
                Ok(
                    range
                    @
                    CompressedFileRange {
                range @ CompressedFileRange {
                    format: CompressionFormat::None,
                    ..
                },
@ -494,9 +492,7 @@ fn preprocess_impl(
    for sec in text_sections {
        let (file_offset, compressed) = match sec.compressed_file_range() {
            Ok(
                range
                @
                CompressedFileRange {
                range @ CompressedFileRange {
                    format: CompressionFormat::None,
                    ..
                },
@ -626,9 +622,7 @@ fn preprocess_impl(
    };
    let dyn_offset = match dyn_sec.compressed_file_range() {
        Ok(
            range
            @
            CompressedFileRange {
            range @ CompressedFileRange {
                format: CompressionFormat::None,
                ..
            },
@ -714,9 +708,7 @@ fn preprocess_impl(
    };
    let symtab_offset = match symtab_sec.compressed_file_range() {
        Ok(
            range
            @
            CompressedFileRange {
            range @ CompressedFileRange {
                format: CompressionFormat::None,
                ..
            },
@ -738,9 +730,7 @@ fn preprocess_impl(
    };
    let dynsym_offset = match dynsym_sec.compressed_file_range() {
        Ok(
            range
            @
            CompressedFileRange {
            range @ CompressedFileRange {
                format: CompressionFormat::None,
                ..
            },
@ -759,9 +749,7 @@ fn preprocess_impl(
    {
        match sec.compressed_file_range() {
            Ok(
                range
                @
                CompressedFileRange {
                range @ CompressedFileRange {
                    format: CompressionFormat::None,
                    ..
                },
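Each of the hunks above is the same one-line reformat of a binding pattern. For readers unfamiliar with the syntax, a self-contained sketch of what `name @ Pattern { .. }` does; the types here are invented stand-ins, not the linker's real `CompressedFileRange`:

#[derive(Debug)]
enum Compression {
    None,
    Zlib,
}

#[derive(Debug)]
struct FileRange {
    format: Compression,
    offset: u64,
}

fn uncompressed_offset(range_result: Result<FileRange, String>) -> Option<u64> {
    match range_result {
        // `range @ FileRange { .. }` requires `format` to be `Compression::None`
        // while also binding the whole matched struct to `range`.
        Ok(range @ FileRange {
            format: Compression::None,
            ..
        }) => Some(range.offset),
        _ => None,
    }
}

fn main() {
    let plain = Ok(FileRange { format: Compression::None, offset: 64 });
    assert_eq!(uncompressed_offset(plain), Some(64));

    let compressed = Ok(FileRange { format: Compression::Zlib, offset: 64 });
    assert_eq!(uncompressed_offset(compressed), None);
}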