Mirror of https://github.com/enso-org/enso.git (synced 2024-11-22 11:52:59 +03:00)
Convert Array_Like_Helpers.map to a builtin to reduce stack size (#11363)
The ultimate goal is to reduce the number of method calls necessary for `Vector.map`.

# Important Notes
- I managed to reduce the number of Java stack frames needed for each `Vector.map` call from **150** to **22** (see https://github.com/enso-org/enso/pull/11363#issuecomment-2432996902).
- Introduced a `Stack_Size_Spec` regression test that ensures the number of Java stack frames needed for a `Vector.map` call does not exceed **40**.
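For illustration only (this snippet is not part of the commit), the pattern the new `Stack_Size_Spec` stresses is nested `Vector.map` calls like the hypothetical example below; every `map` call used to cost roughly 150 Java stack frames and now costs about 22, so the per-call frame count directly determines how deeply such nesting can go before the JVM stack overflows, and the test's 40-frame budget guards against regressing that.

```
from Standard.Base import all

# Hypothetical sketch: each nesting level adds one
# Vector.map -> Array_Like_Helpers.map -> vector_from_function
# round trip of Java frames to the stack.
main =
    matrix = [[1, 2, 3], [4, 5, 6]]
    squared = matrix.map row-> row.map x-> x * x
    IO.println squared
```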
This commit is contained in:
parent 9703cb1dd0
commit 701bba6504
@@ -1,5 +1,5 @@
COMP_PATH=$(dirname "$0")/../component

JAVA_OPTS="--add-opens=java.base/java.nio=ALL-UNNAMED -Xss16M $JAVA_OPTS"
JAVA_OPTS="--add-opens=java.base/java.nio=ALL-UNNAMED $JAVA_OPTS"
exec java --module-path $COMP_PATH $JAVA_OPTS -m org.enso.runner/org.enso.runner.Main "$@"
exit
@@ -1,5 +1,5 @@
@echo off
set comp-dir=%~dp0\..\component
set JAVA_OPTS=%JAVA_OPTS% --add-opens=java.base/java.nio=ALL-UNNAMED -Xss16M
set JAVA_OPTS=%JAVA_OPTS% --add-opens=java.base/java.nio=ALL-UNNAMED
java --module-path %comp-dir% -Dpolyglot.compiler.IterativePartialEscape=true %JAVA_OPTS% -m org.enso.runner/org.enso.runner.Main %*
exit /B %errorlevel%
@@ -173,7 +173,7 @@ type JS_Object
mapper = ObjectMapper.new
new_object = mapper.createObjectNode
keys = Vector.build builder->
pairs.map on_problems=No_Wrap pair->
pairs.map on_problems=No_Wrap.Value pair->
case pair.first of
text : Text ->
## Ensure that any dataflow errors that could be stored in `pair.second` are propagated.
@@ -190,7 +190,7 @@ type Statistic
compute_bulk : Vector -> Vector Statistic -> Vector Any
compute_bulk data statistics:Vector=[Statistic.Count, Statistic.Sum] =
resolved_stats = statistics.map (r-> r:Statistic)
moment_order = resolved_stats.map on_problems=No_Wrap .order
moment_order = resolved_stats.map on_problems=No_Wrap.Value .order
has_min_max = resolved_stats.any (s-> s == Statistic.Minimum || s == Statistic.Maximum)
has_product = resolved_stats.any (s-> s == Statistic.Product)
max_moment_order = moment_order.filter (v-> v != Nothing) . fold 0 .max
@@ -202,7 +202,7 @@ type Statistic
Error.throw (Illegal_Argument.Error ("Can only compute " + stat.to_text + " on numerical data sets."))

if max_moment_order > 0 && counter.moments.is_nothing then report_error resolved_stats else
resolved_stats.map on_problems=No_Wrap statistic-> case statistic of
resolved_stats.map on_problems=No_Wrap.Value statistic-> case statistic of
Statistic.Covariance series -> check_if_empty counter.count <| calculate_correlation_statistics data series . covariance
Statistic.Pearson series -> check_if_empty counter.count <| calculate_correlation_statistics data series . pearsonCorrelation
Statistic.R_Squared series -> check_if_empty counter.count <| calculate_correlation_statistics data series . rSquared
@@ -229,7 +229,7 @@ type Statistic
running_bulk data statistics=[Statistic.Count, Statistic.Sum] =
resolved_stats = statistics.map (r-> r:Statistic)
check_running_support resolved_stats <|
moment_order = resolved_stats.map on_problems=No_Wrap .order
moment_order = resolved_stats.map on_problems=No_Wrap.Value .order
has_min_max = resolved_stats.any (s-> s == Statistic.Minimum || s == Statistic.Maximum)
has_product = resolved_stats.any (s-> s == Statistic.Product)
max_moment_order = moment_order.filter (v-> v != Nothing) . fold 0 .max
@@ -241,7 +241,7 @@ type Statistic
data.fold counter current->value->
result = compute_fold current value

row = Panic.rethrow_wrapped_if_error <| resolved_stats.map on_problems=No_Wrap s-> case s of
row = Panic.rethrow_wrapped_if_error <| resolved_stats.map on_problems=No_Wrap.Value s-> case s of
Statistic.Maximum -> if result.count == 0 then Nothing else result.maximum
Statistic.Minimum -> if result.count == 0 then Nothing else result.minimum
_ -> result.compute s
@@ -71,7 +71,7 @@ type Vector a

Vector.new my_vec.length (ix -> my_vec.at ix)
new : Integer -> (Integer -> Any) -> Vector Any
new length constructor = Array_Like_Helpers.vector_from_function length constructor
new length constructor = Array_Like_Helpers.vector_from_function length constructor Problem_Behavior.Report_Error

## PRIVATE
ADVANCED
@@ -700,7 +700,7 @@ type Vector a
[1, 2, 3] . map +1
map : (Any -> Any) -> Problem_Behavior | No_Wrap -> Vector Any
map self function on_problems:(Problem_Behavior | No_Wrap)=..Report_Error =
Array_Like_Helpers.map self function on_problems
@Tail_Call Array_Like_Helpers.map self function on_problems

## ICON union
Applies a function to each element of the vector, returning the `Vector`
@@ -1556,7 +1556,9 @@ type Map_Error

## PRIVATE
Indicates that a method should not wrap thrown errors in `Map_Error`.
@Builtin_Type
type No_Wrap
Value

## PRIVATE
Wrapped_Error.from (that : Map_Error) = Wrapped_Error.Value that that.inner_error
@@ -510,6 +510,7 @@ type Missing_Argument
Error.throw (Missing_Argument.Error argument_name function_name call_location)

## Warning when additional warnings occurred.
@Builtin_Type
type Additional_Warnings
## PRIVATE
Error (count:Integer)
@@ -4,6 +4,7 @@ import project.Error.Error
import project.Warning.Warning

## Specifies how to handle problems.
@Builtin_Type
type Problem_Behavior
## Ignore the problem and attempt to complete the operation
Ignore
@@ -45,9 +45,6 @@ at array_like index = @Builtin_Method "Array_Like_Helpers.at"
vector_to_array : (Vector | Array) -> Array
vector_to_array array_like = @Builtin_Method "Array_Like_Helpers.vector_to_array"

vector_from_function_primitive : Integer -> (Integer -> Any) -> Vector Any
vector_from_function_primitive length constructor = @Builtin_Method "Array_Like_Helpers.vector_from_function"

flatten : (Vector | Array) -> Vector
flatten array_like = @Builtin_Method "Array_Like_Helpers.flatten"

@@ -84,33 +81,7 @@ slice vector start end = @Builtin_Method "Array_Like_Helpers.slice"
- Ignore: The result is `Nothing`, and the error is
ignored.
vector_from_function : Integer -> (Integer -> Any) -> Problem_Behavior | No_Wrap -> Vector Any
vector_from_function length function on_problems:(Problem_Behavior | No_Wrap)=..Report_Error =
num_errors = Ref.new 0
wrapped_function i =
result = function i
if result.is_error.not then result else
case on_problems of
Problem_Behavior.Ignore ->
Nothing
Problem_Behavior.Report_Error ->
result.catch_primitive caught->
Error.throw (Map_Error.Error i caught)
No_Wrap -> result
Problem_Behavior.Report_Warning ->
with_error_maybe = if num_errors.get >= MAX_MAP_WARNINGS then Nothing else
result.catch_primitive caught->
Warning.attach caught Nothing
num_errors.modify (_+1)
with_error_maybe
results = vector_from_function_primitive length wrapped_function
if num_errors.get <= MAX_MAP_WARNINGS then results else
err = Additional_Warnings.Error num_errors.get-MAX_MAP_WARNINGS
Warning.attach err results

## PRIVATE
The maximum number of warnings attached to result values in
`vector_from_function`.
MAX_MAP_WARNINGS = 10
vector_from_function length constructor on_problems = @Builtin_Method "Array_Like_Helpers.vector_from_function"

## PRIVATE
Creates a new vector where for each range, a corresponding section of the
@@ -258,7 +229,7 @@ transpose vec_of_vecs =
Vector.from_polyglot_array proxy

map vector function on_problems =
vector_from_function vector.length (i-> function (vector.at i)) on_problems
@Tail_Call vector_from_function vector.length (i-> function (vector.at i)) on_problems

map_with_index vector function on_problems =
vector_from_function vector.length (i-> function i (vector.at i)) on_problems
@@ -22,6 +22,7 @@ polyglot java import java.time.LocalDate
polyglot java import java.time.LocalDateTime
polyglot java import java.util.function.Function
polyglot java import java.lang.Exception as JException
polyglot java import java.lang.RuntimeException as JRuntimeException
polyglot java import java.lang.Thread
polyglot java import java.lang.Thread.State
polyglot java import java.lang.Float
@@ -39,3 +40,4 @@ CaseFoldedString=JCaseFoldedString
Text_Utils=JText_Utils
BreakIterator=JBreakIterator
Exception=JException
RuntimeException=JRuntimeException
@@ -165,7 +165,7 @@ type HTTP
# Create Unified Header list
boundary_header_list = if resolved_body.boundary.is_nothing then [] else [Header.multipart_form_data resolved_body.boundary]
all_headers = headers + boundary_header_list
mapped_headers = all_headers.map on_problems=No_Wrap .to_java_pair
mapped_headers = all_headers.map on_problems=No_Wrap.Value .to_java_pair

response = Response.Value (EnsoSecretHelper.makeRequest (self.make_client self resolved_body.hash) builder req.uri.to_java_representation mapped_headers (cache_policy.should_use_cache req))
if error_on_failure_code.not || response.code.is_success then response else
@@ -30,7 +30,7 @@ type Header
`Header` values.
unify_vector : Vector (Header | Pair Text Text | Vector) -> Vector Header
unify_vector headers:Vector =
headers . map on_problems=No_Wrap h-> case h of
headers . map on_problems=No_Wrap.Value h-> case h of
_ : Vector -> Header.new (h.at 0) (h.at 1)
_ : Pair -> Header.new (h.at 0) (h.at 1)
_ : Function -> h:Header
@ -296,7 +296,7 @@ set_array value warnings = @Builtin_Method "Warning.set_array"
|
||||
map_attached_warnings_helper : (Any -> Maybe Any) -> Any -> Integer -> Any
|
||||
map_attached_warnings_helper mapper value frames_to_drop =
|
||||
warnings = Warning.get_all value
|
||||
mapped_warnings = warnings.map on_problems=No_Wrap warning->
|
||||
mapped_warnings = warnings.map on_problems=No_Wrap.Value warning->
|
||||
case mapper warning.value of
|
||||
Maybe.Some new_payload ->
|
||||
self_call_name = "Warning.map_attached_warnings_helper"
|
||||
|
@ -71,7 +71,7 @@ run_transaction_with_tables connection (tables : Vector Transactional_Table_Desc
|
||||
## PRIVATE
|
||||
private create_tables_inside_transaction connection (tables : Vector Transactional_Table_Description) (callback : Vector DB_Table -> Any) -> Any =
|
||||
connection.jdbc_connection.run_within_transaction <|
|
||||
created = tables.map on_problems=No_Wrap t-> t.create connection
|
||||
created = tables.map on_problems=No_Wrap.Value t-> t.create connection
|
||||
created.if_not_error <|
|
||||
result = callback created
|
||||
|
||||
@ -89,7 +89,7 @@ private create_tables_outside_transaction connection (tables : Vector Transactio
|
||||
Panic.throw caught_panic
|
||||
|
||||
Panic.catch Any handler=handle_panic <|
|
||||
created = tables.map on_problems=No_Wrap t->
|
||||
created = tables.map on_problems=No_Wrap.Value t->
|
||||
table = t.create connection
|
||||
# We only register a table for cleanup if it was successfully created.
|
||||
table.if_not_error <|
|
||||
|
@ -160,8 +160,8 @@ type Context
|
||||
rewrite_internal_column column =
|
||||
Internal_Column.Value column.name column.sql_type_reference (SQL_Expression.Column alias column.name)
|
||||
|
||||
new_columns = column_lists.map on_problems=No_Wrap columns->
|
||||
columns.map on_problems=No_Wrap rewrite_internal_column
|
||||
new_columns = column_lists.map on_problems=No_Wrap.Value columns->
|
||||
columns.map on_problems=No_Wrap.Value rewrite_internal_column
|
||||
|
||||
encapsulated_columns = column_lists.flat_map columns->
|
||||
columns.map column-> [column.name, column.expression]
|
||||
|
@ -34,7 +34,7 @@ check_target_table_for_update target_table ~action = case target_table of
|
||||
resolve_primary_key structure primary_key = case primary_key of
|
||||
Nothing -> Nothing
|
||||
_ : Vector -> if primary_key.is_empty then Nothing else
|
||||
validated = primary_key.map on_problems=No_Wrap key->
|
||||
validated = primary_key.map on_problems=No_Wrap.Value key->
|
||||
if key.is_a Text then key else
|
||||
Error.throw (Illegal_Argument.Error ("Primary key must be a vector of column names, instead got a " + (Meta.type_of key . to_display_text)))
|
||||
validated.if_not_error <|
|
||||
@ -74,6 +74,6 @@ check_update_arguments_structure_match source_table target_table key_columns upd
|
||||
if missing_target_key_columns.not_empty then Error.throw (Missing_Input_Columns.Error missing_target_key_columns.to_vector "the target table") else
|
||||
if (update_action != Update_Action.Insert) && key_columns.is_empty then Error.throw (Illegal_Argument.Error "For the `update_action = "+update_action.to_text+"`, the `key_columns` must be specified to define how to match the records.") else
|
||||
# Verify type matching
|
||||
problems = source_table.columns.flat_map on_problems=No_Wrap check_source_column
|
||||
problems = source_table.columns.flat_map on_problems=No_Wrap.Value check_source_column
|
||||
problems.if_not_error <|
|
||||
on_problems.attach_problems_before problems action
|
||||
|
@ -30,7 +30,7 @@ make_batched_insert_template connection table_name column_names =
|
||||
prepare_create_table_statement : Connection -> Text -> Vector Column_Description -> Vector Text -> Boolean -> Problem_Behavior -> SQL_Statement
|
||||
prepare_create_table_statement connection table_name columns primary_key temporary on_problems:Problem_Behavior =
|
||||
type_mapping = connection.dialect.get_type_mapping
|
||||
column_descriptors = columns.map on_problems=No_Wrap def->
|
||||
column_descriptors = columns.map on_problems=No_Wrap.Value def->
|
||||
sql_type = type_mapping.value_type_to_sql def.value_type on_problems
|
||||
sql_type_text = type_mapping.sql_type_to_text sql_type
|
||||
Create_Column_Descriptor.Value def.name sql_type_text def.constraints
|
||||
|
@ -31,7 +31,7 @@ take_drop_helper take_drop table selector:(Index_Sub_Range | Range | Integer) =
|
||||
row_column_name = table.make_temp_column_name
|
||||
table_with_row_number = table.add_row_number name=row_column_name from=0
|
||||
|
||||
subqueries = ranges.map on_problems=No_Wrap range->
|
||||
subqueries = ranges.map on_problems=No_Wrap.Value range->
|
||||
generate_subquery table_with_row_number row_column_name range
|
||||
combined = subqueries.reduce (a-> b-> a.union b)
|
||||
combined.remove_columns row_column_name
|
||||
|
@ -2101,7 +2101,7 @@ type Column
|
||||
map : (Any -> Any) -> Boolean -> Value_Type | Auto -> Column ! Invalid_Value_Type
|
||||
map self function skip_nothing=True expected_value_type=Auto =
|
||||
new_fn = if skip_nothing then (x-> if x.is_nothing then Nothing else function x) else function
|
||||
new_st = self.to_vector.map on_problems=No_Wrap new_fn
|
||||
new_st = self.to_vector.map on_problems=No_Wrap.Value new_fn
|
||||
Column.from_vector self.name new_st value_type=expected_value_type
|
||||
|
||||
## ALIAS combine, join by row position, merge
|
||||
@ -2149,7 +2149,7 @@ type Column
|
||||
function x y
|
||||
False -> function
|
||||
new_name = naming_helper.binary_operation_name "x" self that
|
||||
vec = self.to_vector.zip on_problems=No_Wrap that.to_vector new_fn
|
||||
vec = self.to_vector.zip on_problems=No_Wrap.Value that.to_vector new_fn
|
||||
Column.from_vector new_name vec value_type=expected_value_type
|
||||
|
||||
## GROUP Standard.Base.Metadata
|
||||
|
@ -129,7 +129,7 @@ type Data_Formatter
|
||||
vector = case formats of
|
||||
v : Vector -> v
|
||||
singleton -> [singleton]
|
||||
converted = vector.map on_problems=No_Wrap elem->
|
||||
converted = vector.map on_problems=No_Wrap.Value elem->
|
||||
## Ensure the element is a `Date_Time_Formatter` or is converted to it.
|
||||
We need to convert _each_ element - we cannot perform a 'bulk' conversion like `vector : Vector Date_Time_Formatter` because of erasure.
|
||||
checked = elem : Date_Time_Formatter
|
||||
@ -216,17 +216,17 @@ type Data_Formatter
|
||||
## PRIVATE
|
||||
make_date_parser self = self.wrap_base_parser <|
|
||||
Panic.catch JException handler=(caught_panic-> Error.throw (Illegal_Argument.Error caught_panic.payload.getMessage)) <|
|
||||
DateParser.new (self.date_formats.map on_problems=No_Wrap .get_java_formatter_for_parsing)
|
||||
DateParser.new (self.date_formats.map on_problems=No_Wrap.Value .get_java_formatter_for_parsing)
|
||||
|
||||
## PRIVATE
|
||||
make_date_time_parser self = self.wrap_base_parser <|
|
||||
Panic.catch JException handler=(caught_panic-> Error.throw (Illegal_Argument.Error caught_panic.payload.getMessage)) <|
|
||||
DateTimeParser.new (self.datetime_formats.map on_problems=No_Wrap .get_java_formatter_for_parsing)
|
||||
DateTimeParser.new (self.datetime_formats.map on_problems=No_Wrap.Value .get_java_formatter_for_parsing)
|
||||
|
||||
## PRIVATE
|
||||
make_time_of_day_parser self = self.wrap_base_parser <|
|
||||
Panic.catch JException handler=(caught_panic-> Error.throw (Illegal_Argument.Error caught_panic.payload.getMessage)) <|
|
||||
TimeOfDayParser.new (self.time_formats.map on_problems=No_Wrap .get_java_formatter_for_parsing)
|
||||
TimeOfDayParser.new (self.time_formats.map on_problems=No_Wrap.Value .get_java_formatter_for_parsing)
|
||||
|
||||
## PRIVATE
|
||||
make_identity_parser self = self.wrap_base_parser IdentityParser.new
|
||||
|
@ -67,7 +67,7 @@ prepare_aggregate_columns naming_helper group_by aggregates table error_on_missi
|
||||
assert (resolved_keys.contains Nothing . not)
|
||||
problem_builder = Problem_Builder.new error_on_missing_columns=error_on_missing_columns
|
||||
columns = if old_style then group_by else keys+aggregates
|
||||
valid_resolved_aggregate_columns = columns.map on_problems=No_Wrap (resolve_aggregate table problem_builder) . filter x-> x.is_nothing.not
|
||||
valid_resolved_aggregate_columns = columns.map on_problems=No_Wrap.Value (resolve_aggregate table problem_builder) . filter x-> x.is_nothing.not
|
||||
|
||||
# Grouping Key
|
||||
key_columns = resolved_keys.map .column
|
||||
@ -80,7 +80,7 @@ prepare_aggregate_columns naming_helper group_by aggregates table error_on_missi
|
||||
The second pass resolves the default names, ensuring that they do not
|
||||
clash with the user-specified names (ensuring that user-specified names
|
||||
take precedence).
|
||||
pass_1 = valid_resolved_aggregate_columns.map on_problems=No_Wrap c-> if c.as == "" then "" else
|
||||
pass_1 = valid_resolved_aggregate_columns.map on_problems=No_Wrap.Value c-> if c.as == "" then "" else
|
||||
# Verify if the user-provided name is valid and if not, throw an error.
|
||||
naming_helper.ensure_name_is_valid c.as <|
|
||||
unique.make_unique c.as
|
||||
|
@ -89,7 +89,7 @@ unpack_problem_summary problem_summary =
|
||||
## TODO [RW, GT] In the next iterations we will want to remove
|
||||
`translate_problem` in favour of constructing Enso problem instances
|
||||
directly in Java code. To do so, we will need https://github.com/enso-org/enso/issues/7797
|
||||
parsed = problems_array . map on_problems=No_Wrap translate_problem
|
||||
parsed = problems_array . map on_problems=No_Wrap.Value translate_problem
|
||||
if count == parsed.length then parsed else
|
||||
parsed + [Additional_Warnings.Error (count - parsed.length)]
|
||||
|
||||
|
@ -43,7 +43,7 @@ type Join_Condition_Resolver
|
||||
is_nothing column = case column of
|
||||
Nothing -> True
|
||||
_ -> False
|
||||
conditions_vector = Vector.unify_vector_or_element conditions . map on_problems=No_Wrap condition-> (condition:Join_Condition)
|
||||
conditions_vector = Vector.unify_vector_or_element conditions . map on_problems=No_Wrap.Value condition-> (condition:Join_Condition)
|
||||
handle_equals left_selector right_selector =
|
||||
left = resolve_left left_selector
|
||||
right = resolve_right right_selector
|
||||
@ -52,7 +52,7 @@ type Join_Condition_Resolver
|
||||
if left.name == right.name then
|
||||
redundant_names.append right.name
|
||||
self.make_equals problem_builder left right
|
||||
converted = conditions_vector.map on_problems=No_Wrap condition-> case condition of
|
||||
converted = conditions_vector.map on_problems=No_Wrap.Value condition-> case condition of
|
||||
Join_Condition.Equals left_selector right_selector ->
|
||||
right_resovled = if right_selector == "" then left_selector else right_selector
|
||||
handle_equals left_selector right_resovled
|
||||
|
@ -51,7 +51,7 @@ prepare_columns_for_lookup base_table lookup_table key_columns_selector add_new_
|
||||
problems_to_add = Builder.new
|
||||
key_columns.if_not_error <| lookup_table_key_columns.if_not_error <|
|
||||
key_set = Hashset.from_vector key_columns
|
||||
my_updated_columns = base_table.columns.map on_problems=No_Wrap base_column->
|
||||
my_updated_columns = base_table.columns.map on_problems=No_Wrap.Value base_column->
|
||||
base_column_name = base_column.name
|
||||
is_key = key_set.contains base_column_name
|
||||
case is_key of
|
||||
|
@ -582,5 +582,5 @@ replace_columns_with_transformed_columns : Table -> Text | Integer | Regex | Vec
|
||||
replace_columns_with_transformed_columns table selectors transformer error_on_missing_columns=True on_problems:Problem_Behavior=..Report_Warning =
|
||||
internal_columns = table.columns_helper.select_columns selectors Case_Sensitivity.Default reorder=False error_on_missing_columns=error_on_missing_columns on_problems=on_problems
|
||||
columns = internal_columns.map table.columns_helper.make_column
|
||||
new_columns = columns.map on_problems=No_Wrap transformer
|
||||
new_columns = columns.map on_problems=No_Wrap.Value transformer
|
||||
replace_columns_with_columns table columns new_columns
|
||||
|
@ -103,6 +103,6 @@ check_is_in_values operation_name values =
|
||||
Error.throw (Illegal_Argument.Error message)
|
||||
_ -> v
|
||||
case values of
|
||||
_ : Vector -> values.map on_problems=No_Wrap check_value
|
||||
_ : Array -> values.map on_problems=No_Wrap check_value
|
||||
_ : Vector -> values.map on_problems=No_Wrap.Value check_value
|
||||
_ : Array -> values.map on_problems=No_Wrap.Value check_value
|
||||
_ -> check_value values
|
||||
|
@ -273,7 +273,7 @@ check_binary_boolean_op arg1 arg2 ~action =
|
||||
- action: the action to run if the arguments are compatible.
|
||||
check_multi_argument_comparable_op column arg_or_args ~action =
|
||||
args = Vector.unify_vector_or_element arg_or_args
|
||||
checked = args.map on_problems=No_Wrap arg->
|
||||
checked = args.map on_problems=No_Wrap.Value arg->
|
||||
Value_Type.expect_comparable column arg <|
|
||||
True
|
||||
checked.if_not_error <|
|
||||
|
@ -140,7 +140,7 @@ type Table
|
||||
new columns =
|
||||
invalid_input_shape =
|
||||
Error.throw (Illegal_Argument.Error "Each column must be represented by a pair whose first element is the column name and the second element is a vector of elements that will constitute that column, or an existing column.")
|
||||
cols = columns.map on_problems=No_Wrap c->
|
||||
cols = columns.map on_problems=No_Wrap.Value c->
|
||||
case c of
|
||||
v : Vector ->
|
||||
if v.length != 2 then invalid_input_shape else
|
||||
@ -1512,7 +1512,7 @@ type Table
|
||||
False ->
|
||||
Dictionary.from_vector <| selected_columns.map c-> [c.name, True]
|
||||
|
||||
new_columns = self.columns.map on_problems=No_Wrap column-> if selected_column_names.contains_key column.name . not then column else
|
||||
new_columns = self.columns.map on_problems=No_Wrap.Value column-> if selected_column_names.contains_key column.name . not then column else
|
||||
Value_Type.expect_text column <|
|
||||
storage = column.java_column.getStorage
|
||||
new_storage = Java_Problems.with_problem_aggregator on_problems java_problem_aggregator->
|
||||
@ -2732,10 +2732,10 @@ type Table
|
||||
merge self lookup_table:Table key_columns:(Vector (Integer | Text | Regex) | Text | Integer | Regex) add_new_columns:Boolean=False allow_unmatched_rows:Boolean=True on_problems:Problem_Behavior=..Report_Warning =
|
||||
lookup_columns = Lookup_Helpers.prepare_columns_for_lookup self lookup_table key_columns add_new_columns allow_unmatched_rows on_problems
|
||||
|
||||
java_descriptions = lookup_columns.map make_java_lookup_column_description on_problems=No_Wrap
|
||||
java_descriptions = lookup_columns.map make_java_lookup_column_description on_problems=No_Wrap.Value
|
||||
|
||||
keys = lookup_columns.filter .is_key
|
||||
java_keys = keys.map on_problems=No_Wrap key_column->
|
||||
java_keys = keys.map on_problems=No_Wrap.Value key_column->
|
||||
Java_Join_Equals.new key_column.base_column.java_column key_column.lookup_column.java_column
|
||||
|
||||
handle_java_errors ~action =
|
||||
@ -3205,7 +3205,7 @@ type Table
|
||||
normalize_group_by input = case input of
|
||||
Aggregate_Column.Group_By c _ -> c
|
||||
_ : Aggregate_Column -> Error.throw (Illegal_Argument.Error "Only Aggregate_Column.Group_By can be used for cross_tab group_by clause.")
|
||||
_ : Vector -> input.map on_problems=No_Wrap normalize_group_by
|
||||
_ : Vector -> input.map on_problems=No_Wrap.Value normalize_group_by
|
||||
_ -> input
|
||||
|
||||
## validate the name and group_by columns
|
||||
@ -3217,7 +3217,7 @@ type Table
|
||||
grouping = columns_helper.select_columns_helper (normalize_group_by group_by) Case_Sensitivity.Default True problem_builder
|
||||
|
||||
## Validate the values
|
||||
resolved_values = Vector.unify_vector_or_element values . map on_problems=No_Wrap (Aggregate_Column_Helper.resolve_aggregate self problem_builder)
|
||||
resolved_values = Vector.unify_vector_or_element values . map on_problems=No_Wrap.Value (Aggregate_Column_Helper.resolve_aggregate self problem_builder)
|
||||
is_group_by c = case c of
|
||||
Aggregate_Column.Group_By _ _ -> True
|
||||
_ -> False
|
||||
|
@ -154,7 +154,7 @@ type Bench
|
||||
Duration.new seconds=total_seconds
|
||||
|
||||
## Run the specified set of benchmarks.
|
||||
run_main self =
|
||||
run_main self (filter : Text | Nothing = Nothing) =
|
||||
count = self.total_specs
|
||||
IO.println <| "Found " + count.to_text + " cases to execute (ETA " + self.estimated_runtime.to_display_text + ")"
|
||||
|
||||
@ -167,11 +167,19 @@ type Bench
|
||||
line = 'Label,Phase,"Invocations count","Average time (ms)","Time Stdev"'
|
||||
line.write f on_existing_file=Existing_File_Behavior.Backup
|
||||
|
||||
should_skip (bench_name : Text) -> Boolean =
|
||||
if filter == Nothing then False else
|
||||
bench_name.match filter . not
|
||||
|
||||
self.fold Nothing _-> g-> s->
|
||||
c = g.configuration
|
||||
bench_name = g.name + "." + s.name
|
||||
case should_skip bench_name of
|
||||
False ->
|
||||
IO.println <| "Benchmarking '" + bench_name + "' with configuration: " + c.to_text
|
||||
Bench.measure bench_name c.warmup c.measure (s.code 0)
|
||||
True ->
|
||||
IO.println <| "Skipping '" + bench_name + "' benchmark"
|
||||
|
||||
## Measure the amount of time it takes to execute a given computation.
|
||||
|
||||
|
@@ -3,4 +3,3 @@ minimum-project-manager-version: 2023.2.1-nightly.2023.11.2
jvm-options:
- value: "-Dgraal.PrintGraph=Network"
- value: "--add-opens=java.base/java.nio=ALL-UNNAMED"
- value: "-Xss16M"
@@ -65,6 +65,27 @@ sbt:runtime-benchmarks> run -w 1 -i 1 -f 1 -jvmArgs -agentlib:jdwp=transport=dt_
This command will run the `importStandardLibraries` benchmark in fork waiting
for the debugger to attach.

### Dumping the compilation info of the benchmark

The following command enables the compilation tracing output from the Truffle
compiler:

```
sbt:runtime-benchmarks> run -jvmArgs -Dpolyglot.engine.TraceCompilation=true org.enso.interpreter.bench.benchmarks.semantic.IfVsCaseBenchmarks.ifBench6In
```

The output will contain lines like:

```
[error] [engine] opt done id=1067 ifBench6In.My_Type.Value |Tier 2|Time 22( 18+4 )ms|AST 1|Inlined 0Y 0N|IR 17/ 20|CodeSize 186|Addr 0x7acf0380f280|Timestamp 96474787822678|Src n/a
```

You can, e.g., dump Graal graphs with:

```
sbt:runtime-benchmarks> run -jvmArgs -Dgraal.Dump=Truffle:2 org.enso.interpreter.bench.benchmarks.semantic.IfVsCaseBenchmarks.ifBench6In
```

## Standard library benchmarks

Unlike the Engine micro benchmarks, these benchmarks are written entirely in
@ -4,6 +4,7 @@ import java.util.concurrent.TimeUnit;
|
||||
import java.util.function.Function;
|
||||
import org.enso.common.MethodNames.Module;
|
||||
import org.enso.compiler.benchmarks.Utils;
|
||||
import org.graalvm.polyglot.Context;
|
||||
import org.graalvm.polyglot.Value;
|
||||
import org.openjdk.jmh.annotations.Benchmark;
|
||||
import org.openjdk.jmh.annotations.BenchmarkMode;
|
||||
@ -14,6 +15,7 @@ import org.openjdk.jmh.annotations.OutputTimeUnit;
|
||||
import org.openjdk.jmh.annotations.Scope;
|
||||
import org.openjdk.jmh.annotations.Setup;
|
||||
import org.openjdk.jmh.annotations.State;
|
||||
import org.openjdk.jmh.annotations.TearDown;
|
||||
import org.openjdk.jmh.annotations.Warmup;
|
||||
import org.openjdk.jmh.infra.BenchmarkParams;
|
||||
import org.openjdk.jmh.infra.Blackhole;
|
||||
@ -28,10 +30,11 @@ public class TypePatternBenchmarks {
|
||||
private Value patternMatch;
|
||||
private Value avg;
|
||||
private Value vec;
|
||||
private Context ctx;
|
||||
|
||||
@Setup
|
||||
public void initializeBenchmark(BenchmarkParams params) throws Exception {
|
||||
var ctx = Utils.createDefaultContextBuilder().build();
|
||||
ctx = Utils.createDefaultContextBuilder().build();
|
||||
var code =
|
||||
"""
|
||||
from Standard.Base import Integer, Vector, Any, Float
|
||||
@ -76,6 +79,11 @@ public class TypePatternBenchmarks {
|
||||
this.avg = getMethod.apply("avg_pattern");
|
||||
}
|
||||
|
||||
@TearDown
|
||||
public void tearDown() {
|
||||
ctx.close();
|
||||
}
|
||||
|
||||
/**
|
||||
* Adding @ExplodeLoop in {@link
|
||||
* org.enso.interpreter.node.controlflow.caseexpr.CatchTypeBranchNode} specialization decreases
|
||||
|
@ -9,6 +9,7 @@ import java.net.URI;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import org.enso.interpreter.runtime.data.Type;
|
||||
import org.enso.interpreter.test.ValuesGenerator.Language;
|
||||
import org.enso.test.utils.ContextUtils;
|
||||
import org.graalvm.polyglot.Context;
|
||||
@ -259,6 +260,11 @@ public class MetaIsATest {
|
||||
if (v.equals(generator().typeAny())) {
|
||||
continue;
|
||||
}
|
||||
var unwrappedV = ContextUtils.unwrapValue(ctx, v);
|
||||
if (unwrappedV instanceof Type type && type.isEigenType()) {
|
||||
// Skip singleton types
|
||||
continue;
|
||||
}
|
||||
var r = isACheck.execute(v, v);
|
||||
if (v.equals(generator().typeNothing())) {
|
||||
assertTrue("Nothing is instance of itself", r.asBoolean());
|
||||
|
@ -16,6 +16,7 @@ import java.util.HashSet;
|
||||
import java.util.LinkedHashSet;
|
||||
import java.util.Set;
|
||||
import org.enso.common.MethodNames;
|
||||
import org.enso.interpreter.runtime.data.Type;
|
||||
import org.enso.interpreter.runtime.type.ConstantsGen;
|
||||
import org.enso.interpreter.test.ValuesGenerator.Language;
|
||||
import org.enso.test.utils.ContextUtils;
|
||||
@ -383,6 +384,11 @@ main = Nothing
|
||||
// skip Nothing
|
||||
continue;
|
||||
}
|
||||
var type = (Type) ContextUtils.unwrapValue(ctx, typ);
|
||||
if (type.isEigenType()) {
|
||||
// Skip singleton types
|
||||
continue;
|
||||
}
|
||||
|
||||
var simpleName = sn.execute(typ).asString();
|
||||
var metaName = typ.getMetaSimpleName() + ".type";
|
||||
@ -410,6 +416,11 @@ main = Nothing
|
||||
if (t.isNull()) {
|
||||
continue;
|
||||
}
|
||||
var type = (Type) ContextUtils.unwrapValue(ctx, t);
|
||||
if (type.isEigenType()) {
|
||||
// Skip checking singleton types
|
||||
continue;
|
||||
}
|
||||
switch (t.getMetaSimpleName()) {
|
||||
// represented as primitive values without meta object
|
||||
case "Float" -> {}
|
||||
@ -432,6 +443,17 @@ main = Nothing
|
||||
|
||||
@FunctionalInterface
|
||||
interface Check {
|
||||
|
||||
/**
|
||||
* @param v Instance of the type
|
||||
* @param type Type. Nullable.
|
||||
* @param expecting Set of types that are tested. The check should remove the currently tested
|
||||
* type from this set.
|
||||
* @param successfullyRemoved Set of types that already were tested. The check should add the
|
||||
* currently tested type to this set.
|
||||
* @param w StringBuilder for the error message that will be printed at the end in case of a
|
||||
* failure.
|
||||
*/
|
||||
void check(
|
||||
Value v, Value type, Set<Value> expecting, Set<Value> successfullyRemoved, StringBuilder w);
|
||||
}
|
||||
|
@ -47,6 +47,11 @@ public final class ValuesGenerator {
|
||||
this.languages = languages;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param type Either an atom or a type.
|
||||
* @param check An executable that checks if the value is of the given type. Takes a single
|
||||
* parameter.
|
||||
*/
|
||||
private record ValueInfo(Value type, Value check) {}
|
||||
|
||||
public static ValuesGenerator create(Context ctx, Language... langs) {
|
||||
@ -61,6 +66,13 @@ public final class ValuesGenerator {
|
||||
return v(key, prelude, typeOrValue, key != null ? typeOrValue : null);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param key Used as a key in {@link #values}. If the key is already there, nothing is created.
|
||||
* Nullable.
|
||||
* @param prelude Additional code inserted before {@code typeOrValue} expression, like imports.
|
||||
* @param typeOrValue An expression that results in an atom or a type.
|
||||
* @param typeCheck If not null, this is used as a type name and the value is checked against it.
|
||||
*/
|
||||
private ValueInfo v(String key, String prelude, String typeOrValue, String typeCheck) {
|
||||
if (key == null) {
|
||||
key = typeOrValue;
|
||||
@ -302,6 +314,23 @@ public final class ValuesGenerator {
|
||||
.type();
|
||||
}
|
||||
|
||||
public Value typeNoWrap() {
|
||||
return v("typeNoWrap", """
|
||||
import Standard.Base.Data.Vector.No_Wrap
|
||||
""", "No_Wrap")
|
||||
.type();
|
||||
}
|
||||
|
||||
public Value typeProblemBehavior() {
|
||||
return v(
|
||||
"typeProblemBehavior",
|
||||
"""
|
||||
import Standard.Base.Errors.Problem_Behavior.Problem_Behavior
|
||||
""",
|
||||
"Problem_Behavior")
|
||||
.type();
|
||||
}
|
||||
|
||||
public Value typeMap() {
|
||||
return v(
|
||||
"typeMap",
|
||||
@ -823,8 +852,25 @@ public final class ValuesGenerator {
|
||||
collect.add(typeNothing());
|
||||
}
|
||||
|
||||
if (languages.contains(Language.JAVA)) {}
|
||||
return collect;
|
||||
}
|
||||
|
||||
public List<Value> problemBehaviors() {
|
||||
var collect = new ArrayList<Value>();
|
||||
if (languages.contains(Language.ENSO)) {
|
||||
var prelude = "import Standard.Base.Errors.Problem_Behavior.Problem_Behavior";
|
||||
collect.add(v(null, prelude, "Problem_Behavior.Report_Error").type());
|
||||
collect.add(v(null, prelude, "Problem_Behavior.Ignore").type());
|
||||
}
|
||||
return collect;
|
||||
}
|
||||
|
||||
public List<Value> noWrap() {
|
||||
var collect = new ArrayList<Value>();
|
||||
if (languages.contains(Language.ENSO)) {
|
||||
var prelude = "import Standard.Base.Data.Vector.No_Wrap";
|
||||
collect.add(v(null, prelude, "No_Wrap.Value").type());
|
||||
}
|
||||
return collect;
|
||||
}
|
||||
|
||||
|
@@ -0,0 +1,19 @@
package org.enso.interpreter.node.expression.builtin.error;

import java.util.List;
import org.enso.interpreter.dsl.BuiltinType;
import org.enso.interpreter.node.expression.builtin.UniquelyConstructibleBuiltin;

@BuiltinType
public class AdditionalWarnings extends UniquelyConstructibleBuiltin {

@Override
protected String getConstructorName() {
return "Error";
}

@Override
protected List<String> getConstructorParamNames() {
return List.of("count");
}
}
@@ -0,0 +1,19 @@
package org.enso.interpreter.node.expression.builtin.error;

import java.util.List;
import org.enso.interpreter.dsl.BuiltinType;
import org.enso.interpreter.node.expression.builtin.UniquelyConstructibleBuiltin;

@BuiltinType(name = "Standard.Base.Data.Vector.No_Wrap")
public class NoWrap extends UniquelyConstructibleBuiltin {

@Override
protected String getConstructorName() {
return "Value";
}

@Override
protected List<String> getConstructorParamNames() {
return List.of();
}
}
@@ -0,0 +1,27 @@
package org.enso.interpreter.node.expression.builtin.error;

import java.util.List;
import org.enso.interpreter.dsl.BuiltinType;
import org.enso.interpreter.node.expression.builtin.Builtin;
import org.enso.interpreter.runtime.data.atom.AtomConstructor;

@BuiltinType(name = "Standard.Base.Errors.Problem_Behavior.Problem_Behavior")
public class ProblemBehavior extends Builtin {

@Override
protected List<Cons> getDeclaredConstructors() {
return List.of(new Cons("Ignore"), new Cons("Report_Warning"), new Cons("Report_Error"));
}

public AtomConstructor getIgnore() {
return getConstructors()[0];
}

public AtomConstructor getReportWarning() {
return getConstructors()[1];
}

public AtomConstructor getReportError() {
return getConstructors()[2];
}
}
@ -32,7 +32,10 @@ import org.enso.interpreter.node.expression.builtin.BuiltinRootNode;
|
||||
import org.enso.interpreter.node.expression.builtin.Nothing;
|
||||
import org.enso.interpreter.node.expression.builtin.Polyglot;
|
||||
import org.enso.interpreter.node.expression.builtin.debug.Debug;
|
||||
import org.enso.interpreter.node.expression.builtin.error.AdditionalWarnings;
|
||||
import org.enso.interpreter.node.expression.builtin.error.CaughtPanic;
|
||||
import org.enso.interpreter.node.expression.builtin.error.NoWrap;
|
||||
import org.enso.interpreter.node.expression.builtin.error.ProblemBehavior;
|
||||
import org.enso.interpreter.node.expression.builtin.error.Warning;
|
||||
import org.enso.interpreter.node.expression.builtin.immutable.Vector;
|
||||
import org.enso.interpreter.node.expression.builtin.io.File;
|
||||
@ -117,6 +120,9 @@ public final class Builtins {
|
||||
private final Builtin timeOfDay;
|
||||
private final Builtin timeZone;
|
||||
private final Builtin warning;
|
||||
private final NoWrap noWrap;
|
||||
private final ProblemBehavior problemBehavior;
|
||||
private final AdditionalWarnings additionalWarnings;
|
||||
|
||||
/**
|
||||
* Creates an instance with builtin methods installed.
|
||||
@ -168,6 +174,9 @@ public final class Builtins {
|
||||
timeOfDay = builtins.get(org.enso.interpreter.node.expression.builtin.date.TimeOfDay.class);
|
||||
timeZone = builtins.get(org.enso.interpreter.node.expression.builtin.date.TimeZone.class);
|
||||
warning = builtins.get(Warning.class);
|
||||
noWrap = getBuiltinType(NoWrap.class);
|
||||
problemBehavior = getBuiltinType(ProblemBehavior.class);
|
||||
additionalWarnings = getBuiltinType(AdditionalWarnings.class);
|
||||
|
||||
error = new Error(this, context);
|
||||
system = new System(this);
|
||||
@ -602,6 +611,21 @@ public final class Builtins {
|
||||
return warning.getType();
|
||||
}
|
||||
|
||||
/** Returns the {@code Problem_Behavior} type. */
|
||||
public ProblemBehavior problemBehavior() {
|
||||
return problemBehavior;
|
||||
}
|
||||
|
||||
/** Returns the {@code No_Wrap} atom constructor. */
|
||||
public NoWrap noWrap() {
|
||||
return noWrap;
|
||||
}
|
||||
|
||||
/** Returns the {@code Additional_Warnings} atom constructor. */
|
||||
public AdditionalWarnings additionalWarnings() {
|
||||
return additionalWarnings;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the {@code File} atom constructor.
|
||||
*
|
||||
|
@ -1,18 +1,11 @@
|
||||
package org.enso.interpreter.runtime.data.vector;
|
||||
|
||||
import com.oracle.truffle.api.CompilerDirectives;
|
||||
import com.oracle.truffle.api.dsl.Cached;
|
||||
import com.oracle.truffle.api.frame.VirtualFrame;
|
||||
import com.oracle.truffle.api.interop.InteropLibrary;
|
||||
import com.oracle.truffle.api.library.CachedLibrary;
|
||||
import java.nio.ByteBuffer;
|
||||
import org.enso.interpreter.dsl.Builtin;
|
||||
import org.enso.interpreter.node.callable.dispatch.InvokeFunctionNode;
|
||||
import org.enso.interpreter.runtime.callable.function.Function;
|
||||
import org.enso.interpreter.runtime.data.EnsoObject;
|
||||
import org.enso.interpreter.runtime.error.DataflowError;
|
||||
import org.enso.interpreter.runtime.state.State;
|
||||
import org.enso.interpreter.runtime.warning.WarningsLibrary;
|
||||
|
||||
/** Publicly available operations on array-like classes. */
|
||||
@Builtin(pkg = "immutable", stdlibName = "Standard.Base.Internal.Array_Like_Helpers")
|
||||
@ -75,31 +68,6 @@ public final class ArrayLikeHelpers {
|
||||
return Array.allocate(size);
|
||||
}
|
||||
|
||||
@Builtin.Method(
|
||||
name = "vector_from_function",
|
||||
description = "Creates new Vector with given length and provided elements.",
|
||||
autoRegister = false)
|
||||
@Builtin.Specialize()
|
||||
@SuppressWarnings("generic-enso-builtin-type")
|
||||
public static Object vectorFromFunction(
|
||||
VirtualFrame frame,
|
||||
long length,
|
||||
Function fun,
|
||||
State state,
|
||||
@Cached("buildWithArity(1)") InvokeFunctionNode invokeFunctionNode,
|
||||
@CachedLibrary(limit = "3") WarningsLibrary warnings) {
|
||||
var len = Math.toIntExact(length);
|
||||
var target = ArrayBuilder.newBuilder(len);
|
||||
for (int i = 0; i < len; i++) {
|
||||
var value = invokeFunctionNode.execute(fun, frame, state, new Long[] {(long) i});
|
||||
if (value instanceof DataflowError) {
|
||||
return value;
|
||||
}
|
||||
target.add(value, warnings);
|
||||
}
|
||||
return target.asVector(true);
|
||||
}
|
||||
|
||||
@Builtin.Method(
|
||||
name = "vector_to_array",
|
||||
description = "Returns an Array representation of this Vector.")
|
||||
|
@ -0,0 +1,162 @@
|
||||
package org.enso.interpreter.runtime.data.vector;
|
||||
|
||||
import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary;
|
||||
import com.oracle.truffle.api.dsl.Cached;
|
||||
import com.oracle.truffle.api.dsl.Specialization;
|
||||
import com.oracle.truffle.api.frame.VirtualFrame;
|
||||
import com.oracle.truffle.api.library.CachedLibrary;
|
||||
import com.oracle.truffle.api.nodes.Node;
|
||||
import com.oracle.truffle.api.profiles.BranchProfile;
|
||||
import com.oracle.truffle.api.profiles.LoopConditionProfile;
|
||||
import org.enso.interpreter.dsl.BuiltinMethod;
|
||||
import org.enso.interpreter.node.callable.dispatch.InvokeFunctionNode;
|
||||
import org.enso.interpreter.runtime.EnsoContext;
|
||||
import org.enso.interpreter.runtime.callable.function.Function;
|
||||
import org.enso.interpreter.runtime.data.atom.Atom;
|
||||
import org.enso.interpreter.runtime.data.atom.AtomConstructor;
|
||||
import org.enso.interpreter.runtime.error.DataflowError;
|
||||
import org.enso.interpreter.runtime.error.PanicException;
|
||||
import org.enso.interpreter.runtime.state.HasContextEnabledNode;
|
||||
import org.enso.interpreter.runtime.state.State;
|
||||
import org.enso.interpreter.runtime.warning.AppendWarningNode;
|
||||
import org.enso.interpreter.runtime.warning.Warning;
|
||||
import org.enso.interpreter.runtime.warning.WarningsLibrary;
|
||||
|
||||
@BuiltinMethod(
|
||||
type = "Array_Like_Helpers",
|
||||
name = "vector_from_function",
|
||||
description = "Creates a vector from a function.")
|
||||
public abstract class VectorFromFunctionNode extends Node {
|
||||
public static VectorFromFunctionNode build() {
|
||||
return VectorFromFunctionNodeGen.create();
|
||||
}
|
||||
|
||||
private static final int MAX_MAP_WARNINGS = 10;
|
||||
|
||||
/**
|
||||
* @param length Length of the vector to create.
|
||||
* @param func Callback function called with index as argument.
|
||||
* @param onProblems Can be either an atom of type {@code Problem_Behavior} or {@code No_Wrap}
|
||||
* type.
|
||||
* @return Vector constructed from the given function.
|
||||
*/
|
||||
abstract Object execute(
|
||||
VirtualFrame frame, State state, long length, Function func, Object onProblems);
|
||||
|
||||
@Specialization(
|
||||
guards = "getCtor(onProblemsAtom) == onProblemsAtomCtorCached",
|
||||
limit = "onProblemsCtorsCount()")
|
||||
Object doItCached(
|
||||
VirtualFrame frame,
|
||||
State state,
|
||||
long length,
|
||||
Function func,
|
||||
Atom onProblemsAtom,
|
||||
@Cached("getCtor(onProblemsAtom)") AtomConstructor onProblemsAtomCtorCached,
|
||||
@Cached("processOnProblemsArg(onProblemsAtomCtorCached)") OnProblems onProblems,
|
||||
@Cached("buildWithArity(1)") InvokeFunctionNode invokeFunctionNode,
|
||||
@Cached("build()") AppendWarningNode appendWarningNode,
|
||||
@CachedLibrary(limit = "3") WarningsLibrary warnsLib,
|
||||
@Cached BranchProfile errorEncounteredProfile,
|
||||
@Cached HasContextEnabledNode hasContextEnabledNode,
|
||||
@Cached LoopConditionProfile loopConditionProfile) {
|
||||
var ctx = EnsoContext.get(this);
|
||||
var len = (int) length;
|
||||
var nothing = ctx.getNothing();
|
||||
var target = ArrayBuilder.newBuilder(len);
|
||||
var errorsEncountered = 0;
|
||||
loopConditionProfile.profileCounted(len);
|
||||
for (int i = 0; loopConditionProfile.inject(i < len); i++) {
|
||||
var value = invokeFunctionNode.execute(func, frame, state, new Long[] {(long) i});
|
||||
Object valueToAdd = value;
|
||||
if (value instanceof DataflowError err) {
|
||||
errorEncounteredProfile.enter();
|
||||
switch (onProblems) {
|
||||
case IGNORE -> valueToAdd = nothing;
|
||||
case REPORT_ERROR -> {
|
||||
var mapErr = ctx.getBuiltins().error().makeMapError(i, err.getPayload());
|
||||
return DataflowError.withDefaultTrace(state, mapErr, this, hasContextEnabledNode);
|
||||
}
|
||||
case REPORT_WARNING -> {
|
||||
errorsEncountered++;
|
||||
if (errorsEncountered > MAX_MAP_WARNINGS) {
|
||||
valueToAdd = nothing;
|
||||
} else {
|
||||
var wrappedInWarn =
|
||||
Warning.attach(ctx, nothing, err.getPayload(), null, appendWarningNode);
|
||||
valueToAdd = wrappedInWarn;
|
||||
}
|
||||
}
|
||||
case NO_WRAP -> {
|
||||
return err;
|
||||
}
|
||||
}
|
||||
}
|
||||
target.add(valueToAdd, warnsLib);
|
||||
}
|
||||
var vector = target.asVector(true);
|
||||
if (errorsEncountered >= MAX_MAP_WARNINGS) {
|
||||
var additionalWarnsBuiltin = ctx.getBuiltins().additionalWarnings();
|
||||
long additionalWarnsCnt = errorsEncountered - MAX_MAP_WARNINGS;
|
||||
var additionalWarns = additionalWarnsBuiltin.newInstance(additionalWarnsCnt);
|
||||
var vecWithAdditionalWarns =
|
||||
Warning.attach(ctx, vector, additionalWarns, null, appendWarningNode);
|
||||
return vecWithAdditionalWarns;
|
||||
} else {
|
||||
return vector;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Unreachable: The {@code doItCached} specialization has the same limit of instantiations as
|
||||
* there are possible onProblems arguments. So this specialization is only reached if {@code
|
||||
* onProblems} argument is an unexpected type.
|
||||
*
|
||||
* @return Just throws Type_Error dataflow error.
|
||||
*/
|
||||
@Specialization(replaces = "doItCached")
|
||||
Object unreachable(
|
||||
VirtualFrame frame, State state, long length, Function func, Object onProblems) {
|
||||
var problemBehaviorBuiltin = EnsoContext.get(this).getBuiltins().problemBehavior();
|
||||
throw makeTypeError(problemBehaviorBuiltin.getType(), onProblems, "onProblems");
|
||||
}
|
||||
|
||||
protected OnProblems processOnProblemsArg(AtomConstructor onProblems) {
|
||||
var ctx = EnsoContext.get(this);
|
||||
var problemBehaviorBuiltin = ctx.getBuiltins().problemBehavior();
|
||||
var noWrapBuiltin = ctx.getBuiltins().noWrap();
|
||||
if (onProblems == problemBehaviorBuiltin.getIgnore()) {
|
||||
return OnProblems.IGNORE;
|
||||
} else if (onProblems == problemBehaviorBuiltin.getReportError()) {
|
||||
return OnProblems.REPORT_ERROR;
|
||||
} else if (onProblems == problemBehaviorBuiltin.getReportWarning()) {
|
||||
return OnProblems.REPORT_WARNING;
|
||||
} else if (onProblems == noWrapBuiltin.getUniqueConstructor()) {
|
||||
return OnProblems.NO_WRAP;
|
||||
}
|
||||
throw makeTypeError(problemBehaviorBuiltin.getType(), onProblems, "onProblems");
|
||||
}
|
||||
|
||||
protected static AtomConstructor getCtor(Atom atom) {
|
||||
return atom.getConstructor();
|
||||
}
|
||||
|
||||
protected static int onProblemsCtorsCount() {
|
||||
return OnProblems.values().length;
|
||||
}
|
||||
|
||||
@TruffleBoundary
|
||||
private PanicException makeTypeError(Object expected, Object actual, String name) {
|
||||
var ctx = EnsoContext.get(this);
|
||||
var typeError = ctx.getBuiltins().error().makeTypeError(expected, actual, name);
|
||||
return new PanicException(typeError, this);
|
||||
}
|
||||
|
||||
/** All the possible values for the {@code onProblems} argument. */
|
||||
protected enum OnProblems {
|
||||
IGNORE,
|
||||
REPORT_ERROR,
|
||||
REPORT_WARNING,
|
||||
NO_WRAP
|
||||
}
|
||||
}
|
@ -3,6 +3,7 @@ package org.enso.interpreter.runtime.state;
|
||||
import com.oracle.truffle.api.CompilerDirectives;
|
||||
import com.oracle.truffle.api.dsl.Cached;
|
||||
import com.oracle.truffle.api.dsl.GenerateUncached;
|
||||
import com.oracle.truffle.api.dsl.NeverDefault;
|
||||
import com.oracle.truffle.api.dsl.Specialization;
|
||||
import com.oracle.truffle.api.nodes.Node;
|
||||
import org.enso.interpreter.runtime.EnsoContext;
|
||||
@ -18,6 +19,7 @@ public abstract class HasContextEnabledNode extends Node {
|
||||
return HasContextEnabledNodeGen.getUncached();
|
||||
}
|
||||
|
||||
@NeverDefault
|
||||
public static HasContextEnabledNode create() {
|
||||
return HasContextEnabledNodeGen.create();
|
||||
}
|
||||
|
@ -4,6 +4,7 @@ import com.oracle.truffle.api.CompilerDirectives;
|
||||
import com.oracle.truffle.api.dsl.Cached;
|
||||
import com.oracle.truffle.api.dsl.Cached.Shared;
|
||||
import com.oracle.truffle.api.dsl.GenerateUncached;
|
||||
import com.oracle.truffle.api.dsl.NeverDefault;
|
||||
import com.oracle.truffle.api.dsl.Specialization;
|
||||
import com.oracle.truffle.api.frame.VirtualFrame;
|
||||
import com.oracle.truffle.api.interop.InteropLibrary;
|
||||
@ -24,6 +25,7 @@ import org.enso.interpreter.runtime.error.DataflowError;
|
||||
@GenerateUncached
|
||||
public abstract class AppendWarningNode extends Node {
|
||||
|
||||
@NeverDefault
|
||||
public static AppendWarningNode build() {
|
||||
return AppendWarningNodeGen.create();
|
||||
}
|
||||
|
@ -389,7 +389,7 @@ type_spec suite_builder name alter = suite_builder.group name group_builder->
|
||||
|
||||
group_builder.specify "should allow map on_problems=No_Wrap, returning a new vector" <|
|
||||
vec = alter [1, 2, 3, 4]
|
||||
mapped = vec.map on_problems=No_Wrap x-> x * x
|
||||
mapped = vec.map on_problems=No_Wrap.Value x-> x * x
|
||||
vec.to_text.should_equal "[1, 2, 3, 4]"
|
||||
mapped.to_text.should_equal "[1, 4, 9, 16]"
|
||||
|
||||
@ -801,11 +801,11 @@ type_spec suite_builder name alter = suite_builder.group name group_builder->
|
||||
alter [] . zip [4, 5, 6] (+) . should_equal []
|
||||
|
||||
group_builder.specify "should zip elements with zip on_problems=No_Wrap" <|
|
||||
alter [1, 2, 3] . zip on_problems=No_Wrap [] (+) . should_equal []
|
||||
alter [1, 2, 3] . zip on_problems=No_Wrap [4] (+) . should_equal [5]
|
||||
alter [1, 2, 3] . zip on_problems=No_Wrap [4, 5, 6] (+) . should_equal [5, 7, 9]
|
||||
alter [1, 2, 3] . zip on_problems=No_Wrap [4, 5, 6, 7] (+) . should_equal [5, 7, 9]
|
||||
alter [] . zip on_problems=No_Wrap [4, 5, 6] (+) . should_equal []
|
||||
alter [1, 2, 3] . zip on_problems=No_Wrap.Value [] (+) . should_equal []
|
||||
alter [1, 2, 3] . zip on_problems=No_Wrap.Value [4] (+) . should_equal [5]
|
||||
alter [1, 2, 3] . zip on_problems=No_Wrap.Value [4, 5, 6] (+) . should_equal [5, 7, 9]
|
||||
alter [1, 2, 3] . zip on_problems=No_Wrap.Value [4, 5, 6, 7] (+) . should_equal [5, 7, 9]
|
||||
alter [] . zip on_problems=No_Wrap.Value [4, 5, 6] (+) . should_equal []
|
||||
|
||||
group_builder.specify "should flat_map elements" <|
|
||||
alter [1, 2, 3] . flat_map (_ -> []) . should_equal []
|
||||
@ -815,11 +815,11 @@ type_spec suite_builder name alter = suite_builder.group name group_builder->
|
||||
alter [0, 0, 0] . flat_map (i -> [i]) . should_equal [0, 0, 0]
|
||||
|
||||
group_builder.specify "should flat_map elements with flat_map on_problems=No_Wrap" <|
|
||||
alter [1, 2, 3] . flat_map on_problems=No_Wrap (_ -> []) . should_equal []
|
||||
alter [1, 2, 3] . flat_map on_problems=No_Wrap (_ -> [0, 1]) . should_equal [0, 1, 0, 1, 0, 1]
|
||||
alter [1, 2, 3] . flat_map on_problems=No_Wrap (_ -> [0, [1]]) . should_equal [0, [1], 0, [1], 0, [1]]
|
||||
alter [0, 1, 0] . flat_map on_problems=No_Wrap (i -> if i == 1 then [1, 1] else [i]) . should_equal [0, 1, 1, 0]
|
||||
alter [0, 0, 0] . flat_map on_problems=No_Wrap (i -> [i]) . should_equal [0, 0, 0]
|
||||
alter [1, 2, 3] . flat_map on_problems=No_Wrap.Value (_ -> []) . should_equal []
|
||||
alter [1, 2, 3] . flat_map on_problems=No_Wrap.Value (_ -> [0, 1]) . should_equal [0, 1, 0, 1, 0, 1]
|
||||
alter [1, 2, 3] . flat_map on_problems=No_Wrap.Value (_ -> [0, [1]]) . should_equal [0, [1], 0, [1], 0, [1]]
|
||||
alter [0, 1, 0] . flat_map on_problems=No_Wrap.Value (i -> if i == 1 then [1, 1] else [i]) . should_equal [0, 1, 1, 0]
|
||||
alter [0, 0, 0] . flat_map on_problems=No_Wrap.Value (i -> [i]) . should_equal [0, 0, 0]
|
||||
|
||||
group_builder.specify "should allow inserting elements" <|
|
||||
alter [2, 3] . insert . should_equal [2, 3, Nothing]
|
||||
@ -910,11 +910,11 @@ type_spec suite_builder name alter = suite_builder.group name group_builder->
|
||||
alter [10, 20, 30, 40] . map map_fun . should_fail_with (Map_Error.Error 2 (My_Error.Error 30)) unwrap_errors=False
|
||||
|
||||
group_builder.specify "an error thrown inside map on_problems=No_Wrap should be caught as a My_Error" <|
|
||||
alter [10, 20, 30, 40] . map on_problems=No_Wrap map_fun . should_fail_with My_Error
|
||||
alter [10, 20, 30, 40] . map on_problems=No_Wrap.Value map_fun . should_fail_with My_Error
|
||||
|
||||
group_builder.specify "an error thrown inside map_with_index on_problems=No_Wrap should be caught as a My_Error" <|
|
||||
map_with_index_fun _ a = if a == 30 then Error.throw (My_Error.Error a) else a+1
|
||||
alter [10, 20, 30, 40] . map_with_index on_problems=No_Wrap map_with_index_fun . should_fail_with My_Error
|
||||
alter [10, 20, 30, 40] . map_with_index on_problems=No_Wrap.Value map_with_index_fun . should_fail_with My_Error
|
||||
|
||||
group_builder.specify "an error thrown inside map and caught (without error parameter) should be caught as a Map_Error" <|
|
||||
alter [10, 20, 30, 40] . map map_fun . catch . should_equal (Map_Error.Error 2 (My_Error.Error 30))
|
||||
@ -972,16 +972,16 @@ type_spec suite_builder name alter = suite_builder.group name group_builder->
        Warning.get_all result wrap_errors=True . map .value . should_equal expected_warnings

    group_builder.specify "map on_problems=No_Wrap does not do error wrapping" <|
        alter [10, 20, 30, 40] . map on_problems=No_Wrap map_fun . catch . should_equal (My_Error.Error 30)
        alter [10, 20, 30, 40] . map on_problems=No_Wrap.Value map_fun . catch . should_equal (My_Error.Error 30)

    group_builder.specify "zip on_problems=No_Wrap does not do error wrapping" <|
        zip_fun a _ = if a == 30 then Error.throw (My_Error.Error a) else a+1
        arr = alter [10, 20, 30, 40]
        arr . zip on_problems=No_Wrap arr zip_fun . catch . should_equal (My_Error.Error 30)
        arr . zip on_problems=No_Wrap.Value arr zip_fun . catch . should_equal (My_Error.Error 30)

    group_builder.specify "flat_map on_problems=No_Wrap does not do error wrapping" <|
        vec = alter [1, 2, 0, 3]
        vec.flat_map on_problems=No_Wrap (n-> Error.throw (My_Error.Error n)) . catch . should_equal (My_Error.Error 1)
        vec.flat_map on_problems=No_Wrap.Value (n-> Error.throw (My_Error.Error n)) . catch . should_equal (My_Error.Error 1)

    group_builder.specify "can transpose a vector of vectors" <|
        mat = alter [alter [0, 1, 2], alter [3, 4, 5], alter [6, 7, 8]]

@ -78,6 +78,7 @@ import project.Runtime.Missing_Required_Arguments_Spec
import project.Runtime.Ref_Spec
import project.Runtime.State_Spec
import project.Runtime.Stack_Traces_Spec
import project.Runtime.Stack_Size_Spec

import project.System.Environment_Spec
import project.System.File_Spec
@ -160,6 +161,7 @@ main filter=Nothing =
    Missing_Required_Arguments_Spec.add_specs suite_builder
    Lazy_Generator_Spec.add_specs suite_builder
    Stack_Traces_Spec.add_specs suite_builder
    Stack_Size_Spec.add_specs suite_builder
    Text_Spec.add_specs suite_builder
    Time_Spec.add_specs suite_builder
    URI_Spec.add_specs suite_builder

test/Base_Tests/src/Runtime/Stack_Size_Spec.enso (new file, 137 lines)
@ -0,0 +1,137 @@
# Tests regression of the overall stack trace size when calling nested
# `Vector.map`. It is tested by invoking a subprocess on generated code
# that contains `n` nested `Vector.map` calls.
# The subprocess has the Truffle compiler disabled with `-Dpolyglot.engine.Compilation=false`
# to ensure there are no (Java) stack frames dropped. Moreover, we explicitly
# set `-XX:MaxJavaStackTraceDepth=...` for the subprocess to overcome
# the default length (1024) of `RuntimeException.getStackTrace`, which is too low.
#
# The test runs two subprocesses with different nesting and computes the
# difference of the Java stack sizes. This difference must not exceed a certain limit.

private

from Standard.Base import all
import Standard.Base.Runtime.Ref.Ref
import Standard.Base.System.Process.Process_Builder.Process_Result

from Standard.Test import all

## Find the Enso binary under the `built-distribution` directory
enso_bin -> File =
    find_prefix dir prefix =
        vec = dir.list name_filter=prefix+"*"
        if vec.length == 1 then vec.at 0 else
            msg = "Cannot find " + prefix + "* in " + dir.to_text + '\n'
            err = dir.list.fold msg t-> f->
                t + f.to_text + '\n'
            Panic.throw err

    project_root = File.new enso_project.root.to_text
    repository_root = project_root . parent . parent
    built_distribution = find_prefix repository_root "built-distribution"
    enso_engine = find_prefix built_distribution "enso-engine-"
    enso = find_prefix enso_engine "enso-"
    bin = find_prefix enso "bin"

    exe = File.new bin / if Platform.os == Platform.OS.Windows then "enso.bat" else "enso"

    if exe.is_regular_file.not then Panic.throw "Cannot find "+exe.to_text

    exe

## Generates code for mapping over a vector with the given nesting level.
   Returns code of the main method that is meant to be pasted into a separate module.
   The code prints the count of Java frames to stdout in the deepest `Vector.map` call.

   An example of the generated code (for nesting_level=2):
   ```
   main =
       vec = [[42]]
       vec.map e0->
           e0.map e1->
               cnt = RuntimeException.new.getStackTrace.length
               IO.println 'java_stack_frames='+cnt.to_text
   ```

   Arguments:
   - nesting_level: How many times the vector should be nested.
generate_code nesting_level:Integer -> Text =
    bldr = Vector.Builder.new
    bldr.append "from Standard.Base import all"
    bldr.append "import Standard.Base.Internal.Extra_Imports.RuntimeException"
    bldr.append '\n'
    bldr.append <| "main = "
    bldr.append <| "    "
        + "vec = "
        + ("[" * nesting_level)
        + "42"
        + ("]" * nesting_level)
    bldr.append <| "    "
        + "vec.map e0->"
    0.up_to (nesting_level - 1) . each \i ->
        bldr.append <| ("    " * (i + 2))
            + "e"
            + i.to_text
            + ".map e"
            + (i + 1).to_text
            + "-> "
    bldr.append <| ("    " * (nesting_level + 1))
        + "cnt = RuntimeException.new.getStackTrace.length"
    bldr.append <| ("    " * (nesting_level + 1))
        + "IO.println <| 'java_stack_frames=' + cnt.to_text"
        + '\n'
    bldr.to_vector.reduce \first_line:Text second_line:Text ->
        first_line + '\n' + second_line

## Runs an Enso subprocess with the Truffle compiler disabled and with a larger
   limit on the number of collected stack trace elements, which is needed for
   `new RuntimeException().getStackTrace().length` as this value is by default
   capped at 1024.

   The thread stack size is also set to a sufficiently large value
   to ensure there is no StackOverflow.
run_without_compiler enso_args:Vector -> Process_Result =
    java_opts = "-Dpolyglot.engine.Compilation=false "
        + "-XX:MaxJavaStackTraceDepth=18000 "
        + "-Xss16M"
    args = ["JAVA_OPTS="+java_opts, enso_bin.path] + enso_args
    Process.run "env" args

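# Illustration only: with the options above, one measurement corresponds roughly to
# an invocation of the following shape (the paths and the temporary file name are
# hypothetical, shown here just to make visible how `java_opts` and `enso_bin` are used):
#
#   env JAVA_OPTS='-Dpolyglot.engine.Compilation=false -XX:MaxJavaStackTraceDepth=18000 -Xss16M' \
#       built-distribution/enso-engine-*/enso-*/bin/enso --run /tmp/generated.enso
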
## Runs Enso as a subprocess with the specified nesting level of `Vector.map` calls.
   Returns the count of Java stack frames from the deepest `Vector.map` call.

   Arguments:
   - nesting: Level of nesting of `Vector.map` method calls.
run nesting:Integer -> Integer =
    tmp_file = File.create_temporary_file suffix=".enso"
    code = generate_code nesting
    code.write tmp_file
    proc_res = run_without_compiler ["--run", tmp_file.path]
    # Find and parse the frame-count line from the process stdout
    j_frames_line = proc_res.stdout.split '\n' . find \line ->
        line.contains "java_stack_frames"
    j_frames_line.split '=' . last . parse_integer

only_on_linux = if System.os == "linux" then Nothing else "Test runs only on Linux"

add_specs suite_builder =
    suite_builder.group "Stack size" pending=only_on_linux \group_builder ->
        group_builder.specify "Java stack size of nested Vector.map should be kept reasonably low" <|
            nesting_10 = run 10
            nesting_11 = run 11
            stack_size = nesting_11 - nesting_10
            Test.with_clue ("Stack size of `Vector.map` should be low, but was " + stack_size.to_text + " ") <|
                (stack_size < 40) . should_be_true

main filter=Nothing =
    suite = Test.build \suite_builder ->
        add_specs suite_builder
    suite.run_with_filter filter
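
In effect, the spec above is a difference check: the two subprocess runs differ by exactly one level of `Vector.map` nesting, so subtracting the two reported counts cancels the fixed frames contributed by process startup and the test harness and leaves roughly the cost of a single `map` level, which must stay below 40. A minimal sketch of that arithmetic, with made-up placeholder counts rather than real measurements:
```
# Placeholder numbers, for illustration only - not actual measurements.
nesting_10 = 260                        # frames reported by `run 10`
nesting_11 = 285                        # frames reported by `run 11`
stack_size = nesting_11 - nesting_10    # 25
stack_size < 40                         # True, so the spec would pass
```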

@ -86,4 +86,5 @@ collect_benches = Bench.build builder->
        State.run Number 0 <| random_vec.each stateful_fun

main = collect_benches . run_main
main filter=Nothing =
    collect_benches . run_main filter