Mirror of https://github.com/enso-org/enso.git, synced 2024-11-23 08:08:34 +03:00
Use Vector.from_polyglot_array to make Vectors backed by polyglot arrays (#3628)
Use Proxy_Polyglot_Array as a proxy for polyglot arrays, thus unifying the way the underlying array is accessed in Vector. Used the opportunity to clean up builtin lookup, which now actually respects what is defined in the body of the @Builtin_Method annotation. Also discovered that polyglot null values (in JS, Python and R) were leaking into Enso; fixed that by explicitly translating them to `Nothing`. https://www.pivotaltracker.com/story/show/181123986
This commit is contained in:
parent 8c504bd4b0
commit 4b9c91626e
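A minimal usage sketch of the new API (assuming a JavaScript runtime is available; the foreign helper `make_js_array` is hypothetical, modeled on the foreign functions in the updated test specs below). It shows a vector backed directly by a polyglot array, with a polyglot null surfacing as `Nothing`:

    from Standard.Base import all

    foreign js make_js_array = """
        return [1, 2, null]

    example =
        # The vector wraps the polyglot array via Proxy_Polyglot_Array; no copy is made.
        vec = Vector.from_polyglot_array make_js_array
        vec.length    # 3
        vec.at 2      # Nothing, since polyglot nulls are now translated to Nothing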
@@ -177,6 +177,8 @@
type from `Standard.Table`.][3601]
- [Created `Index_Sub_Range` type and updated `Text.take` and
`Text.drop`.][3617]
- [Added `Vector.from_polyglot_array` to make `Vector`s backed by polyglot
Arrays][3628]
- [Updated `Vector.take` and `Vector.drop` and removed their obsolete
counterparts.][3629]
- [Short-hand syntax for `order_by` added.][3643]

@@ -282,6 +284,7 @@
[3593]: https://github.com/enso-org/enso/pull/3593
[3601]: https://github.com/enso-org/enso/pull/3601
[3617]: https://github.com/enso-org/enso/pull/3617
[3628]: https://github.com/enso-org/enso/pull/3628
[3629]: https://github.com/enso-org/enso/pull/3629
[3643]: https://github.com/enso-org/enso/pull/3643
[3644]: https://github.com/enso-org/enso/pull/3644

@@ -357,7 +360,8 @@
[3531]: https://github.com/enso-org/enso/pull/3531
[3562]: https://github.com/enso-org/enso/pull/3562
[3538]: https://github.com/enso-org/enso/pull/3538
[3538]: https://github.com/enso-org/enso/pull/3569
[3569]: https://github.com/enso-org/enso/pull/3569
[3578]: https://github.com/enso-org/enso/pull/3578
[3618]: https://github.com/enso-org/enso/pull/3618
[3608]: https://github.com/enso-org/enso/pull/3608
[3608]: https://github.com/enso-org/enso/pull/3608
@@ -545,7 +545,7 @@ type Decimal
4.736.ceil
ceil : Integer
ceil self = @Builtin_Method "Integer.ceil"
ceil self = @Builtin_Method "Decimal.ceil"

## Compares the two operands to determine the ordering of this with
respect to that.
@@ -207,7 +207,7 @@ calculate_correlation_statistics_matrix : [Vector] -> [CorrelationStatistics]
calculate_correlation_statistics_matrix data =
data_array = Vector.new data.length i->(data.at i).to_array . to_array
stats_array = wrap_java_call <| CorrelationStatistics.computeMatrix data_array
Vector.new stats_array.length i->(Vector.Vector (stats_array.at i))
Vector.new stats_array.length i->(Vector.from_polyglot_array (stats_array.at i))

## Compute a single statistic on the vector.

@@ -248,4 +248,4 @@ rank_data input method=Rank_Method.Average =
handle_classcast <| handle_nullpointer <|
java_ranks = Rank.rank input.to_array Comparator.new java_method
Vector.Vector java_ranks
Vector.from_polyglot_array java_ranks
@@ -9,7 +9,7 @@ polyglot java import org.enso.base.Text_Utils
all_character_sets : Vector.Vector Text
all_character_sets =
java_array = Charset.availableCharsets.keySet.toArray
Vector.Vector java_array
Vector.from_polyglot_array java_array

## Get all available Encodings.
all_encodings : Vector Encoding
@@ -364,7 +364,7 @@ Text.split : Text -> (Text_Matcher | Regex_Matcher) -> Vector.Vector Text
Text.split self delimiter="," matcher=Text_Matcher = if delimiter.is_empty then Error.throw (Illegal_Argument_Error "The delimiter cannot be empty.") else
case matcher of
Text_Matcher case_sensitivity ->
delimiters = Vector.Vector <| case case_sensitivity of
delimiters = Vector.from_polyglot_array <| case case_sensitivity of
True ->
Text_Utils.span_of_all self delimiter
Case_Insensitive locale ->

@@ -552,7 +552,7 @@ Text.words self keep_whitespace=False =
'\na\nb\n'.lines keep_endings=True == ['\n', 'a\n', 'b\n']
Text.lines : Boolean -> Vector.Vector Text
Text.lines self keep_endings=False =
Vector.Vector (Text_Utils.split_on_lines self keep_endings)
Vector.from_polyglot_array (Text_Utils.split_on_lines self keep_endings)

## Checks whether `self` is equal to `that`, ignoring the case of the texts.

@@ -739,7 +739,7 @@ Text.is_whitespace self =
Text.bytes : Encoding -> Problem_Behavior -> Vector.Vector Byte
Text.bytes self encoding on_problems=Report_Warning =
result = Encoding_Utils.get_bytes self (encoding . to_java_charset)
vector = Vector.Vector result.result
vector = Vector.from_polyglot_array result.result
if result.warnings.is_nothing then vector else
on_problems.attach_problems_after vector [Encoding_Error result.warnings]

@@ -817,7 +817,7 @@ Text.from_utf_8 bytes on_problems=Report_Warning =
"Hello".char_vector
Text.char_vector : Vector.Vector Integer
Text.char_vector self = Vector.Vector (Text_Utils.get_chars self)
Text.char_vector self = Vector.from_polyglot_array (Text_Utils.get_chars self)

## Takes a vector of characters and returns the text that results from it.

@@ -840,7 +840,7 @@ Text.from_char_vector chars = Text_Utils.from_chars chars.to_array
"Hello".codepoints
Text.codepoints : Vector.Vector Integer
Text.codepoints self = Vector.Vector (Text_Utils.get_codepoints self)
Text.codepoints self = Vector.from_polyglot_array (Text_Utils.get_codepoints self)

## Takes an array of numbers and returns the text resulting from interpreting it
as a sequence of Unicode codepoints.

@@ -1481,8 +1481,8 @@ Text.location_of_all : Text -> Matcher -> [Span]
Text.location_of_all self term="" matcher=Text_Matcher = case matcher of
Text_Matcher case_sensitive -> if term.is_empty then Vector.new (self.length + 1) (ix -> Span (Range ix ix) self) else case case_sensitive of
True ->
codepoint_spans = Vector.Vector <| Text_Utils.span_of_all self term
grahpeme_ixes = Vector.Vector <| Text_Utils.utf16_indices_to_grapheme_indices self (codepoint_spans.map .codeunit_start).to_array
codepoint_spans = Vector.from_polyglot_array <| Text_Utils.span_of_all self term
grahpeme_ixes = Vector.from_polyglot_array <| Text_Utils.utf16_indices_to_grapheme_indices self (codepoint_spans.map .codeunit_start).to_array
## While the codepoint_spans may have different code unit lengths
from our term, the `length` counted in grapheme clusters is
guaranteed to be the same.

@@ -1491,7 +1491,7 @@ Text.location_of_all self term="" matcher=Text_Matcher = case matcher of
end = start+offset
Span (Range start end) self
Case_Insensitive locale ->
grapheme_spans = Vector.Vector <| Text_Utils.span_of_all_case_insensitive self term locale.java_locale
grapheme_spans = Vector.from_polyglot_array <| Text_Utils.span_of_all_case_insensitive self term locale.java_locale
grapheme_spans.map grapheme_span->
Span (Range grapheme_span.grapheme_start grapheme_span.grapheme_end) self
Regex_Matcher _ _ _ _ _ ->
@@ -477,7 +477,7 @@ type Pattern
Mode_Error "Splitting on a bounded region is not well-defined."

splits = self.internal_pattern.split input limit
Vector.Vector splits
Vector.from_polyglot_array splits

## ADVANCED

@@ -683,7 +683,7 @@ type Match
match.named_groups default="UNMATCHED"
named_groups : (a : Any) -> Map Text (Text | a)
named_groups self default=Nothing =
group_names = Vector.Vector <|
group_names = Vector.from_polyglot_array <|
Regex_Utils.get_group_names self.internal_match.pattern
pairs = group_names.map name->
value = case self.group name of
@@ -2,6 +2,7 @@ from Standard.Base import all
import Standard.Base.Runtime.Ref
import Standard.Base.Runtime.Unsafe
from Standard.Base.Data.Index_Sub_Range import While, By_Index, Sample, Every
import Standard.Base.Polyglot.Proxy_Polyglot_Array
import Standard.Base.Random

## Creates a new vector of the given length, initializing elements using

@@ -86,15 +87,8 @@ new_builder (capacity=1) = Builder.new capacity
A vector allows to store an arbitrary number of elements in linear memory. It
is the recommended data structure for most applications.

! Value Copying
As Enso vectors implement immutable semantics, this constructor function
makes a copy of each value in the argument array.

If this didn't happen then it would be possible for the underlying array to
be mutated under the hood, and sneak mutability into our immutable data.
from_array : Any -> Vector Any
from_array arr = new (Polyglot.get_array_size arr) (arr.at _)
from_polyglot_array : Any -> Vector Any
from_polyglot_array arr = Vector (Proxy_Polyglot_Array.Proxy_Polyglot_Array arr)

## The basic, immutable, vector type.
type Vector

@@ -104,7 +98,7 @@ type Vector
The basic, immutable, vector type.

Arguments:
- to_array: The underlying array.
- storage: The underlying storage.

A vector allows to store an arbitrary number of elements, in linear memory.
It is the recommended data structure for most applications.

@@ -117,7 +111,18 @@ type Vector
A vector containing 50 elements, each being the number `42`, can be
created by:
Vector.fill length=50 item=42
type Vector to_array
type Vector storage

to_array self =
arr = self.storage.to_array
case arr of
Array ->
arr
_ ->
len = self.storage.length
a = Array.new len
Array.copy arr 0 a 0 len
a

## Returns the number of elements stored in this vector.

@@ -126,7 +131,8 @@ type Vector
[1, 2, 3, 4].length
length : Number
length self = Polyglot.get_array_size self.to_array
length self =
self.storage.length

## Gets an element from the vector at a specified index (0-based).

@@ -159,7 +165,7 @@ type Vector
bounds or with additional error handling.
unsafe_at : Integer -> Any
unsafe_at self index =
self.to_array.at index
self.storage.at index

## Combines all the elements of the vector, by iteratively applying the
passed function with next elements of the vector.

@@ -179,8 +185,7 @@ type Vector
[0, 1, 2] . fold 0 (+)
fold : Any -> (Any -> Any -> Any) -> Any
fold self init function =
arr = self.to_array
f = acc -> ix -> function acc (arr.at ix)
f = acc -> ix -> function acc (self.storage.at ix)
0.up_to self.length . fold init f

## Combines all the elements of the vector, by iteratively applying the

@@ -194,11 +199,10 @@ type Vector
> Example
Compute the sum of all of the elements and indexes in a vector.

[0, 1, 2] . fold 0 (s->i->e->s+i+e)
[0, 1, 2] . fold_with_index 0 (s->i->e->s+i+e)
fold_with_index : Any -> (Any -> Integer -> Any -> Any) -> Any
fold_with_index self init function =
arr = self.to_array
f = acc -> ix -> function acc ix (arr.at ix)
f = acc -> ix -> function acc ix (self.storage.at ix)
0.up_to self.length . fold init f

## Combines all the elements of a non-empty vector using a binary operation.

@@ -216,8 +220,7 @@ type Vector
reduce self function =
case self.not_empty of
True -> if self.length == 1 then self.unsafe_at 0 else
arr = self.to_array
f = acc -> ix -> function acc (arr.at ix)
f = acc -> ix -> function acc (self.storage.at ix)
1.up_to self.length . fold (self.unsafe_at 0) f
False -> Error.throw Empty_Error
@@ -28,7 +28,7 @@ type Response
example_headers = Examples.get_response.headers
headers : Vector.Vector
headers self =
header_entries = Vector.Vector (Http_Utils.get_headers self.internal_http_response.headers)
header_entries = Vector.from_polyglot_array (Http_Utils.get_headers self.internal_http_response.headers)
header_entries.map e-> Header.new e.getKey e.getValue

## Get the response body.

@@ -41,7 +41,7 @@ type Response
example_body = Examples.get_response.body
body : Response_Body
body self = Response_Body.Body (Vector.Vector self.internal_http_response.body)
body self = Response_Body.Body (Vector.from_polyglot_array self.internal_http_response.body)

## Get the response status code.
@@ -15,6 +15,13 @@ type Polyglot
get_array_size : Any -> Integer
get_array_size array = @Builtin_Method "Polyglot.get_array_size"

## Reads the element in a given polyglot array object.

Arguments:
- index: The index to get the element from.
read_array_element : Any -> Integer -> Any
read_array_element array index = @Builtin_Method "Polyglot.read_array_element"

## Executes a polyglot function object (e.g. a lambda).

Arguments:
@@ -0,0 +1,24 @@
from Standard.Base import Polyglot, Array

## Advanced

Wrapper for Polyglot Arrays
type Proxy_Polyglot_Array

type Proxy_Polyglot_Array arr

## Returns the number of elements stored in this Polyglot Array.

length : Number
length self =
Polyglot.get_array_size self.arr

## Gets an element from this Polyglot Array at a specified index (0-based).

at : Number -> Any
at self index =
Polyglot.read_array_element self.arr index

to_array : Array Any
to_array self =
self.arr
@@ -28,7 +28,7 @@ type Random_Number_Generator
sample : Vector Any -> Integer -> Random_Number_Generator -> Vector Any
sample vector k rng =
new_array = Random_Utils.sample vector.to_array k rng.java_random
Vector.Vector new_array
Vector.from_polyglot_array new_array

## Returns `k` indices sampled from the range [0, n-1] without replacement.

@@ -36,4 +36,4 @@ sample vector k rng =
random_indices : Integer -> Integer -> Random_Number_Generator -> Vector Integer
random_indices n k rng =
array = Random_Utils.random_indices n k rng.java_random
Vector.Vector array
Vector.from_polyglot_array array
@@ -50,10 +50,6 @@ nano_time = @Builtin_Method "System.nano_time"
os : Text
os = @Builtin_Method "System.os"

## Check if the operating system is UNIX.
is_unix : Boolean
is_unix = @Builtin_Method "System.is_unix"

## PRIVATE
Returns the default line separator for the platform that the program is
currently running on.
@@ -546,7 +546,7 @@ type File
Builtin method that deletes the file.
Recommended to use `File.delete` instead which handles potential exceptions.
delete_builtin : Nothing
delete_builtin self = @Builtin_Method "File.delete"
delete_builtin self = @Builtin_Method "File.delete_builtin"

## Moves the file to the specified destination.

@@ -646,7 +646,7 @@ type File
read_last_bytes : Integer -> Vector ! File_Error
read_last_bytes self n =
handle_java_exceptions self <|
Vector.Vector (self.read_last_bytes_builtin n)
Vector.from_polyglot_array (self.read_last_bytes_builtin n)

## PRIVATE
read_last_bytes_builtin : Integer -> Array

@@ -837,7 +837,7 @@ type Output_Stream
replacement_sequence = Encoding_Utils.INVALID_CHARACTER.bytes encoding on_problems=Problem_Behavior.Ignore
java_charset = encoding.to_java_charset
results = Encoding_Utils.with_stream_encoder java_stream java_charset replacement_sequence.to_array action
problems = Vector.Vector results.problems . map Encoding_Error
problems = Vector.from_polyglot_array results.problems . map Encoding_Error
on_problems.attach_problems_after results.result problems

## An input stream, allowing for interactive reading of contents from an open

@@ -874,7 +874,7 @@ type Input_Stream
read_all_bytes : Vector.Vector ! File_Error
read_all_bytes self = self.stream_resource . with java_stream->
handle_java_exceptions self.file <|
Vector.Vector java_stream.readAllBytes
Vector.from_polyglot_array java_stream.readAllBytes

## ADVANCED

@@ -1122,4 +1122,4 @@ Vector.Vector.write_bytes self path on_existing_file=Existing_File_Behavior.Back
file = new path
on_existing_file.write file stream->
stream.write_bytes (Vector.Vector byte_array)
stream.write_bytes (Vector.from_polyglot_array byte_array)
@@ -26,9 +26,9 @@ type Os
os : Os
os = from_text System.os

## Check if the currently running platform is a UNIX platform.
## Check if the operating system is UNIX.
is_unix : Boolean
is_unix = System.is_unix
is_unix = @Builtin_Method "System.is_unix"

## PRIVATE
@@ -64,7 +64,7 @@ type Connection
self.connection_resource.with connection->
metadata = connection.getMetaData
schema_result_set = metadata.getCatalogs
Vector.Vector (JDBCProxy.getStringColumn schema_result_set "TABLE_CAT")
Vector.from_polyglot_array (JDBCProxy.getStringColumn schema_result_set "TABLE_CAT")

## Returns the list of schemas for the connection within the current database (or catalog).
schemas : [Text]

@@ -73,7 +73,7 @@ type Connection
self.connection_resource.with connection->
metadata = connection.getMetaData
schema_result_set = metadata.getSchemas
Vector.Vector (JDBCProxy.getStringColumn schema_result_set "TABLE_SCHEM")
Vector.from_polyglot_array (JDBCProxy.getStringColumn schema_result_set "TABLE_SCHEM")

## ADVANCED

@@ -208,8 +208,8 @@ type Connection
Resource.bracket (java_connection.prepareStatement insert_template) .close stmt->
num_rows = table.row_count
columns = table.columns
check_rows updates_array expected_size =
updates = Vector.Vector updates_array
check_rows updates_polyglot_array expected_size =
updates = Vector.from_polyglot_array updates_polyglot_array
if updates.length != expected_size then Panic.throw <| Illegal_State_Error "The batch update unexpectedly affected "+updates.length.to_text+" rows instead of "+expected_size.to_text+"." else
updates.each affected_rows->
if affected_rows != 1 then
@@ -52,4 +52,4 @@ type Histogram
Image.Image.histogram : Integer -> Histogram
Image.Image.histogram self channel =
hist = Java_Histogram.calculate self.opencv_mat channel
Histogram channel (Vector.Vector hist.get_data)
Histogram channel (Vector.from_polyglot_array hist.get_data)
@@ -101,7 +101,7 @@ type Image
if (row < 0) || (row >= self.rows) then Error.throw (Matrix.Index_Out_Of_Bounds_Error self.rows self.columns row) else
if (column < 0) || (column >= self.columns) then Error.throw (Matrix.Index_Out_Of_Bounds_Error self.rows self.columns column) else
arr = Java_Image.get self.opencv_mat row column
Vector.Vector arr
Vector.from_polyglot_array arr

## UNSTABLE

@@ -344,7 +344,7 @@ type Image
to_vector : Vector
to_vector self =
arr = Java_Image.to_vector self.opencv_mat
Vector.Vector arr
Vector.from_polyglot_array arr

## UNSTABLE
@@ -152,7 +152,7 @@ type Matrix
if (row < 0) || (row >= self.rows) then Error.throw (Index_Out_Of_Bounds_Error self.rows self.columns row) else
if (column < 0) || (column >= self.columns) then Error.throw (Index_Out_Of_Bounds_Error self.rows self.columns column) else
arr = Java_Matrix.get self.opencv_mat row column
Vector.Vector arr
Vector.from_polyglot_array arr

## UNSTABLE

@@ -413,7 +413,7 @@ type Matrix
to_vector : Vector
to_vector self =
arr = Java_Matrix.to_vector self.opencv_mat
Vector.Vector arr
Vector.from_polyglot_array arr

## UNSTABLE
@@ -767,7 +767,7 @@ type Column
example_to_vector = Examples.integer_column.to_vector
to_vector : Vector
to_vector self = Vector.Vector self.java_column.getStorage.toList
to_vector self = Vector.from_polyglot_array self.java_column.getStorage.toList

## Returns the underlying storage type of this column.

@@ -875,7 +875,7 @@ type Column
example_sum = Examples.integer_column.sum
sum : Any
sum self = self.java_column.aggregate 'sum' (x-> Vector.Vector x . reduce (+)) True
sum self = self.java_column.aggregate 'sum' (x-> Vector.from_polyglot_array x . reduce (+)) True

## ALIAS Max Columns

@@ -889,7 +889,7 @@ type Column
example_max = Examples.integer_column.max
max : Any
max self =
self.java_column.aggregate 'max' (x-> Vector.Vector x . reduce Math.max) True
self.java_column.aggregate 'max' (x-> Vector.from_polyglot_array x . reduce Math.max) True

## ALIAS Min Columns

@@ -903,7 +903,7 @@ type Column
example_min = Examples.integer_column.min
min : Any
min self =
self.java_column.aggregate 'min' (x-> Vector.Vector x . reduce Math.min) True
self.java_column.aggregate 'min' (x-> Vector.from_polyglot_array x . reduce Math.min) True

## ALIAS Mean Columns

@@ -918,7 +918,7 @@ type Column
mean : Any
mean self =
vec_mean v = if v.length == 0 then Nothing else
(Vector.Vector v).reduce (+) / v.length
(Vector.from_polyglot_array v).reduce (+) / v.length
self.java_column.aggregate 'mean' vec_mean True

## Computes the variance of the sample represented by this column.

@@ -1153,7 +1153,7 @@ type Aggregate_Column
Examples.aggregate_column.reduce .length . rename "transaction_count"
reduce : (Vector.Vector -> Any) -> Boolean -> Text -> Column
reduce self function skip_missing=True name_suffix="_result" =
f arr = function (Vector.Vector arr)
f arr = function (Vector.from_polyglot_array arr)
r = self.java_column.aggregate Nothing name_suffix f skip_missing
Column r

@@ -1172,7 +1172,7 @@ type Aggregate_Column
example_sum = Examples.aggregate_column.sum . rename "id_sum"
sum : Text -> Column
sum self name_suffix='_sum' =
r = self.java_column.aggregate 'sum' name_suffix (x-> Vector.Vector x . reduce (+)) True
r = self.java_column.aggregate 'sum' name_suffix (x-> Vector.from_polyglot_array x . reduce (+)) True
Column r

## Computes the maximum element of each group.

@@ -1189,7 +1189,7 @@ type Aggregate_Column
example_max = Examples.aggregate_column.max . rename "latest_transaction"
max : Text -> Column
max self name_suffix='_max' =
r = self.java_column.aggregate 'max' name_suffix (x-> Vector.Vector x . reduce Math.max) True
r = self.java_column.aggregate 'max' name_suffix (x-> Vector.from_polyglot_array x . reduce Math.max) True
Column r

## Computes the minimum element of each group.

@@ -1206,7 +1206,7 @@ type Aggregate_Column
example_min = Examples.aggregate_column.min . rename "first_transaction"
min : Text -> Column
min self name_suffix='_min' =
r = self.java_column.aggregate 'min' name_suffix (x-> Vector.Vector x . reduce Math.min) True
r = self.java_column.aggregate 'min' name_suffix (x-> Vector.from_polyglot_array x . reduce Math.min) True
Column r

## Computes the number of non-missing elements in each group.

@@ -1242,7 +1242,7 @@ type Aggregate_Column
mean : Text -> Column
mean self name_suffix='_mean' =
vec_mean v = if v.length == 0 then Nothing else
(Vector.Vector v).reduce (+) / v.length
(Vector.from_polyglot_array v).reduce (+) / v.length
r = self.java_column.aggregate 'mean' name_suffix vec_mean True
Column r

@@ -1261,7 +1261,7 @@ type Aggregate_Column
example_values = Examples.aggregate_column.values
values : Text -> Column
values self name_suffix='_values' =
r = self.java_column.aggregate Nothing name_suffix Vector.Vector False
r = self.java_column.aggregate Nothing name_suffix Vector.from_polyglot_array False
Column r

## Prints an ASCII-art column with this data to the standard output.
@@ -73,7 +73,7 @@ type Data_Formatter
Auto -> self.make_auto_parser
_ -> self.make_datatype_parser datatype
result = parser.parseIndependentValue text
problems = Vector.Vector result.problems . map (Parse_Values_Helper.translate_parsing_problem datatype)
problems = Vector.from_polyglot_array result.problems . map (Parse_Values_Helper.translate_parsing_problem datatype)
on_problems.attach_problems_after result.value problems

## Format a value into a Text.
@@ -153,7 +153,7 @@ type Table
example_display = Examples.inventory_table.display
display : Integer -> Boolean -> Text
display self show_rows=10 format_terminal=False =
cols = Vector.Vector self.java_table.getColumns
cols = Vector.from_polyglot_array self.java_table.getColumns
index = self.java_table.getIndex
col_names = [index.getName] + cols.map .getName
col_vals = cols.map .getStorage

@@ -710,7 +710,7 @@ type Table
storage = column.java_column.getStorage
new_storage_and_problems = parser.parseColumn column.name storage
new_storage = new_storage_and_problems.value
problems = Vector.Vector new_storage_and_problems.problems . map (Parse_Values_Helper.translate_parsing_problem expected_type)
problems = Vector.from_polyglot_array new_storage_and_problems.problems . map (Parse_Values_Helper.translate_parsing_problem expected_type)
problems.each problem_builder.append
Column.Column (Java_Column.new column.name column.java_column.getIndex new_storage)

@@ -782,7 +782,7 @@ type Table
example_columns = Examples.inventory_table.columns
columns : Vector
columns self = Vector.Vector self.java_table.getColumns . map Column.Column
columns self = Vector.from_polyglot_array self.java_table.getColumns . map Column.Column

## Sets the index of this table, using the column with the provided name.
@@ -217,7 +217,7 @@ Column_Name_Mismatch.to_display_text self = self.message
Column_Name_Mismatch.handle_java_exception =
throw_column_name_mismatch caught_panic =
cause = caught_panic.payload.cause
Error.throw (Column_Name_Mismatch (Vector.Vector cause.getMissing) (Vector.Vector cause.getExtras) cause.getMessage)
Error.throw (Column_Name_Mismatch (Vector.from_polyglot_array cause.getMissing) (Vector.from_polyglot_array cause.getExtras) cause.getMessage)
Panic.catch ColumnNameMismatchException handler=throw_column_name_mismatch

## UNSTABLE
@@ -182,8 +182,8 @@ validate validation ~error_message ~wrapped =
read_excel : File -> Excel_Section -> (Boolean|Infer) -> Problem_Behavior -> Boolean -> (Table | Vector)
read_excel file section headers on_problems xls_format=False =
reader stream = case section of
Sheet_Names -> Vector.Vector (ExcelReader.readSheetNames stream xls_format)
Range_Names -> Vector.Vector (ExcelReader.readRangeNames stream xls_format)
Sheet_Names -> Vector.from_polyglot_array (ExcelReader.readSheetNames stream xls_format)
Range_Names -> Vector.from_polyglot_array (ExcelReader.readRangeNames stream xls_format)
Sheet sheet skip_rows row_limit ->
prepare_reader_table on_problems <| case sheet of
Integer -> ExcelReader.readSheetByIndex stream sheet (make_java_headers headers) skip_rows row_limit xls_format

@@ -227,10 +227,10 @@ write_excel file table on_existing_file section headers match_columns _ xls_form
prepare_reader_table : Problem_Behavior -> Any -> Table
prepare_reader_table on_problems result_with_problems =
map_problem java_problem =
if Java.is_instance java_problem DuplicateNames then Duplicate_Output_Column_Names (Vector.Vector java_problem.duplicatedNames) else
if Java.is_instance java_problem InvalidNames then Invalid_Output_Column_Names (Vector.Vector java_problem.invalidNames) else
if Java.is_instance java_problem DuplicateNames then Duplicate_Output_Column_Names (Vector.from_polyglot_array java_problem.duplicatedNames) else
if Java.is_instance java_problem InvalidNames then Invalid_Output_Column_Names (Vector.from_polyglot_array java_problem.invalidNames) else
java_problem
parsing_problems = Vector.Vector (result_with_problems.problems) . map map_problem
parsing_problems = Vector.from_polyglot_array (result_with_problems.problems) . map map_problem
on_problems.attach_problems_after (Table.Table result_with_problems.value) parsing_problems

## PRIVATE
@@ -218,9 +218,9 @@ parse_aggregated_problems problems =
problems_array = problems.getProblems
parsed = Vector.new problems_array.length i->
p = problems_array.at i
if Java.is_instance p InvalidAggregation then Invalid_Aggregation p.getColumnName (Vector.Vector p.getRows) p.getMessage else
if Java.is_instance p FloatingPointGrouping then Floating_Point_Grouping p.getColumnName (Vector.Vector p.getRows) else
if Java.is_instance p UnquotedDelimiter then Unquoted_Delimiter p.getColumnName (Vector.Vector p.getRows) else
if Java.is_instance p InvalidAggregation then Invalid_Aggregation p.getColumnName (Vector.from_polyglot_array p.getRows) p.getMessage else
if Java.is_instance p FloatingPointGrouping then Floating_Point_Grouping p.getColumnName (Vector.from_polyglot_array p.getRows) else
if Java.is_instance p UnquotedDelimiter then Unquoted_Delimiter p.getColumnName (Vector.from_polyglot_array p.getRows) else
Invalid_Aggregation Nothing -1 "Unknown Error"

if problems.getCount == problems_array.length then parsed else
@@ -91,7 +91,7 @@ read_from_reader format java_reader on_problems max_columns=4096 =
Illegal_Argument_Error.handle_java_exception <| handle_parsing_failure <| handle_parsing_exception <|
reader = prepare_delimited_reader java_reader format max_columns on_problems
result_with_problems = reader.read
parsing_problems = Vector.Vector (result_with_problems.problems) . map translate_reader_problem
parsing_problems = Vector.from_polyglot_array (result_with_problems.problems) . map translate_reader_problem
on_problems.attach_problems_after (Table.Table result_with_problems.value) parsing_problems

## PRIVATE

@@ -124,11 +124,11 @@ prepare_delimited_reader java_reader format max_columns on_problems newline_over
DelimitedReader.new java_reader format.delimiter quote_characters.first quote_characters.second java_headers format.skip_rows row_limit max_columns value_parser cell_type_guesser format.keep_invalid_rows newline format.comment_character warnings_as_errors

translate_reader_problem problem =
invalid_row = [InvalidRow, (java_problem-> Invalid_Row java_problem.source_row java_problem.table_index (Vector.Vector java_problem.row))]
invalid_row = [InvalidRow, (java_problem-> Invalid_Row java_problem.source_row java_problem.table_index (Vector.from_polyglot_array java_problem.row))]
additional_invalid_rows = [AdditionalInvalidRows, (java_problem-> Additional_Invalid_Rows java_problem.count)]
mismatched_quote = [MismatchedQuote, (_-> Mismatched_Quote)]
duplicate_names = [DuplicateNames, (java_problem-> Duplicate_Output_Column_Names (Vector.Vector java_problem.duplicatedNames))]
invalid_names = [InvalidNames, (java_problem-> Invalid_Output_Column_Names (Vector.Vector java_problem.invalidNames))]
duplicate_names = [DuplicateNames, (java_problem-> Duplicate_Output_Column_Names (Vector.from_polyglot_array java_problem.duplicatedNames))]
invalid_names = [InvalidNames, (java_problem-> Invalid_Output_Column_Names (Vector.from_polyglot_array java_problem.invalidNames))]

translations = [invalid_row, additional_invalid_rows, mismatched_quote, duplicate_names, invalid_names]
found = translations.find t-> Java.is_instance problem t.first

@@ -185,7 +185,7 @@ detect_metadata file format =
column_count = reader.getColumnCount
if column_count == 0 then Nothing else
No_Headers column_count
_ -> Existing_Headers (Vector.Vector defined_columns)
_ -> Existing_Headers (Vector.from_polyglot_array defined_columns)
line_separator_from_parser = reader.getEffectiveLineSeparator
has_seen_newline = newline_detecting_reader.newlineEncountered
## If the parser has seen a newline, we can trust that it
@@ -6,8 +6,8 @@ polyglot java import org.enso.table.parsing.problems.InvalidFormat
polyglot java import org.enso.table.parsing.problems.LeadingZeros

translate_parsing_problem expected_datatype problem =
invalid_format = [InvalidFormat, (java_problem-> Invalid_Format java_problem.column expected_datatype (Vector.Vector java_problem.cells))]
leading_zeros = [LeadingZeros, (java_problem-> Leading_Zeros java_problem.column expected_datatype (Vector.Vector java_problem.cells))]
invalid_format = [InvalidFormat, (java_problem-> Invalid_Format java_problem.column expected_datatype (Vector.from_polyglot_array java_problem.cells))]
leading_zeros = [LeadingZeros, (java_problem-> Leading_Zeros java_problem.column expected_datatype (Vector.from_polyglot_array java_problem.cells))]

translations = [invalid_format, leading_zeros]
found = translations.find t-> Java.is_instance problem t.first
|
@ -29,12 +29,12 @@ type Unique_Name_Strategy
|
||||
|
||||
## Vector of any duplicates renamed
|
||||
renames : Vector
|
||||
renames self = Vector.Vector self.deduplicator.getDuplicatedNames
|
||||
renames self = Vector.from_polyglot_array self.deduplicator.getDuplicatedNames
|
||||
|
||||
|
||||
## Vector of any invalid names
|
||||
invalid_names : Vector
|
||||
invalid_names self = Vector.Vector self.deduplicator.getInvalidNames
|
||||
invalid_names self = Vector.from_polyglot_array self.deduplicator.getInvalidNames
|
||||
|
||||
|
||||
## Takes a value and converts to a valid (but not necessarily unique) name
|
||||
|
@@ -64,6 +64,19 @@ public abstract class CoercePrimitiveNode extends Node {
}
}

@Specialization(guards = {"characters.isString(character)", "isChar(character)"})
long doChar(Object character, @CachedLibrary(limit = "5") InteropLibrary characters) {
try {
return characters.asString(character).charAt(0);
} catch (UnsupportedMessageException e) {
throw new IllegalStateException("Impossible, `character` is checked to be a string");
}
}

static boolean isChar(Object s) {
return s instanceof Character;
}

@Fallback
Object doNonPrimitive(Object value) {
return value;
@@ -12,6 +12,8 @@ import java.util.Arrays;
@NodeField(name = "arity", type = int.class)
public abstract class JsForeignNode extends ForeignFunctionCallNode {

private @Child CoercePrimitiveNode coercePrimitiveNode = CoercePrimitiveNode.build();

abstract Object getForeignFunction();

abstract int getArity();

@@ -35,8 +37,9 @@ public abstract class JsForeignNode extends ForeignFunctionCallNode {
Object[] positionalArgs = new Object[newLength];
System.arraycopy(arguments, 1, positionalArgs, 0, newLength);
try {
return interopLibrary.invokeMember(
getForeignFunction(), "apply", arguments[0], new ReadOnlyArray(positionalArgs));
return coercePrimitiveNode.execute(
interopLibrary.invokeMember(
getForeignFunction(), "apply", arguments[0], new ReadOnlyArray(positionalArgs)));
} catch (UnsupportedMessageException
| UnknownIdentifierException
| ArityException
@@ -11,13 +11,15 @@ import com.oracle.truffle.api.library.CachedLibrary;
@NodeField(name = "foreignFunction", type = Object.class)
public abstract class RForeignNode extends ForeignFunctionCallNode {

private @Child CoercePrimitiveNode coercePrimitiveNode = CoercePrimitiveNode.build();

abstract Object getForeignFunction();

@Specialization
public Object doExecute(
Object[] arguments, @CachedLibrary("foreignFunction") InteropLibrary interopLibrary) {
try {
return interopLibrary.execute(getForeignFunction(), arguments);
return coercePrimitiveNode.execute(interopLibrary.execute(getForeignFunction(), arguments));
} catch (UnsupportedMessageException | UnsupportedTypeException | ArityException e) {
throw new IllegalStateException("R parser returned a malformed object", e);
}
@@ -0,0 +1,42 @@
package org.enso.interpreter.node.expression.builtin.interop.generic;

import com.oracle.truffle.api.interop.InteropLibrary;
import com.oracle.truffle.api.interop.InvalidArrayIndexException;
import com.oracle.truffle.api.interop.UnsupportedMessageException;
import com.oracle.truffle.api.nodes.Node;
import com.oracle.truffle.api.profiles.BranchProfile;
import org.enso.interpreter.Constants;
import org.enso.interpreter.dsl.BuiltinMethod;
import org.enso.interpreter.epb.node.CoercePrimitiveNode;
import org.enso.interpreter.node.expression.foreign.CoerceNothing;
import org.enso.interpreter.runtime.Context;
import org.enso.interpreter.runtime.builtin.Builtins;
import org.enso.interpreter.runtime.error.PanicException;

@BuiltinMethod(
type = "Polyglot",
name = "read_array_element",
description = "Read a value from the array specified by the index.")
public class ReadArrayElementNode extends Node {
private @Child InteropLibrary library =
InteropLibrary.getFactory().createDispatched(Constants.CacheSizes.BUILTIN_INTEROP_DISPATCH);

private @Child CoercePrimitiveNode coercion = CoercePrimitiveNode.build();
private @Child CoerceNothing nothingCoercion = CoerceNothing.build();
private final BranchProfile err = BranchProfile.create();

Object execute(Object array, long index) {
try {
return nothingCoercion.execute(coercion.execute(library.readArrayElement(array, index)));
} catch (UnsupportedMessageException e) {
err.enter();
Builtins builtins = Context.get(this).getBuiltins();
throw new PanicException(
builtins.error().makeTypeError(builtins.array(), array, "array"), this);
} catch (InvalidArrayIndexException e) {
err.enter();
Builtins builtins = Context.get(this).getBuiltins();
throw new PanicException(builtins.error().makeInvalidArrayIndexError(array, index), this);
}
}
}
@@ -0,0 +1,33 @@
package org.enso.interpreter.node.expression.foreign;

import com.oracle.truffle.api.dsl.Fallback;
import com.oracle.truffle.api.dsl.Specialization;
import com.oracle.truffle.api.interop.InteropLibrary;
import com.oracle.truffle.api.library.CachedLibrary;
import org.enso.interpreter.runtime.Context;
import com.oracle.truffle.api.nodes.Node;

public abstract class CoerceNothing extends Node {
public static CoerceNothing build() {
return CoerceNothingNodeGen.create();
}

/**
* Converts a null polyglot representation into an equivalent Nothing representation in the
* Enso context.
*
* @param value the polyglot value to perform coercion on
* @return {@code value} coerced to an Enso primitive where applicable
*/
public abstract Object execute(Object value);

@Specialization(guards = "interop.isNull(value)")
public Object doNothing(Object value, @CachedLibrary(limit = "1") InteropLibrary interop) {
return Context.get(this).getBuiltins().nothing().newInstance();
}

@Fallback
public Object doOther(Object value) {
return value;
}
}
@@ -6,17 +6,20 @@ import com.oracle.truffle.api.nodes.DirectCallNode;
import com.oracle.truffle.api.nodes.ExplodeLoop;
import com.oracle.truffle.api.profiles.BranchProfile;
import org.enso.interpreter.node.ExpressionNode;
import org.enso.interpreter.runtime.Context;
import org.enso.interpreter.runtime.error.DataflowError;

/** Performs a call into a given foreign call target. */
public class ForeignMethodCallNode extends ExpressionNode {
private @Children ExpressionNode[] arguments;
private @Child DirectCallNode callNode;
private @Child CoerceNothing coerceNothingNode;
private final BranchProfile[] errorProfiles;

ForeignMethodCallNode(ExpressionNode[] arguments, CallTarget foreignCt) {
this.arguments = arguments;
this.callNode = DirectCallNode.create(foreignCt);
this.coerceNothingNode = CoerceNothing.build();

this.errorProfiles = new BranchProfile[arguments.length];
for (int i = 0; i < arguments.length; i++) {

@@ -46,6 +49,6 @@ public class ForeignMethodCallNode extends ExpressionNode {
return args[i];
}
}
return callNode.call(args);
return coerceNothingNode.execute(callNode.call(args));
}
}
@@ -316,8 +316,12 @@ public class Builtins {
*/
public Optional<Function> getBuiltinFunction(
AtomConstructor atom, String methodName, Language language) {
// TODO: move away from String mapping once Builtins is gone
Map<String, Class<BuiltinRootNode>> atomNodes = builtinMethodNodes.get(atom.getName());
return getBuiltinFunction(atom.getName(), methodName, language);
}

public Optional<Function> getBuiltinFunction(
String methodOwner, String methodName, Language language) {
Map<String, Class<BuiltinRootNode>> atomNodes = builtinMethodNodes.get(methodOwner);
if (atomNodes == null) return Optional.empty();
Class<BuiltinRootNode> clazz = atomNodes.get(methodName);
if (clazz == null) return Optional.empty();
@@ -300,13 +300,25 @@ class IrToTruffle(
// and not attempt to register it in the scope (can't redefine methods).
// For non-builtin types (or modules) that own the builtin method
// we have to look up the function and register it in the scope.
val x = methodDef.body.asInstanceOf[IR.Function.Lambda].body
val fullMethodName = x.asInstanceOf[IR.Literal.Text]

val builtinNameElements = fullMethodName.text.split('.')
if (builtinNameElements.length != 2) {
throw new CompilerError(
s"Unknown builtin method ${fullMethodName.text}"
)
}
val methodName = builtinNameElements(1)
val methodOwnerName = builtinNameElements(0)

val builtinFunction = context.getBuiltins
.getBuiltinFunction(cons, methodDef.methodName.name, language)
.getBuiltinFunction(methodOwnerName, methodName, language)
builtinFunction.toScala
.map(Some(_))
.toRight(
new CompilerError(
s"Unable to find Truffle Node for method ${cons.getName()}.${methodDef.methodName.name}"
s"Unable to find Truffle Node for method ${cons.getName}.${methodDef.methodName.name}"
)
)
.left
@@ -21,7 +21,7 @@ spec = Test.group "Arrays" <|
arr = [1, 2, 3] . to_array
arr.set_at 1 10
arr.at 1 . should_equal 10
Vector.from_array arr . should_equal [1, 10, 3]
Vector.from_polyglot_array arr . should_equal [1, 10, 3]

Test.specify "should panic on out of bounds access" <|
arr = [1, 2, 3] . to_array
@@ -36,7 +36,7 @@ spec =
Test.specify "should correctly translate a series of codepoint indices to a grapheme indices in a batch" <|
translate_indices text ixes =
Vector.Vector <| Text_Utils.utf16_indices_to_grapheme_indices text ixes.to_array
Vector.from_polyglot_array <| Text_Utils.utf16_indices_to_grapheme_indices text ixes.to_array
codepoint_indices = Vector.new text.char_vector.length ix->ix
translate_indices text codepoint_indices . should_equal codepoints_to_graphemes
@@ -26,7 +26,18 @@ compare_tco a b = case a.vec.length == b.vec.length of
foreign js generate_js_array = """
return [1, 2, 3, 4, 5]

foreign js generate_nested_js_array = """
return [[1, 2, 3], [4, 5]]

foreign python generate_py_array = """
return [1, 2, 3, 4, None]

foreign python generate_nested_py_array = """
return [[1, 2, 3], [4, 5]]

spec = Test.group "Vectors" <|
pending_python_missing = if Polyglot.is_language_installed "python" then Nothing else """
Can't run Python tests, Python is not installed.

Test.specify "text bytes" <|
"Lore".utf_8 . should_equal [76, 111, 114, 101]

@@ -38,8 +49,18 @@ spec = Test.group "Vectors" <|
Vector.fill 100 1 . fold (0) (+) . should_equal 100

Test.specify "should allow creation from arrays without mutability" <|
built = Vector.from_array generate_js_array
built . should_equal [1, 2, 3, 4, 5]
built_from_js = Vector.from_polyglot_array generate_js_array
built_from_js . should_equal [1, 2, 3, 4, 5]

Test.specify "should allow creation from arrays without mutability in Python" pending=pending_python_missing <|
built_from_py = Vector.from_polyglot_array generate_py_array
built_from_py . should_equal [1, 2, 3, 4, Nothing]

Test.specify "should allow creation from arrays without mutability for nested arrays" pending="Polyglot Arrays/Vector rewrite" <|
built_from_js = Vector.from_polyglot_array generate_nested_js_array
built_from_js . should_equal [[1, 2, 3], [4, 5]]
built_from_py = Vector.from_polyglot_array generate_nested_py_array
built_from_py . should_equal [[1, 2, 3], [4, 5]]

Test.specify "should allow accessing elements" <|
[1,2,3].at 0 . should_equal 1
@@ -45,6 +45,9 @@ foreign js make_object = """
foreign js make_array = """
return [{ x: 10}, {x: 20}, {x: 30}];

foreign js make_simple_array = """
return [10, 20, 30];

foreign js make_str str = """
return "foo " + str + " bar"

@@ -60,6 +63,9 @@ foreign js make_false = """
foreign js make_double = """
return 10.5

foreign js make_null = """
return null;

foreign js does_not_parse = """
return { x

@@ -90,8 +96,10 @@ spec = Test.group "Polyglot JS" <|
obj.compare 11 . should_be_true

Test.specify "should expose array interfaces for JS arrays" <|
vec = Vector.Vector make_array
vec = Vector.from_polyglot_array make_array
vec.map .x . should_equal [10, 20, 30]
vec2 = Vector.from_polyglot_array make_simple_array
vec2.to_array.at 0 . should_equal 10

Test.specify "should correctly marshall strings" <|
str = make_str "x" + " baz"

@@ -131,6 +139,10 @@ spec = Test.group "Polyglot JS" <|
_ -> False
r.should_be_true

Test.specify "should make JS null values equal to Nothing" <|
js_null = make_null
js_null . should_equal Nothing

Test.specify "should make JS numbers type pattern-matchable" <|
int_match = case make_int of
Integer -> True
@@ -85,7 +85,7 @@ spec =
obj.compare 11 . should_be_true

Test.specify "should expose array interfaces for Python arrays" <|
vec = Vector.Vector make_array
vec = Vector.from_polyglot_array make_array
vec.map .x . should_equal [10, 20, 30]

Test.specify "should correctly marshall strings" <|

@@ -140,6 +140,10 @@ spec =
Number -> True
num_double_match.should_be_true

Test.specify "should make Python None values equal to Nothing" <|
py_null = make_null
py_null . should_equal Nothing

Test.specify "should allow Enso to catch Python exceptions" <|
value = My_Type 1 2
result = Panic.recover Any <| value.my_throw
@@ -50,6 +50,9 @@ foreign r make_true = """
foreign r make_false = """
FALSE

foreign r make_null = """
NULL

spec =
pending = if Polyglot.is_language_installed "R" then Nothing else """
Can't run R tests, R is not installed.

@@ -68,7 +71,7 @@ spec =
obj.compare 11 . should_be_true

Test.specify "should expose array interfaces for R arrays" <|
vec = Vector.Vector make_array
vec = Vector.from_polyglot_array make_array
vec.map .x . should_equal [10, 20, 30]

Test.specify "should correctly marshall strings" <|

@@ -123,6 +126,10 @@ spec =
Number -> True
num_double_match.should_be_true

Test.specify "should make R null objects equal to Nothing" <|
r_null = make_null
r_null . should_equal Nothing

Test.specify "should allow Enso to catch R exceptions" <|
value = My_Type 1 2
result = Panic.recover Any <| value.my_throw
@@ -53,7 +53,7 @@ visualization_spec connection =
g = t.aggregate [Group_By "A", Group_By "B", Average "C"] . at "Average C"
vis2 = Visualization.prepare_visualization g 1
json2 = make_json header=["Average C"] data=[[4.0]] all_rows=2 ixes_header=[] ixes=[]
json2 = make_json header=["Average C"] data=[[4]] all_rows=2 ixes_header=[] ixes=[]
vis2 . should_equal json2

t2 = Dataframe_Table.new [["A", [1, 2, 3]], ["B", [4, 5, 6]], ["C", [7, 8, 9]]]