Mirror of https://github.com/enso-org/enso.git, synced 2024-12-22 18:01:38 +03:00
Rename `Map` to `Dictionary` and `Set` to `Hashset`. (#10474)
- Rename `Map` to `Dictionary`.
- Rename `Set` to `Hashset`.
- Add deprecated placeholders for the static methods of `Map`.
This commit is contained in:
parent b2c4559678
commit 4b3e4ae15e
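Migration note: the sketch below is hypothetical user code, not part of this commit; it shows the renamed entry points, whose call shapes are otherwise unchanged.

    # Before: Map.from_vector / Set.from_vector
    # After:
    d = Dictionary.from_vector [["a", 1], ["b", 2]]
    h = Hashset.from_vector [1, 2, 3]
    value = d.at "a"          # 1
    is_member = h.contains 2  # True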
@@ -14,11 +14,13 @@
   `Location.Right`.][10445]
 - [Renamed `Postgres_Details.Postgres` to `Postgres.Server`.][10466]
 - [Remove `First` and `Last` from namespace, use auto-scoped.][10467]
+- [Rename `Map` to `Dictionary` and `Set` to `Hashset`.][10474]

 [10434]: https://github.com/enso-org/enso/pull/10434
 [10445]: https://github.com/enso-org/enso/pull/10445
 [10466]: https://github.com/enso-org/enso/pull/10466
 [10467]: https://github.com/enso-org/enso/pull/10467
+[10474]: https://github.com/enso-org/enso/pull/10474

 # Enso 2024.2
@@ -96,11 +96,11 @@ read_bucket bucket prefix="" credentials:AWS_Credential=AWS_Credential.Default d
    - key: the key of the object.
    - credentials: AWS credentials. If not provided, the default credentials will
      be used.
-head : Text -> Text -> AWS_Credential -> Map Text Any ! S3_Error
+head : Text -> Text -> AWS_Credential -> Dictionary Text Any ! S3_Error
 head bucket key="" credentials:AWS_Credential=AWS_Credential.Default =
     response = raw_head bucket key credentials
     pairs = response.sdkFields.map f-> [f.memberName, f.getValueOrDefault response]
-    Map.from_vector pairs
+    Dictionary.from_vector pairs

 ## PRIVATE
    Gets the raw metadata of a bucket or object.
@@ -109,7 +109,7 @@ head bucket key="" credentials:AWS_Credential=AWS_Credential.Default =
    - bucket: the name of the bucket.
    - key: the key of the object.
    - credentials: AWS credentials.
-raw_head : Text -> Text -> AWS_Credential -> Map Text Any ! S3_Error
+raw_head : Text -> Text -> AWS_Credential -> Dictionary Text Any ! S3_Error
 raw_head bucket key credentials =
     client = make_client_for_bucket bucket credentials
     case key == "" of
@@ -392,8 +392,8 @@ type Any
             from Standard.Examples import Example_Error_Type

             example_map_error =
-                my_map = Map.empty
-                error = my_map.at "x"
+                my_dictionary = Dictionary.empty
+                error = my_dictionary.at "x"
                 error.map_error (_ -> Example_Error_Type "x is missing")
     map_error : (Error -> Error) -> Any
     map_error self ~f =
@@ -310,7 +310,7 @@ fetch (uri:(URI | Text)) (method:HTTP_Method=HTTP_Method.Get) (headers:(Vector (
         import Standard.Base.Data

         test_file = enso_project.data / "sample.png"
-        form_data = Map.from_vector [["key", "val"], ["a_file", test_file]]
+        form_data = Dictionary.from_vector [["key", "val"], ["a_file", test_file]]
         response = Data.post url_post (Request_Body.Form_Data form_data)

     > Example
@@ -318,7 +318,7 @@ fetch (uri:(URI | Text)) (method:HTTP_Method=HTTP_Method.Get) (headers:(Vector (
         import Standard.Base.Data

         test_file = enso_project.data / "sample.txt"
-        form_data = Map.from_vector [["key", "val"], ["a_file", test_file]]
+        form_data = Dictionary.from_vector [["key", "val"], ["a_file", test_file]]
         response = Data.post url_post (Request_Body.Form_Data form_data url_encoded=True)
 @uri Text_Input
 @response_format Data_Read_Helpers.format_widget_with_raw_response
@@ -0,0 +1,425 @@
+import project.Any.Any
+import project.Data.Numbers.Integer
+import project.Data.Pair.Pair
+import project.Data.Text.Text
+import project.Data.Vector.Vector
+import project.Error.Error
+import project.Errors.Illegal_Argument.Illegal_Argument
+import project.Errors.No_Such_Key.No_Such_Key
+import project.Nothing.Nothing
+import project.Panic.Panic
+from project.Data.Boolean import Boolean, False, True
+from project.Data.Text.Extensions import all
+
+## A key-value store. It is possible to use any type as keys and values and mix
+   them in one Dictionary. Keys are checked for equality based on their hash
+   code and `==` operator, which is both an internal part of Enso. Enso is
+   capable of computing a hash code, and checking for equality, any objects that
+   can appear in Enso - primitives, Atoms, values coming from different
+   languages, etc.
+
+   For keys that are not reflexive, like `Number.nan`,
+   [Same Value equality specification](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Equality_comparisons_and_sameness#same-value-zero_equality)
+   is used. This means that both `Number.nan` and types with comparators that
+   violate reflexivity (e.g. their `compare` method always returns `Nothing`)
+   can be used as keys in the Dictionary.
+
+   A single key-value pair is called an *entry*.
+
+   It is possible to pass a Dictionary created in Enso to foreign functions,
+   where it will be treated as appropriate map structures - in Python that is a
+   dictionary, and in JavaScript, it is a `Map`. And likewise, it is possible
+   to pass a foreign map into Enso, where it will be treated as a Dictionary.
+@Builtin_Type
+type Dictionary key value
+    ## PRIVATE
+       ADVANCED
+       Returns an empty dictionary.
+    empty : Dictionary
+    empty = @Builtin_Method "Dictionary.empty"
+
+    ## PRIVATE
+       ADVANCED
+       Returns a single-element dictionary with the given key and value.
+       A call to `Dictionary.singleton key value` is the same as a call to
+       `Dictionary.empty.insert key value`.
+
+       Arguments:
+       - key: The key to use for `value` in the dictionary.
+       - value: The value to store under 'key' in the dictionary.
+
+       > Example
+         Create a single element dictionary storing the key "my_key" and the
+         value 2.
+
+             example_singleton = Dictionary.singleton "my_key" 2
+    singleton : Any -> Any -> Dictionary
+    singleton key value = Dictionary.empty.insert key value
+
+    ## ALIAS dictionary, lookup table
+       GROUP Constants
+       ICON convert
+       Builds a dictionary from two Vectors. The first vector contains the keys,
+       and the second vector contains the values. The two vectors must be of the
+       same length.
+
+       Arguments:
+       - keys: A vector of keys.
+       - values: A vector of values.
+       - error_on_duplicates: A flag which specifies if duplicate keys on the
+         input vector should result in an error. By default, set to `True`,
+         meaning that if two entries in the vector share the same key, an
+         `Illegal_Argument` error is raised. If set to `False`, the last entry
+         with a given key will be kept.
+    from_keys_and_values : Vector Any -> Vector Any -> Boolean -> Dictionary ! Illegal_Argument
+    from_keys_and_values keys:Vector values:Vector error_on_duplicates:Boolean=True =
+        if keys.length != values.length then Error.throw (Illegal_Argument.Error "`Dictionary.from_keys_and_values` encountered two vectors of different lengths.") else
+            keys.fold_with_index Dictionary.empty current-> idx-> key->
+                if error_on_duplicates.not || (current.contains_key key . not) then current.insert key (values.at idx) else
+                    Error.throw (Illegal_Argument.Error "`Dictionary.from_keys_and_values` encountered duplicate key: "+key.to_display_text)
+
+    ## ALIAS dictionary, lookup table
+       GROUP Constants
+       ICON convert
+       Builds a dictionary from a vector of key-value pairs, with each key-value
+       pair represented as a 2 element vector.
+
+       Arguments:
+       - vec: A vector of key-value pairs (2 element vectors).
+       - error_on_duplicates: A flag which specifies if duplicate keys on the
+         input vector should result in an error. By default, set to `True`,
+         meaning that if two entries in the vector share the same key, an
+         `Illegal_Argument` error is raised. If set to `False`, the last entry
+         with a given key will be kept.
+
+       > Example
+         Building a dictionary containing two key-value pairs.
+
+             example_from_vector = Dictionary.from_vector [["A", 1], ["B", 2]]
+    from_vector : Vector Any -> Boolean -> Dictionary ! Illegal_Argument
+    from_vector vec error_on_duplicates=True =
+        vec.fold Dictionary.empty m-> el-> if el.length != 2 then Error.throw (Illegal_Argument.Error "`Dictionary.from_vector` encountered an invalid value. Each value in the vector has to be a key-value pair - it must have exactly 2 elements.") else
+            key = el.at 0
+            value = el.at 1
+            if error_on_duplicates.not || (m.contains_key key . not) then m.insert key value else
+                Error.throw (Illegal_Argument.Error "`Dictionary.from_vector` encountered duplicate key: "+key.to_display_text)
+
+    ## GROUP Logical
+       ICON metadata
+       Returns True if the Dictionary is empty, i.e. does not have any entries.
+    is_empty : Boolean
+    is_empty self = self.size == 0
+
+    ## GROUP Logical
+       ICON metadata
+       Returns True if the Dictionary is not empty, i.e. has at least one entry.
+    not_empty : Boolean
+    not_empty self = self.is_empty.not
+
+    ## GROUP Metadata
+       ICON metadata
+       Returns the number of entries in this dictionary.
+    size : Integer
+    size self = @Builtin_Method "Dictionary.size"
+
+    ## GROUP Metadata
+       ICON metadata
+       Returns the number of entries in this dictionary.
+    length : Integer
+    length self = self.size
+
+    ## GROUP Calculations
+       ICON row_add
+       Inserts a key-value mapping into this dictionary, overriding any existing
+       instance of `key` with the new `value`.
+
+       Note that since the return type is also a `Dictionary`, multiple `insert`
+       calls can be chained, e.g., `dictionary.insert "A" 1 . insert "B" 2`.
+
+       Due to the limitation of the current implementation, inserts with a
+       key that is already contained in the dictionary, or insert on a
+       dictionary instance that is re-used in other computations, have a linear
+       time complexity. For all the other cases, the time complexity of this
+       method is constant.
+
+       Arguments:
+       - key: The key to insert the value for.
+       - value: The value to associate with the `key`.
+
+       > Example
+         Insert the value "seven" into the dictionary for the key 7.
+
+             import Standard.Examples
+
+             example_insert = Examples.dictionary.insert 7 "seven"
+    insert : Any -> Any -> Dictionary
+    insert self key value = @Builtin_Method "Dictionary.insert"
+
+    ## GROUP Selections
+       ICON table_clean
+       Removes an entry specified by the given key from this dictionary, and
+       returns a new dictionary without this entry. Throws `No_Such_Key.Error` if
+       `key` is not present.
+
+       Arguments:
+       - key: The key to look up in the dictionary.
+
+       > Example
+         Remove key "A" from a dictionary.
+
+             import Standard.Examples
+
+             Examples.dictionary.remove "A"
+    remove : Any -> Dictionary ! No_Such_Key
+    remove self key =
+        Panic.catch Any (self.remove_builtin key) _->
+            Error.throw (No_Such_Key.Error self key)
+
+    ## GROUP Selections
+       ICON parse3
+       Gets the value associated with `key` in this dictionary, or throws a
+       `No_Such_Key.Error` if `key` is not present.
+
+       This method has a constant time complexity.
+
+       Arguments:
+       - key: The key to look up in the dictionary.
+
+       > Example
+         Looks up the value for the key "A" in a dictionary.
+
+             import Standard.Examples
+
+             example_at = Examples.dictionary.at "A"
+    at : Any -> Any ! No_Such_Key
+    at self key = self.get key (Error.throw (No_Such_Key.Error self key))
+
+    ## ICON parse3
+       Gets the value associated with `key` in this dictionary, or returns
+       `if_missing` if it isn't present.
+
+       This method has a constant time complexity.
+
+       Arguments:
+       - key: The key to look up in the dictionary.
+       - if_missing: The value to use if the key isn't present.
+
+       > Example
+         Get the value for the key 2 in a dictionary or instead return "zero" if it
+         isn't present.
+
+             import Standard.Examples
+
+             example_get = Examples.dictionary.get 2 "zero"
+    get : Any -> Any -> Any
+    get self key ~if_missing=Nothing = self.get_builtin key if_missing
+
+    ## GROUP Logical
+       ICON preparation
+       Returns True iff the Dictionary contains the given `key`.
+    contains_key : Any -> Boolean
+    contains_key self key = @Builtin_Method "Dictionary.contains_key"
+
+    ## GROUP Selections
+       ICON select_column
+       Returns an unsorted vector of all the keys in this Dictionary.
+    keys : Vector Any
+    keys self = self.to_vector.map pair-> pair.at 0
+
+    ## GROUP Selections
+       ICON select_column
+       Returns an unsorted vector of all the values in this Dictionary.
+    values : Vector Any
+    values self = self.to_vector.map pair-> pair.at 1
+
+    ## ICON column_add
+       Maps a function over each value in this dictionary.
+
+       Arguments:
+       - function: The function to apply to each value in the dictionary, taking
+         a value and returning a value.
+
+       > Example
+         Append "_word" to all values in the dictionary.
+
+             import Standard.Examples
+
+             example_map = Examples.dictionary.map (+ "_word")
+    map : (Any -> Any) -> Dictionary
+    map self function =
+        kv_func = _ -> function
+        self.map_with_key kv_func
+
+    ## ICON column_add
+       Maps a function over each key-value pair in the dictionary, transforming
+       the value.
+
+       Arguments:
+       - function: Function to apply to each key and value in the dictionary,
+         taking a key and a value and returning a value.
+
+       > Example
+         Prepend the keys to the values in the dictionary.
+
+             import Standard.Examples
+
+             example_map_with_key =
+                 Examples.dictionary.map_with_key (k -> v -> k.to_text + "-" + v)
+    map_with_key : (Any -> Any -> Any) -> Dictionary
+    map_with_key self function =
+        Dictionary.from_vector <| self.to_vector.map pair->
+            key = pair.first
+            value = pair.last
+            [key, (function key value)]
+
+    ## ICON column_add
+       Maps a function over each key in this dictionary.
+
+       Arguments:
+       - function: The function to apply to each key in the dictionary, taking a
+         key and returning a new key.
+
+       > Example
+         Doubling all keys in the dictionary.
+
+             import Standard.Examples
+
+             example_map_keys = Examples.dictionary.map_keys *2
+    map_keys : (Any -> Any) -> Dictionary
+    map_keys self function =
+        trans_function = k -> v -> [function k, v]
+        self.transform trans_function
+
+    ## ICON column_add
+       Transforms the map's keys and values to create a new dictionary.
+
+       Arguments:
+       - function: The function used to transform the dictionary, taking a key
+         and a value and returning a pair of `[key, value]`.
+
+       ! Error Conditions
+         - If multiple dictionary entries end up with duplicate keys after the
+           transformation, an `Illegal_Argument.Error` is thrown.
+
+       > Example
+         Turn all keys into `Text` and append "_word" to the values in the
+         dictionary.
+
+             import Standard.Examples
+
+             example_transform =
+                 Examples.dictionary.transform (k -> v -> [k.to_text, v + "_word"])
+    transform : (Any -> Any -> [Any, Any]) -> Dictionary
+    transform self function =
+        func_pairs = p -> function (p.at 0) (p.at 1)
+        vec_transformed = self.to_vector.map func_pairs
+        new_dictionary = Dictionary.from_vector vec_transformed error_on_duplicates=True
+        new_dictionary.catch Illegal_Argument error->
+            case error.message.starts_with "`Dictionary.from_vector` encountered duplicate key" of
+                True ->
+                    new_message = error.message.replace "from_vector" "transform"
+                    Error.throw (Illegal_Argument.Error new_message error.cause)
+                False -> new_dictionary
+
+    ## ICON transform4
+       Combines the values in the dictionary.
+
+       Arguments:
+       - init: The initial value for the fold.
+       - function: A binary function to apply to pairs of values.
+
+       > Example
+         Find the length of the longest word in the dictionary.
+
+             import Standard.Examples
+
+             example_fold = Examples.dictionary.fold 0 (l -> r -> l.max r.length)
+    fold : Any -> (Any -> Any -> Any) -> Any
+    fold self init function = self.values.fold init function
+
+    ## ICON transform4
+       Combines the key-value pairs in the dictionary.
+
+       Arguments:
+       - init: The initial value for the fold.
+       - function: A function taking the left value, the current key, and the
+         current value, and combining them to yield a single value.
+
+       > Example
+         Glue the values in the dictionary together with the keys.
+
+             import Standard.Examples
+
+             example_fold_with_key =
+                 Examples.dictionary.fold_with_key "" (l -> k -> v -> l + k.to_text + v)
+    fold_with_key : Any -> (Any -> Any -> Any -> Any) -> Any
+    fold_with_key self init function =
+        self.to_vector.fold init acc-> pair->
+            function acc pair.first pair.last
+
+    ## PRIVATE
+       ADVANCED
+       Applies a function to each value in the dictionary.
+
+       Arguments:
+       - function: The function to apply to each value in the dictionary, taking
+         a value and returning anything.
+
+       This method does not return the results, so it is only useful for performing
+       computations with side-effects.
+
+       If the function returns a dataflow error, the error is converted to a
+       panic and thrown immediately, stopping further processing.
+
+       > Example
+         Printing each value in the dictionary.
+
+             import Standard.Examples
+
+             example_each = Examples.dictionary.each IO.println
+    each : (Any -> Any) -> Nothing
+    each self function =
+        kv_func = _ -> function
+        self.each_with_key kv_func
+
+    ## PRIVATE
+       ADVANCED
+       Applies a function to each key-value pair in the dictionary.
+
+       Arguments:
+       - function: The function to apply to each key-value pair in the
+         dictionary, taking a key and a value and returning anything.
+
+       This method does not return the results, so it is only useful for performing
+       computations with side-effects.
+
+       > Example
+         Printing each key and value in the dictionary.
+
+             import Standard.Examples
+
+             example_each_with_key = Examples.dictionary.each_with_key k->v->
+                 IO.println k
+                 IO.println v
+    each_with_key : (Any -> Any -> Any) -> Nothing
+    each_with_key self function =
+        self.to_vector.each pair->
+            function pair.first pair.last
+
+    ## GROUP Conversions
+       ICON convert
+       Returns an unsorted vector of key-value pairs (nested 2 element vectors).
+       `Dictionary.from_vector` method is an inverse method, so the following
+       expression is true for all dictionaries:
+       `Dictionary.from_vector dictionary.to_vector == dictionary`.
+    to_vector : Vector Any
+    to_vector self = @Builtin_Method "Dictionary.to_vector"
+
+    ## PRIVATE
+       Returns a text representation of this Dictionary.
+    to_text : Text
+    to_text self = @Builtin_Method "Dictionary.to_text"
+
+    ## PRIVATE
+    get_builtin : Any -> Any -> Any
+    get_builtin self key ~if_missing = @Builtin_Method "Dictionary.get_builtin"
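A small usage sketch of the new `Dictionary` API defined above (illustrative only; `example_roundtrip` is an invented name):

    example_roundtrip =
        d = Dictionary.from_keys_and_values ["a", "b"] [1, 2]
        d2 = d.insert "c" 3
        # `from_vector` and `to_vector` are documented as inverses:
        Dictionary.from_vector d2.to_vector == d2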
@@ -1,7 +1,7 @@
 import project.Any.Any
+import project.Data.Hashset.Hashset
 import project.Data.Locale.Locale
 import project.Data.Numbers.Number
-import project.Data.Set.Set
 import project.Data.Text.Case_Sensitivity.Case_Sensitivity
 import project.Data.Text.Regex.Regex
 import project.Data.Text.Text
@@ -198,9 +198,7 @@ type Filter_Condition
             Like sql_pattern _ ->
                 regex = sql_like_to_regex sql_pattern
                 handle_nothing <| regex.matches
-            Is_In values _ ->
-                set = Set.from_vector values
-                set.contains
+            Is_In values _ -> Hashset.from_vector values . contains
         if self.action == Filter_Action.Keep then base else v -> (base v).not

     ## PRIVATE
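The rewritten `Is_In` branch returns the `contains` method of a freshly built `Hashset` as the predicate, replacing the two-step local binding; an equivalent expanded form (a sketch, not the committed code) would be:

    is_in_predicate values =
        set = Hashset.from_vector values
        v-> set.contains v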
@@ -1,6 +1,6 @@
 import project.Any.Any
 import project.Data.Array_Proxy.Array_Proxy
-import project.Data.Map.Map
+import project.Data.Dictionary.Dictionary
 import project.Data.Numbers.Integer
 import project.Data.Ordering.Comparable
 import project.Data.Ordering.Ordering
@@ -13,9 +13,9 @@ from project.Data.Boolean import Boolean, False, True
 from project.Data.Text.Extensions import all

 ## An unordered collection of unique values.
-type Set
+type Hashset
     ## PRIVATE
-    Value (underlying_map : Map Any Nothing)
+    Value (underlying_dictionary : Dictionary Any Nothing)

     ## PRIVATE
        ADVANCED
@@ -28,30 +28,30 @@ type Set
        occurrence of each duplicated element is retained in the set. If set to
        `True` it will raise an `Illegal_Argument` if duplicate elements are
        encountered.
-    from_vector : Vector Any -> Boolean -> Set ! Illegal_Argument
+    from_vector : Vector Any -> Boolean -> Hashset ! Illegal_Argument
     from_vector (vector : Vector) (error_on_duplicates : Boolean = False) =
         pairs_array = Array_Proxy.new vector.length (i-> [vector.at i, Nothing])
         pairs = Vector.from_polyglot_array pairs_array
-        map = Map.from_vector pairs error_on_duplicates=error_on_duplicates
-        Set.Value map
+        dictionary = Dictionary.from_vector pairs error_on_duplicates=error_on_duplicates
+        Hashset.Value dictionary

     ## PRIVATE
        ADVANCED
        Constructs an empty set.
-    empty : Set
-    empty = Set.Value Map.empty
+    empty : Hashset
+    empty = Hashset.Value Dictionary.empty

     ## GROUP Conversions
        ICON convert
        Returns a vector containing all elements of this set.
     to_vector : Vector
-    to_vector self = self.underlying_map.keys
+    to_vector self = self.underlying_dictionary.keys

     ## GROUP Metadata
        ICON metadata
        Returns the number of elements in this set.
     size : Integer
-    size self = self.underlying_map.size
+    size self = self.underlying_dictionary.size

     ## GROUP Metadata
        ICON metadata
@@ -63,19 +63,19 @@ type Set
        ICON metadata
        Checks if the set is empty.
     is_empty : Boolean
-    is_empty self = self.underlying_map.is_empty
+    is_empty self = self.underlying_dictionary.is_empty

     ## GROUP Logical
        ICON metadata
        Checks if the set is not empty.
     not_empty : Boolean
-    not_empty self = self.underlying_map.not_empty
+    not_empty self = self.underlying_dictionary.not_empty

     ## GROUP Logical
        ICON preparation
        Checks if this set contains a given value.
     contains : Any -> Boolean
-    contains self value = self.underlying_map.contains_key value
+    contains self value = self.underlying_dictionary.contains_key value

     ## GROUP Logical
        ICON preparation
@@ -103,48 +103,48 @@ type Set
        GROUP Calculations
        ICON row_add
        Adds a value to this set.
-    insert : Any -> Set
+    insert : Any -> Hashset
     insert self value =
-        new_map = self.underlying_map.insert value Nothing
-        Set.Value new_map
+        dictionary = self.underlying_dictionary.insert value Nothing
+        Hashset.Value dictionary

     ## GROUP Calculations
        ICON union
        Creates a union of the two sets.
-    union : Set -> Set
-    union self (other : Set) =
-        start_map = self.underlying_map
-        new_map = other.to_vector.fold start_map m-> el-> m.insert el Nothing
-        Set.Value new_map
+    union : Hashset -> Hashset
+    union self (other : Hashset) =
+        start_dictionary = self.underlying_dictionary
+        dictionary = other.to_vector.fold start_dictionary m-> el-> m.insert el Nothing
+        Hashset.Value dictionary

     ## GROUP Calculations
        ICON join
        Creates an intersection of the two sets.
-    intersection : Set -> Set
-    intersection self (other : Set) =
-        other_map = other.underlying_map
-        new_map = self.underlying_map.keys.fold Map.empty m-> el->
-            if other_map.contains_key el then m.insert el Nothing else m
-        Set.Value new_map
+    intersection : Hashset -> Hashset
+    intersection self (other : Hashset) =
+        other_dictionary = other.underlying_dictionary
+        dictionary = self.underlying_dictionary.keys.fold Dictionary.empty m-> el->
+            if other_dictionary.contains_key el then m.insert el Nothing else m
+        Hashset.Value dictionary

     ## ICON join
        Computes a set difference.

        Returns the set that contains all elements of this set that are not in
        the other set.
-    difference : Set -> Set
-    difference self (other : Set) =
-        other_map = other.underlying_map
-        new_map = self.underlying_map.keys.fold Map.empty m-> el->
-            if other_map.contains_key el then m else m.insert el Nothing
-        Set.Value new_map
+    difference : Hashset -> Hashset
+    difference self (other : Hashset) =
+        other_dictionary = other.underlying_dictionary
+        dictionary = self.underlying_dictionary.keys.fold Dictionary.empty m-> el->
+            if other_dictionary.contains_key el then m else m.insert el Nothing
+        Hashset.Value dictionary

     ## PRIVATE
     to_text : Text
-    to_text self = self.to_vector.map .pretty . join ", " "Set{" "}"
+    to_text self = self.to_vector.map .pretty . join ", " "Hashset{" "}"

 ## PRIVATE
-type Set_Comparator
+type Hashset_Comparator
     ## PRIVATE
     compare x y =
         if x.size != y.size then Nothing else
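A hedged sketch of the renamed set algebra above (invented example values):

    example_hashset_algebra =
        a = Hashset.from_vector [1, 2, 3]
        b = Hashset.from_vector [3, 4]
        a.union b . size              # 4
        a.intersection b . to_vector  # [3]
        a.difference b . to_vector    # [1, 2]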
@@ -2,7 +2,7 @@ import project.Any.Any
 import project.Data.Array.Array
 import project.Data.Array_Proxy.Array_Proxy
 import project.Data.Decimal.Decimal
-import project.Data.Map.Map
+import project.Data.Dictionary.Dictionary
 import project.Data.Numbers.Float
 import project.Data.Numbers.Integer
 import project.Data.Numbers.Number
@@ -252,13 +252,13 @@ type JS_Object

     ## GROUP Logical
        ICON metadata
-       Returns True iff the Map is empty, i.e., does not have any entries.
+       Returns True if the JS_Object is empty, i.e., does not have any entries.
     is_empty : Boolean
     is_empty self = self.length == 0

     ## GROUP Logical
        ICON metadata
-       Returns True iff the Map is not empty, i.e., has at least one entry.
+       Returns True if the JS_Object is not empty, i.e., has at least one entry.
     not_empty : Boolean
     not_empty self = self.is_empty.not

@@ -304,10 +304,10 @@ type JS_Object
        Creates an Enso object from the JS_Object.
     into : Any -> Any
     into self target_type = case target_type of
         JS_Object -> self
         Vector -> self.to_vector
-        Map -> Map.from_vector self.to_vector
+        Dictionary -> Dictionary.from_vector self.to_vector
         _ ->
             ## First try a conversion
             Panic.catch No_Such_Conversion (self.to target_type) _->
                 ## If that fails, try to construct the type
@@ -2,10 +2,10 @@ import project.Any.Any
 import project.Data.Array.Array
 import project.Data.Array_Proxy.Array_Proxy
 import project.Data.Decimal.Decimal
+import project.Data.Dictionary.Dictionary
 import project.Data.Json.JS_Object
 import project.Data.Json.Json
 import project.Data.Locale.Locale
-import project.Data.Map.Map
 import project.Data.Numbers.Float
 import project.Data.Numbers.Integer
 import project.Data.Numbers.Number
@@ -182,10 +182,10 @@ Locale.to_js_object self =
    For Map, this is serialized as a Vector of Key-Value pairs.

    Enso Maps support arbitrary types as map keys, so we cannot serialize them into JS Objects because there only strings are accepted as keys.
-Map.to_js_object : JS_Object
-Map.to_js_object self =
-    map_vector = self.to_vector
-    map_vector.map p-> [p.first.to_js_object, p.second.to_js_object]
+Dictionary.to_js_object : JS_Object
+Dictionary.to_js_object self =
+    as_vector = self.to_vector
+    as_vector.map p-> [p.first.to_js_object, p.second.to_js_object]

 ## PRIVATE
    ICON convert
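Because dictionary keys may be of any type, serialization goes to a vector of pairs rather than a JS object; a minimal sketch (invented values):

    example_to_js =
        d = Dictionary.from_vector [["a", 1], [2, "two"]]
        d.to_js_object  # [["a", 1], [2, "two"]]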
@@ -1,436 +1,37 @@
 import project.Any.Any
-import project.Data.Numbers.Integer
-import project.Data.Pair.Pair
-import project.Data.Text.Text
 import project.Data.Vector.Vector
 import project.Error.Error
-import project.Errors.Illegal_Argument.Illegal_Argument
-import project.Errors.No_Such_Key.No_Such_Key
-import project.Nothing.Nothing
-import project.Panic.Panic
+import project.Errors.Deprecated.Deprecated
 from project.Data.Boolean import Boolean, False, True
-from project.Data.Text.Extensions import all

-## A key-value store. It is possible to use any type as keys and values and mix them in
-   one Map. Keys are checked for equality based on their hash code and `==` operator, which
-   is both an internal part of Enso. Enso is capable of computing a hash code, and checking
-   for equality any objects that can appear in Enso - primitives, Atoms, values coming from
-   different languages, etc.
-
-   For keys that are not reflexive, like `Number.nan`,
-   [Same Value equality specification](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Equality_comparisons_and_sameness#same-value-zero_equality)
-   is used. This means that both `Number.nan` and types with comparators that violate
-   reflexivity (e.g. their `compare` method always returns `Nothing`) can be used as keys
-   in the Map.
-
-   A single key-value pair is called an *entry*.
-
-   It is possible to pass a Map created in Enso to foreign functions, where it will be treated
-   as appropriate map structures - in Python that is a dictionary, and in JavaScript, it is
-   a `Map`. And likewise, it is possible to pass a foreign map into Enso, where it will be
-   treated as a Map.
-@Builtin_Type
+## PRIVATE
+   Deprecated placeholder for the Map type.
 type Map key value
     ## PRIVATE
-       ADVANCED
-       Returns an empty map.
-    empty : Map
-    empty = @Builtin_Method "Map.empty"
+       DEPRECATED Use Dictionary.empty instead.
+    empty : Any ! Deprecated
+    empty =
+        Error.throw (Deprecated.Warning "Standard.Base.Data.Map.Map" "empty" "Deprecated: `Map.empty` has been replaced by `Dictionary.empty`.")

     ## PRIVATE
-       ADVANCED
-       Returns a single-element map with the given key and value.
-       A Call to `Map.singleton key value` is the same as a call to
-       `Map.empty.insert key value`.
-
-       Arguments:
-       - key: The key to to use for `value` in the map.
-       - value: The value to store under 'key' in the map.
-
-       > Example
-         Create a single element map storing the key "my_key" and the value 2.
-
-             import Standard.Base.Data.Map.Map
-
-             example_singleton = Map.singleton "my_key" 2
-    singleton : Any -> Any -> Map
-    singleton key value = Map.empty.insert key value
+       DEPRECATED Use Dictionary.singleton instead.
+    singleton : Any -> Any -> Any ! Deprecated
+    singleton key value =
+        _ = [key, value]
+        Error.throw (Deprecated.Warning "Standard.Base.Data.Map.Map" "singleton" "Deprecated: `Map.singleton` has been replaced by `Dictionary.singleton`.")

-    ## ALIAS dictionary, lookup table
-       GROUP Constants
+    ## PRIVATE
        ICON convert
-       Builds a map from two Vectors. The first vector contains the keys, and
-       the second vector contains the values. The two vectors must be of the
-       same length.
-
-       Arguments:
-       - keys: A vector of keys.
-       - values: A vector of values.
-       - error_on_duplicates: A flag which specifies if duplicate keys on the
-         input vector should result in an error. By default, set to `True`,
-         meaning that if two entries in the vector share the same key, an
-         `Illegal_Argument` error is raised. If set to `False`, the last entry
-         with a given key will be kept.
-    from_keys_and_values : Vector Any -> Vector Any -> Boolean -> Map ! Illegal_Argument
+       DEPRECATED Use Dictionary.from_keys_and_values instead.
+    from_keys_and_values : Vector Any -> Vector Any -> Boolean -> Any ! Deprecated
     from_keys_and_values keys:Vector values:Vector error_on_duplicates:Boolean=True =
-        if keys.length != values.length then Error.throw (Illegal_Argument.Error "`Map.from_keys_and_values` encountered two vectors of different lengths.") else
-            keys.fold_with_index Map.empty current-> idx-> key->
-                if error_on_duplicates.not || (current.contains_key key . not) then current.insert key (values.at idx) else
-                    Error.throw (Illegal_Argument.Error "`Map.from_keys_and_values` encountered duplicate key: "+key.to_display_text)
+        _ = [keys, values, error_on_duplicates]
+        Error.throw (Deprecated.Warning "Standard.Base.Data.Map.Map" "from_keys_and_values" "Deprecated: `Map.from_keys_and_values` has been replaced by `Dictionary.from_keys_and_values`.")

-    ## ALIAS dictionary, lookup table
-       GROUP Constants
+    ## PRIVATE
        ICON convert
-       Builds a map from a vector of key-value pairs, with each key-value pair
-       represented as a 2 element vector.
-
-       Arguments:
-       - vec: A vector of key-value pairs (2 element vectors).
-       - error_on_duplicates: A flag which specifies if duplicate keys on the
-         input vector should result in an error. By default, set to `True`,
-         meaning that if two entries in the vector share the same key, an
-         `Illegal_Argument` error is raised. If set to `False`, the last entry
-         with a given key will be kept.
-
-       > Example
-         Building a map containing two key-value pairs.
-
-             import Standard.Base.Data.Map.Map
-
-             example_from_vector = Map.from_vector [["A", 1], ["B", 2]]
-    from_vector : Vector Any -> Boolean -> Map ! Illegal_Argument
+       DEPRECATED Use Dictionary.from_vector instead.
+    from_vector : Vector Any -> Boolean -> Any ! Deprecated
     from_vector vec error_on_duplicates=True =
-        vec.fold Map.empty m-> el-> if el.length != 2 then Error.throw (Illegal_Argument.Error "`Map.from_vector` encountered an invalid value. Each value in the vector has to be a key-value pair - it must have exactly 2 elements.") else
-            key = el.at 0
-            value = el.at 1
-            if error_on_duplicates.not || (m.contains_key key . not) then m.insert key value else
-                Error.throw (Illegal_Argument.Error "`Map.from_vector` encountered duplicate key: "+key.to_display_text)
-
-    ## GROUP Logical
-       ICON metadata
-       Returns True iff the Map is empty, i.e., does not have any entries.
-    is_empty : Boolean
-    is_empty self = self.size == 0
-
-    ## GROUP Logical
-       ICON metadata
-       Returns True iff the Map is not empty, i.e., has at least one entry.
-    not_empty : Boolean
-    not_empty self = self.is_empty.not
-
-    ## GROUP Metadata
-       ICON metadata
-       Returns the number of entries in this map.
-    size : Integer
-    size self = @Builtin_Method "Map.size"
-
-    ## GROUP Metadata
-       ICON metadata
-       Returns the number of entries in this map.
-    length : Integer
-    length self = self.size
-
-    ## GROUP Calculations
-       ICON row_add
-       Inserts a key-value mapping into this map, overriding any existing
-       instance of `key` with the new `value`.
-
-       Note that since the return type is also a `Map`, multiple `insert`
-       calls can be chained, e.g., `map.insert "A" 1 . insert "B" 2`.
-
-       Due to the limitation of the current implementation, inserts with a
-       key that is already contained in the map, or insert on a map instance that
-       is re-used in other computations, have a linear time complexity.
-       For all the other cases, the time complexity of this method is constant.
-
-       Arguments:
-       - key: The key to insert the value for.
-       - value: The value to associate with the `key`.
-
-       > Example
-         Insert the value "seven" into the map for the key 7.
-
-             import Standard.Base.Data.Map.Map
-             import Standard.Examples
-
-             example_insert = Examples.map.insert 7 "seven"
-    insert : Any -> Any -> Map
-    insert self key value = @Builtin_Method "Map.insert"
-
-    ## GROUP Selections
-       ICON table_clean
-       Removes an entry specified by the given key from this map, and
-       returns a new map without this entry. Throw `No_Such_Key.Error`
-       if `key` is not present.
-
-       Arguments:
-       - key: The key to look up in the map.
-
-       > Example
-         Remove key "A" from a map
-
-             import Standard.Data.Map.Map
-
-             Examples.map.remove "A"
-
-    remove : Any -> Map ! No_Such_Key
-    remove self key =
-        Panic.catch Any (self.remove_builtin key) _->
-            Error.throw (No_Such_Key.Error self key)
-
-    ## GROUP Selections
-       ICON parse3
-       Gets the value associated with `key` in this map, or throws a
-       `No_Such_Key.Error` if `key` is not present.
-
-       This method has a constant time complexity.
-
-       Arguments:
-       - key: The key to look up in the map.
-
-       > Example
-         Looks up the value for the key "A" in a map.
-
-             import Standard.Base.Data.Map.Map
-             import Standard.Examples
-
-             example_at = Examples.map.at "A"
-    at : Any -> Any ! No_Such_Key
-    at self key = self.get key (Error.throw (No_Such_Key.Error self key))
-
-    ## ICON parse3
-       Gets the value associated with `key` in this map, or returns
-       `if_missing` if it isn't present.
-
-       This method has a constant time complexity.
-
-       Arguments:
-       - key: The key to look up in the map.
-       - if_missing: The value to use if the key isn't present.
-
-       > Example
-         Get the value for the key 2 in a map or instead return "zero" if it
-         isn't present.
-
-             import Standard.Base.Data.Map.Map
-             import Standard.Examples
-
-             example_get = Examples.map.get 2 "zero"
-    get : Any -> Any -> Any
-    get self key ~if_missing=Nothing = self.get_builtin key if_missing
-
-    ## GROUP Logical
-       ICON preparation
-       Returns True iff the Map contains the given `key`.
-    contains_key : Any -> Boolean
-    contains_key self key = @Builtin_Method "Map.contains_key"
-
-    ## GROUP Selections
-       ICON select_column
-       Returns an unsorted vector of all the keys in this Map.
-    keys : Vector Any
-    keys self = self.to_vector.map pair-> pair.at 0
-
-    ## GROUP Selections
-       ICON select_column
-       Returns an unsorted vector of all the values in this Map.
-    values : Vector Any
-    values self = self.to_vector.map pair-> pair.at 1
-
-    ## ICON column_add
-       Maps a function over each value in this map.
-
-       Arguments:
-       - function: The function to apply to each value in the map, taking a
-         value and returning a value.
-
-       > Example
-         Append "_word" to all values in the map.
-
-             import Standard.Base.Data.Map.Map
-             import Standard.Examples
-
-             example_map = Examples.map.map (+ "_word")
-    map : (Any -> Any) -> Map
-    map self function =
-        kv_func = _ -> function
-        self.map_with_key kv_func
-
-    ## ICON column_add
-       Maps a function over each key-value pair in the map, transforming the
-       value.
-
-       Arguments:
-       - function: The function to apply to each key and value in the map,
-         taking a key and a value and returning a value.
-
-       > Example
-         Prepend the keys to the values in the map.
-
-             import Standard.Base.Data.Map.Map
-             import Standard.Examples
-
-             example_map_with_key =
-                 Examples.map.map_with_key (k -> v -> k.to_text + "-" + v)
-    map_with_key : (Any -> Any -> Any) -> Map
-    map_with_key self function =
-        Map.from_vector <| self.to_vector.map pair->
-            key = pair.first
-            value = pair.last
-            [key, (function key value)]
-
-    ## ICON column_add
-       Maps a function over each key in this map.
-
-       Arguments:
-       - function: The function to apply to each key in the map, taking a key
-         and returning a key.
-
-       > Example
-         Doubling all keys in the map.
-
-             import Standard.Base.Data.Map.Map
-             import Standard.Examples
-
-             example_map_keys = Examples.map.map_keys *2
-    map_keys : (Any -> Any) -> Map
-    map_keys self function =
-        trans_function = k -> v -> [function k, v]
-        self.transform trans_function
-
-    ## ICON column_add
-       Transforms the map's keys and values to create a new map.
-
-       Arguments:
-       - function: The function used to transform the map, taking a key and a
-         value and returning a pair of `[key, value]`.
-
-       ! Error Conditions
-         - If multiple map entries end up with duplicate keys after the
-           transformation, an `Illegal_Argument.Error` is thrown.
-
-       > Example
-         Turn all keys into `Text` and append "_word" to the values in the map.
-
-             import Standard.Base.Data.Map.Map
-             import Standard.Examples
-
-             example_transform =
-                 Examples.map.transform (k -> v -> [k.to_text, v + "_word"])
-    transform : (Any -> Any -> [Any, Any]) -> Map
-    transform self function =
-        func_pairs = p -> function (p.at 0) (p.at 1)
-        vec_transformed = self.to_vector.map func_pairs
-        new_map = Map.from_vector vec_transformed error_on_duplicates=True
-        new_map.catch Illegal_Argument error->
-            case error.message.starts_with "`Map.from_vector` encountered duplicate key" of
-                True ->
-                    new_message = error.message.replace "from_vector" "transform"
-                    Error.throw (Illegal_Argument.Error new_message error.cause)
-                False -> new_map
-
-    ## ICON transform4
-       Combines the values in the map.
-
-       Arguments:
-       - init: The initial value for the fold.
-       - function: A binary function to apply to pairs of values in the map.
-
-       > Example
-         Find the length of the longest word in the map.
-
-             import Standard.Base.Data.Map.Map
-             import Standard.Examples
-
-             example_fold = Examples.map.fold 0 (l -> r -> l.max r.length)
-    fold : Any -> (Any -> Any -> Any) -> Any
-    fold self init function = self.values.fold init function
-
-    ## ICON transform4
-       Combines the key-value pairs in the map.
-
-       Arguments:
-       - init: The initial value for the fold.
-       - function: A function taking the left value, the current key, and the
-         current value, and combining them to yield a single value.
-
-       > Example
-         Glue the values in the map together with the keys.
-
-             import Standard.Base.Data.Map.Map
-             import Standard.Examples
-
-             example_fold_with_key =
-                 Examples.map.fold_with_key "" (l -> k -> v -> l + k.to_text + v)
-    fold_with_key : Any -> (Any -> Any -> Any -> Any) -> Any
-    fold_with_key self init function =
-        self.to_vector.fold init acc-> pair->
-            function acc pair.first pair.last
-
-    ## PRIVATE
-       ADVANCED
-       Applies a function to each value in the map.
-
-       Arguments:
-       - function: The function to apply to each value in the map, taking a
-         value and returning anything.
-
-       This method does not return the results, so it is only useful for performing
-       computations with side-effects.
-
-       If the function returns a dataflow error, the error is converted to a
-       panic and thrown immediately stopping further processing.
-
-       > Example
-         Printing each value in the map.
-
-             import Standard.Base.Data.Map.Map
-             import Standard.Examples
-
-             example_each = Examples.map.each IO.println
-    each : (Any -> Any) -> Nothing
-    each self function =
-        kv_func = _ -> function
-        self.each_with_key kv_func
-
-    ## PRIVATE
-       ADVANCED
-       Applies a function to each key-value pair in the map.
-
-       Arguments:
-       - function: The function to apply to each key-value pair in the map,
-         taking a key and a value and returning anything.
-
-       This method does not return the results, so it is only useful for performing
-       computations with side-effects.
-
-       > Example
-         Printing each key and value in the map.
-
-             import Standard.Base.Data.Map.Map
-             import Standard.Examples
-
-             example_each_with_key = Examples.map.each_with_key k->v->
-                 IO.println k
-                 IO.println v
-    each_with_key : (Any -> Any -> Any) -> Nothing
-    each_with_key self function =
-        self.to_vector.each pair->
-            function pair.first pair.last
-
-    ## GROUP Conversions
-       ICON convert
-       Returns an unsorted vector of key-value pairs (nested 2 element vectors).
-       `Map.from_vector` method is an inverse method, so the following expression
-       is true for all maps: `Map.from_vector map.to_vector == map`.
-    to_vector : Vector Any
-    to_vector self = @Builtin_Method "Map.to_vector"
-
-    ## PRIVATE
-       Returns a text representation of this Map.
-    to_text : Text
-    to_text self = @Builtin_Method "Map.to_text"
-
-    ## PRIVATE
-    get_builtin : Any -> Any -> Any
-    get_builtin self key ~if_missing = @Builtin_Method "Map.get_builtin"
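With the placeholder above, calling an old `Map` static method now yields a `Deprecated` dataflow error instead of a value; a sketch of handling it (assuming the usual `.catch` idiom):

    example_deprecated =
        result = Map.empty
        result.catch Deprecated error-> error.to_display_text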
@@ -1,8 +1,8 @@
 import project.Any.Any
 import project.Data.Array.Array
+import project.Data.Dictionary.Dictionary
 import project.Data.Filter_Condition.Filter_Condition
 import project.Data.Json.JS_Object
-import project.Data.Map.Map
 import project.Data.Numbers.Integer
 import project.Data.Range.Range
 import project.Data.Text.Case_Sensitivity.Case_Sensitivity
@@ -379,15 +379,16 @@ type Regex
        Return a vector of all named group names.
     named_groups : Vector Text
     named_groups self =
-        map = polyglot_map_to_map self.internal_regex_object.groups
-        map.keys
+        dictionary = polyglot_map_to_dictionary self.internal_regex_object.groups
+        dictionary.keys

     ## ICON metadata
-       Return a map from group number to group name. Only includes named groups.
-    group_nums_to_names : Map Integer Text
+       Return a Dictionary from group number to group name. Only includes named
+       groups.
+    group_nums_to_names : Dictionary Integer Text
     group_nums_to_names self =
-        map = polyglot_map_to_map self.internal_regex_object.groups
-        map.transform k-> v-> [v.at 0, k]
+        dictionary = polyglot_map_to_dictionary self.internal_regex_object.groups
+        dictionary.transform k-> v-> [v.at 0, k]

     ## ICON text
        Escape the special characters in `expression` such that the result is a
@@ -419,20 +420,20 @@ type Regex
         Regex.compile self.pattern case_insensitive

 ## PRIVATE
-   Convert the polyglot map to a Map.
-polyglot_map_to_map : Any -> Map Any Any
-polyglot_map_to_map map =
+   Convert the polyglot map to a Dictionary.
+polyglot_map_to_dictionary : Any -> Dictionary Any Any
+polyglot_map_to_dictionary map =
     polyglot_keys = Polyglot.get_members map
     keys = Vector.from_polyglot_array polyglot_keys
     pairs = keys.map key-> [key, Polyglot.get_member map key]
-    Map.from_vector pairs
+    Dictionary.from_vector pairs

 ## PRIVATE
    Get the named group from the polyglot map.
 read_group_map : Any -> Text -> Integer | Nothing
 read_group_map polyglot_map name =
-    map = polyglot_map_to_map polyglot_map
-    map.get name
+    dictionary = polyglot_map_to_dictionary polyglot_map
+    dictionary.get name

 ## PRIVATE
 match_to_group_maybe : Match | Nothing -> Text | Nothing
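The conversion helper above is just the generic pairs-to-dictionary idiom: enumerate keys, pair each with its value, and hand the pairs to `Dictionary.from_vector`. A hedged, self-contained sketch of that idiom, using an ordinary Enso dictionary in place of a real polyglot object:

```enso
from Standard.Base import all

# Rebuild a dictionary from explicitly enumerated key-value pairs,
# mirroring what `polyglot_map_to_dictionary` does with polyglot members.
example_pairs_idiom =
    source = Dictionary.from_vector [["x", 1], ["y", 2]]
    pairs = source.keys.map key-> [key, source.get key]
    Dictionary.from_vector pairs
```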
@@ -1,6 +1,6 @@
 import project.Any.Any
+import project.Data.Dictionary.Dictionary
 import project.Data.Json.JS_Object
-import project.Data.Map.Map
 import project.Data.Numbers.Integer
 import project.Data.Range.Range
 import project.Data.Text.Regex.No_Such_Group
@@ -260,7 +260,7 @@ type Match

     ## GROUP Metadata
        ICON metadata
-       Gets a map containing the named capturing groups for the pattern,
+       Gets a Dictionary containing the named capturing groups for the pattern,
        replacing the value for groups that did not participate in the match with
        `default`.

@@ -279,17 +279,18 @@ type Match
        a named group that does not participate to the default value.

        > Example
-         Get the map of all of the named groups in this match, replacing the
-         value for groups that didn't participate in the match with "UNMATCHED".
+         Get the Dictionary of all of the named groups in this match, replacing
+         the value for groups that didn't participate in the match with
+         "UNMATCHED".

              pattern = Regex.compile "(.. .. )(?<letters>.+)()??(?<empty>)??"
              input = "aa ab abc a bc bcd"
              match = pattern.match input
              ## match.named_groups.keys.sort == ["empty", "letters"]
-    named_groups : Any -> Map Text (Text | Any)
+    named_groups : Any -> Dictionary Text (Text | Any)
     named_groups self default=Nothing =
         pattern_named_groups = self.pattern.named_groups
-        Map.from_vector <|
+        Dictionary.from_vector <|
             pattern_named_groups.map name-> [name, self.text name default=default]

     ## ICON split
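Putting the renamed pieces together, a short hedged sketch of reading named groups out of a match as a `Dictionary` (the pattern and input are illustrative):

```enso
from Standard.Base import all

example_named_groups =
    pattern = Regex.compile "(?<year>\d{4})-(?<month>\d{2})"
    match = pattern.match "2024-07"
    groups = match.named_groups default="UNMATCHED"
    # groups.get "year" == "2024"
    groups.get "month"
```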
@@ -1,7 +1,7 @@
 import project.Any.Any
+import project.Data.Dictionary.Dictionary
 import project.Data.Json.Extensions
 import project.Data.Json.JS_Object
-import project.Data.Map.Map
 import project.Data.Numbers.Integer
 import project.Data.Text.Encoding.Encoding
 import project.Data.Text.Text
@@ -305,9 +305,9 @@ type XML_Document

     ## GROUP Selections
        ICON array_new
-       Gets a map containing of the attributes of an XML document.
-    attributes : Map Text Text ! XML_Error
-    attributes self = Map.empty
+       Gets a Dictionary containing of the attributes of an XML document.
+    attributes : Dictionary Text Text ! XML_Error
+    attributes self = Dictionary.empty

     ## GROUP Selections
        ICON metadata
@@ -508,19 +508,19 @@ type XML_Element

     ## GROUP Selections
        ICON array_new
-       Gets a map containing of the attributes of an XML element.
+       Gets a Dictionary containing of the attributes of an XML element.

        > Example
             XML_Document.from_text '<foo bar="one">hello</foo>' . root_element . attributes
-            # => Map.from_vector [["bar", "one"]]
-    attributes : Map Text Text ! XML_Error
+            # => Dictionary.from_vector [["bar", "one"]]
+    attributes : Dictionary Text Text ! XML_Error
     attributes self =
         XML_Error.handle_java_exceptions <|
             named_node_map = self.java_element.getAttributes
             keys_and_values = 0.up_to named_node_map.getLength . map i->
                 node = named_node_map.item i
                 [node.getNodeName, node.getNodeValue]
-            Map.from_vector keys_and_values
+            Dictionary.from_vector keys_and_values

     ## GROUP Selections
        ICON metadata
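The implementation above follows the same recipe as the regex helper: collect `[name, value]` pairs from the underlying Java node map, then call `Dictionary.from_vector`. A hedged usage sketch:

```enso
from Standard.Base import all

example_xml_attributes =
    doc = XML_Document.from_text '<item id="1" label="first">text</item>'
    attrs = doc.root_element.attributes
    # attrs.get "label" == "first"
    attrs.keys.sort
```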
@@ -1,6 +1,6 @@
 import project.Data.Base_64.Base_64
+import project.Data.Dictionary.Dictionary
 import project.Data.Json.JS_Object
-import project.Data.Map.Map
 import project.Data.Text.Text
 import project.Data.Vector.Vector
 import project.Enso_Cloud.Enso_File.Enso_Asset_Type
@@ -56,7 +56,7 @@ type Enso_Secret
         handle_already_exists _ =
             message = "A secret with the name " + name.pretty + " already exists inside of directory " + parent_dir.name + "."
             Error.throw (Illegal_Argument.Error message)
-        error_handlers = Map.from_vector [["resource_already_exists", handle_already_exists]]
+        error_handlers = Dictionary.from_vector [["resource_already_exists", handle_already_exists]]
         id = Utils.http_request_as_json HTTP_Method.Post Utils.secrets_api body error_handlers=error_handlers
         Enso_Secret.Value name id path

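This is the recurring cloud-API pattern touched throughout this commit: a dictionary from error-code `Text` to a handler function, passed as `error_handlers`. A minimal hedged sketch of building such a dictionary (the error code here is illustrative):

```enso
from Standard.Base import all
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument

# Map cloud error codes to handler functions; each handler receives the
# full JSON payload of the error response.
example_error_handlers =
    on_conflict payload =
        _ = payload
        Error.throw (Illegal_Argument.Error "Resource already exists.")
    Dictionary.from_vector [["resource_already_exists", on_conflict]]
```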
@@ -1,9 +1,9 @@
 private

 import project.Any.Any
+import project.Data.Dictionary.Dictionary
 import project.Data.Json.Invalid_JSON
 import project.Data.Json.JS_Object
-import project.Data.Map.Map
 import project.Data.Text.Text
 import project.Enso_Cloud.Enso_File.Enso_Asset_Type
 import project.Enso_Cloud.Enso_File.Enso_File
|
|||||||
The `create_action` function is called with the existing asset for the parent
|
The `create_action` function is called with the existing asset for the parent
|
||||||
directory and for the file, if it already exists, or `Nothing` otherwise, and
|
directory and for the file, if it already exists, or `Nothing` otherwise, and
|
||||||
with a mapping of error handlers that may be added to the request.
|
with a mapping of error handlers that may be added to the request.
|
||||||
generic_create_asset (destination : Enso_File) (allow_existing : Boolean) (create_action : Existing_Enso_Asset -> (Existing_Enso_Asset | Nothing) -> Map -> Any) -> Any =
|
generic_create_asset (destination : Enso_File) (allow_existing : Boolean) (create_action : Existing_Enso_Asset -> (Existing_Enso_Asset | Nothing) -> Dictionary -> Any) -> Any =
|
||||||
parent_directory = destination.parent
|
parent_directory = destination.parent
|
||||||
if parent_directory.is_nothing then Error.throw (Illegal_Argument.Error "Please provide an asset name inside of the root directory.") else
|
if parent_directory.is_nothing then Error.throw (Illegal_Argument.Error "Please provide an asset name inside of the root directory.") else
|
||||||
parent_directory_asset = Existing_Enso_Asset.get_asset_reference_for parent_directory
|
parent_directory_asset = Existing_Enso_Asset.get_asset_reference_for parent_directory
|
||||||
@ -55,7 +55,7 @@ generic_create_asset (destination : Enso_File) (allow_existing : Boolean) (creat
|
|||||||
File_Error.Not_Found _ -> Nothing
|
File_Error.Not_Found _ -> Nothing
|
||||||
_ -> Error.throw error
|
_ -> Error.throw error
|
||||||
if existing_asset.is_nothing.not && allow_existing.not then Error.throw (File_Error.Already_Exists destination) else
|
if existing_asset.is_nothing.not && allow_existing.not then Error.throw (File_Error.Already_Exists destination) else
|
||||||
error_handlers = if existing_asset.is_nothing.not then Map.empty else
|
error_handlers = if existing_asset.is_nothing.not then Dictionary.empty else
|
||||||
## Currently we just report the race condition and request the user to re-run.
|
## Currently we just report the race condition and request the user to re-run.
|
||||||
We don't retry automatically because it is harder than it seems - the `create_action` usually
|
We don't retry automatically because it is harder than it seems - the `create_action` usually
|
||||||
depends on some user code that is writing to a stream (the callback given to `with_output_stream`).
|
depends on some user code that is writing to a stream (the callback given to `with_output_stream`).
|
||||||
@ -64,7 +64,7 @@ generic_create_asset (destination : Enso_File) (allow_existing : Boolean) (creat
|
|||||||
into memory or a temporary file and relies on that for the retry.
|
into memory or a temporary file and relies on that for the retry.
|
||||||
For now, reporting the race condition in a sane way seemed like the simplest choice.
|
For now, reporting the race condition in a sane way seemed like the simplest choice.
|
||||||
This situation should be very rare.
|
This situation should be very rare.
|
||||||
Map.from_vector [["resource_already_exists", Error.throw (Illegal_State.Error "A race-condition has been encountered - another process has created a colliding resource at "+destination.path+". Please try re-running the operation.")]]
|
Dictionary.from_vector [["resource_already_exists", Error.throw (Illegal_State.Error "A race-condition has been encountered - another process has created a colliding resource at "+destination.path+". Please try re-running the operation.")]]
|
||||||
create_action parent_directory_asset existing_asset error_handlers
|
create_action parent_directory_asset existing_asset error_handlers
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
private
|
private
|
||||||
|
|
||||||
|
import project.Data.Dictionary.Dictionary
|
||||||
import project.Data.Json.JS_Object
|
import project.Data.Json.JS_Object
|
||||||
import project.Data.Map.Map
|
|
||||||
import project.Data.Text.Text
|
import project.Data.Text.Text
|
||||||
import project.Data.Time.Date_Time.Date_Time
|
import project.Data.Time.Date_Time.Date_Time
|
||||||
import project.Data.Time.Date_Time_Formatter.Date_Time_Formatter
|
import project.Data.Time.Date_Time_Formatter.Date_Time_Formatter
|
||||||
@ -83,7 +83,7 @@ type Existing_Enso_Asset
|
|||||||
Resolves a path to an existing asset in the cloud.
|
Resolves a path to an existing asset in the cloud.
|
||||||
resolve_path (path : Text) ~if_not_found = path.if_not_error <|
|
resolve_path (path : Text) ~if_not_found = path.if_not_error <|
|
||||||
handle_not_found _ = Error.throw Not_Found
|
handle_not_found _ = Error.throw Not_Found
|
||||||
error_handlers = Map.from_vector [["resource_missing", handle_not_found]]
|
error_handlers = Dictionary.from_vector [["resource_missing", handle_not_found]]
|
||||||
|
|
||||||
uri = ((URI.from Utils.cloud_root_uri) / "path/resolve") . add_query_argument "path" path
|
uri = ((URI.from Utils.cloud_root_uri) / "path/resolve") . add_query_argument "path" path
|
||||||
response = Utils.http_request_as_json HTTP_Method.Get uri error_handlers=error_handlers
|
response = Utils.http_request_as_json HTTP_Method.Get uri error_handlers=error_handlers
|
||||||
|
@ -1,8 +1,8 @@
|
|||||||
private
|
private
|
||||||
|
|
||||||
import project.Any.Any
|
import project.Any.Any
|
||||||
|
import project.Data.Dictionary.Dictionary
|
||||||
import project.Data.Json.Invalid_JSON
|
import project.Data.Json.Invalid_JSON
|
||||||
import project.Data.Map.Map
|
|
||||||
import project.Data.Numbers.Integer
|
import project.Data.Numbers.Integer
|
||||||
import project.Data.Text.Text
|
import project.Data.Text.Text
|
||||||
import project.Data.Time.Duration.Duration
|
import project.Data.Time.Duration.Duration
|
||||||
@ -72,7 +72,7 @@ flush_caches = CloudAPI.flushCloudCaches
|
|||||||
## PRIVATE
|
## PRIVATE
|
||||||
Performs a standard request to the Enso Cloud API,
|
Performs a standard request to the Enso Cloud API,
|
||||||
parsing the result as JSON.
|
parsing the result as JSON.
|
||||||
http_request_as_json (method : HTTP_Method) (url : URI) (body : Request_Body = Request_Body.Empty) (additional_headers : Vector = []) (error_handlers : Map Text (Any -> Any) = Map.empty) (retries : Integer = 3) -> Any ! Enso_Cloud_Error =
|
http_request_as_json (method : HTTP_Method) (url : URI) (body : Request_Body = Request_Body.Empty) (additional_headers : Vector = []) (error_handlers : Dictionary Text (Any -> Any) = Dictionary.empty) (retries : Integer = 3) -> Any ! Enso_Cloud_Error =
|
||||||
response = http_request method url body additional_headers error_handlers retries
|
response = http_request method url body additional_headers error_handlers retries
|
||||||
response.decode_as_json.catch Invalid_JSON error->
|
response.decode_as_json.catch Invalid_JSON error->
|
||||||
Error.throw (Enso_Cloud_Error.Invalid_Response_Payload error)
|
Error.throw (Enso_Cloud_Error.Invalid_Response_Payload error)
|
||||||
@ -87,7 +87,7 @@ http_request_as_json (method : HTTP_Method) (url : URI) (body : Request_Body = R
|
|||||||
Custom error handlers can be provided as a mapping from error codes
|
Custom error handlers can be provided as a mapping from error codes
|
||||||
(defined in the cloud project) to functions that take the full JSON payload
|
(defined in the cloud project) to functions that take the full JSON payload
|
||||||
and return a custom error.
|
and return a custom error.
|
||||||
http_request (method : HTTP_Method) (url : URI) (body : Request_Body = Request_Body.Empty) (additional_headers : Vector = []) (error_handlers : Map Text (Any -> Any) = Map.empty) (retries : Integer = 3) -> Response ! Enso_Cloud_Error = method.if_not_error <| url.if_not_error <| body.if_not_error <| additional_headers.if_not_error <|
|
http_request (method : HTTP_Method) (url : URI) (body : Request_Body = Request_Body.Empty) (additional_headers : Vector = []) (error_handlers : Dictionary Text (Any -> Any) = Dictionary.empty) (retries : Integer = 3) -> Response ! Enso_Cloud_Error = method.if_not_error <| url.if_not_error <| body.if_not_error <| additional_headers.if_not_error <|
|
||||||
all_headers = [authorization_header] + additional_headers
|
all_headers = [authorization_header] + additional_headers
|
||||||
as_connection_error err = Error.throw (Enso_Cloud_Error.Connection_Error err)
|
as_connection_error err = Error.throw (Enso_Cloud_Error.Connection_Error err)
|
||||||
|
|
||||||
|
@ -3,9 +3,9 @@ private
|
|||||||
import project.Any.Any
|
import project.Any.Any
|
||||||
import project.Data.Array.Array
|
import project.Data.Array.Array
|
||||||
import project.Data.Array_Proxy.Array_Proxy
|
import project.Data.Array_Proxy.Array_Proxy
|
||||||
|
import project.Data.Dictionary.Dictionary
|
||||||
import project.Data.Index_Sub_Range.Index_Sub_Range
|
import project.Data.Index_Sub_Range.Index_Sub_Range
|
||||||
import project.Data.List.List
|
import project.Data.List.List
|
||||||
import project.Data.Map.Map
|
|
||||||
import project.Data.Maybe.Maybe
|
import project.Data.Maybe.Maybe
|
||||||
import project.Data.Numbers.Integer
|
import project.Data.Numbers.Integer
|
||||||
import project.Data.Pair.Pair
|
import project.Data.Pair.Pair
|
||||||
@ -166,7 +166,7 @@ private on_problems_to_number on_problems:Problem_Behavior = case on_problems of
|
|||||||
|
|
||||||
distinct vector on =
|
distinct vector on =
|
||||||
Vector.build builder->
|
Vector.build builder->
|
||||||
vector.fold Map.empty existing->
|
vector.fold Dictionary.empty existing->
|
||||||
item->
|
item->
|
||||||
key = on item
|
key = on item
|
||||||
if (existing.get key False) then existing else
|
if (existing.get key False) then existing else
|
||||||
@ -174,7 +174,7 @@ distinct vector on =
|
|||||||
existing.insert key True
|
existing.insert key True
|
||||||
|
|
||||||
duplicates vector on = Vector.build builder->
|
duplicates vector on = Vector.build builder->
|
||||||
counts = vector.fold Map.empty current-> item->
|
counts = vector.fold Dictionary.empty current-> item->
|
||||||
key = on item
|
key = on item
|
||||||
count = current.get key 0
|
count = current.get key 0
|
||||||
current.insert key count+1
|
current.insert key count+1
|
||||||
|
@ -2,8 +2,10 @@ export project.Any.Any
|
|||||||
export project.Data
|
export project.Data
|
||||||
export project.Data.Array.Array
|
export project.Data.Array.Array
|
||||||
export project.Data.Decimal.Decimal
|
export project.Data.Decimal.Decimal
|
||||||
|
export project.Data.Dictionary.Dictionary
|
||||||
export project.Data.Filter_Condition.Filter_Action
|
export project.Data.Filter_Condition.Filter_Action
|
||||||
export project.Data.Filter_Condition.Filter_Condition
|
export project.Data.Filter_Condition.Filter_Condition
|
||||||
|
export project.Data.Hashset.Hashset
|
||||||
export project.Data.Index_Sub_Range.Index_Sub_Range
|
export project.Data.Index_Sub_Range.Index_Sub_Range
|
||||||
export project.Data.Interval.Bound
|
export project.Data.Interval.Bound
|
||||||
export project.Data.Interval.Interval
|
export project.Data.Interval.Interval
|
||||||
@ -25,7 +27,6 @@ export project.Data.Pair.Pair
|
|||||||
export project.Data.Range.Range
|
export project.Data.Range.Range
|
||||||
export project.Data.Raw_Response
|
export project.Data.Raw_Response
|
||||||
export project.Data.Regression
|
export project.Data.Regression
|
||||||
export project.Data.Set.Set
|
|
||||||
export project.Data.Sort_Direction.Sort_Direction
|
export project.Data.Sort_Direction.Sort_Direction
|
||||||
export project.Data.Statistics.Rank_Method
|
export project.Data.Statistics.Rank_Method
|
||||||
export project.Data.Statistics.Statistic
|
export project.Data.Statistics.Statistic
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
import project.Any.Any
|
import project.Any.Any
|
||||||
import project.Data.Map.Map
|
import project.Data.Dictionary.Dictionary
|
||||||
|
import project.Data.Hashset.Hashset
|
||||||
import project.Data.Pair.Pair
|
import project.Data.Pair.Pair
|
||||||
import project.Data.Set.Set
|
|
||||||
import project.Data.Text.Encoding.Encoding
|
import project.Data.Text.Encoding.Encoding
|
||||||
import project.Data.Text.Text
|
import project.Data.Text.Text
|
||||||
import project.Data.Time.Duration.Duration
|
import project.Data.Time.Duration.Duration
|
||||||
@ -261,8 +261,8 @@ resolve_body_to_publisher_and_boundary body:Request_Body =
|
|||||||
|
|
||||||
Build a BodyPublisher from the given form data.
|
Build a BodyPublisher from the given form data.
|
||||||
The pair's second value is a content boundary in the case of a `multipart/form-data` form; otherwise, Nothing
|
The pair's second value is a content boundary in the case of a `multipart/form-data` form; otherwise, Nothing
|
||||||
build_form_body_publisher : Map Text (Text | File) -> Boolean -> Pair BodyPublisher Text
|
build_form_body_publisher : Dictionary Text (Text | File) -> Boolean -> Pair BodyPublisher Text
|
||||||
build_form_body_publisher (form_data:(Map Text (Text | File))) (url_encoded:Boolean=False) = case url_encoded of
|
build_form_body_publisher (form_data:(Dictionary Text (Text | File))) (url_encoded:Boolean=False) = case url_encoded of
|
||||||
True ->
|
True ->
|
||||||
body_builder = UrlencodedBodyBuilder.new
|
body_builder = UrlencodedBodyBuilder.new
|
||||||
form_data.map_with_key key-> value->
|
form_data.map_with_key key-> value->
|
||||||
@ -280,15 +280,15 @@ build_form_body_publisher (form_data:(Map Text (Text | File))) (url_encoded:Bool
|
|||||||
Pair.new body_builder.build boundary
|
Pair.new body_builder.build boundary
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
fetch_methods : Set HTTP_Method
|
fetch_methods : Hashset HTTP_Method
|
||||||
fetch_methods = Set.from_vector [HTTP_Method.Get, HTTP_Method.Head, HTTP_Method.Options]
|
fetch_methods = Hashset.from_vector [HTTP_Method.Get, HTTP_Method.Head, HTTP_Method.Options]
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
post_methods : Set HTTP_Method
|
post_methods : Hashset HTTP_Method
|
||||||
post_methods = Set.from_vector [HTTP_Method.Post, HTTP_Method.Put, HTTP_Method.Patch, HTTP_Method.Delete]
|
post_methods = Hashset.from_vector [HTTP_Method.Post, HTTP_Method.Put, HTTP_Method.Patch, HTTP_Method.Delete]
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
check_method : Set HTTP_Method -> Any -> Any -> Any ! Illegal_Argument
|
check_method : Hashset HTTP_Method -> Any -> Any -> Any ! Illegal_Argument
|
||||||
check_method allowed_methods method ~action =
|
check_method allowed_methods method ~action =
|
||||||
if allowed_methods.contains method then action else
|
if allowed_methods.contains method then action else
|
||||||
Error.throw (Illegal_Argument.Error ("Unsupported method " + method.to_display_text))
|
Error.throw (Illegal_Argument.Error ("Unsupported method " + method.to_display_text))
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
import project.Any.Any
|
import project.Any.Any
|
||||||
import project.Data.Map.Map
|
import project.Data.Dictionary.Dictionary
|
||||||
import project.Data.Pair.Pair
|
import project.Data.Pair.Pair
|
||||||
import project.Data.Text.Text
|
import project.Data.Text.Text
|
||||||
import project.Data.Vector.Vector
|
import project.Data.Vector.Vector
|
||||||
@ -249,10 +249,10 @@ type Request
|
|||||||
|
|
||||||
example_delete =
|
example_delete =
|
||||||
Request.delete (URI.parse "http://example.com") . with_form []
|
Request.delete (URI.parse "http://example.com") . with_form []
|
||||||
with_form : (Vector | Map) -> Request
|
with_form : (Vector | Dictionary) -> Request
|
||||||
with_form self parts =
|
with_form self parts =
|
||||||
form_data = case parts of
|
form_data = case parts of
|
||||||
_ : Vector -> Map.from_vector parts
|
_ : Vector -> Dictionary.from_vector parts
|
||||||
_ : Map -> parts
|
_ : Dictionary -> parts
|
||||||
new_body = Request_Body.Form_Data form_data
|
new_body = Request_Body.Form_Data form_data
|
||||||
Request.Value self.method self.uri self.headers new_body . with_headers [Header.application_x_www_form_urlencoded]
|
Request.Value self.method self.uri self.headers new_body . with_headers [Header.application_x_www_form_urlencoded]
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
import project.Any.Any
|
import project.Any.Any
|
||||||
import project.Data.Map.Map
|
import project.Data.Dictionary.Dictionary
|
||||||
import project.Data.Text.Encoding.Encoding
|
import project.Data.Text.Encoding.Encoding
|
||||||
import project.Data.Text.Text
|
import project.Data.Text.Text
|
||||||
import project.Network.HTTP.Header.Header
|
import project.Network.HTTP.Header.Header
|
||||||
@ -36,7 +36,7 @@ type Request_Body
|
|||||||
- form_data: the form fields (text or file) to be sent
|
- form_data: the form fields (text or file) to be sent
|
||||||
- url_encoded: if true, use a URL-encoded form; otherwise, use a
|
- url_encoded: if true, use a URL-encoded form; otherwise, use a
|
||||||
multi-part encoding.
|
multi-part encoding.
|
||||||
Form_Data (form_data:(Map Text (Text | File))) (url_encoded:Boolean=False)
|
Form_Data (form_data:(Dictionary Text (Text | File))) (url_encoded:Boolean=False)
|
||||||
|
|
||||||
## Empty request body; used for GET
|
## Empty request body; used for GET
|
||||||
Empty
|
Empty
|
||||||
|
@ -75,7 +75,7 @@ type Response
|
|||||||
|
|
||||||
import Standard.Examples
|
import Standard.Examples
|
||||||
|
|
||||||
example_headers = Map.from_vector error_on_duplicates=True (Examples.get_response.headers.map h-> [h.name, h.value])
|
example_headers = Dictionary.from_vector error_on_duplicates=True (Examples.get_response.headers.map h-> [h.name, h.value])
|
||||||
headers : Vector Header
|
headers : Vector Header
|
||||||
headers self =
|
headers self =
|
||||||
# This is a mapping that maps a header name to a list of values (since headers may be duplicated).
|
# This is a mapping that maps a header name to a list of values (since headers may be duplicated).
|
||||||
|
@ -1,8 +1,6 @@
|
|||||||
import project.Any.Any
|
import project.Any.Any
|
||||||
import project.Data.Array.Array
|
import project.Data.Array.Array
|
||||||
import project.Data.Map.Map
|
|
||||||
import project.Data.Range.Range
|
import project.Data.Range.Range
|
||||||
import project.Data.Set.Set
|
|
||||||
import project.Data.Text.Text
|
import project.Data.Text.Text
|
||||||
import project.Data.Time.Date.Date
|
import project.Data.Time.Date.Date
|
||||||
import project.Data.Time.Date_Range.Date_Range
|
import project.Data.Time.Date_Range.Date_Range
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
import project.Any.Any
|
import project.Any.Any
|
||||||
import project.Data.Array.Array
|
import project.Data.Array.Array
|
||||||
|
import project.Data.Hashset.Hashset
|
||||||
import project.Data.Maybe.Maybe
|
import project.Data.Maybe.Maybe
|
||||||
import project.Data.Numbers.Integer
|
import project.Data.Numbers.Integer
|
||||||
import project.Data.Pair.Pair
|
import project.Data.Pair.Pair
|
||||||
import project.Data.Set.Set
|
|
||||||
import project.Data.Vector.Map_Error
|
import project.Data.Vector.Map_Error
|
||||||
import project.Data.Vector.No_Wrap
|
import project.Data.Vector.No_Wrap
|
||||||
import project.Data.Vector.Vector
|
import project.Data.Vector.Vector
|
||||||
|
@ -168,11 +168,11 @@ type Connection
|
|||||||
Nothing -> Nothing
|
Nothing -> Nothing
|
||||||
_ : Vector -> types
|
_ : Vector -> types
|
||||||
_ -> [types]
|
_ -> [types]
|
||||||
name_map = Map.from_vector [["TABLE_CAT", "Database"], ["TABLE_SCHEM", "Schema"], ["TABLE_NAME", "Name"], ["TABLE_TYPE", "Type"], ["REMARKS", "Description"], ["TYPE_CAT", "Type Database"], ["TYPE_SCHEM", "Type Schema"], ["TYPE_NAME", "Type Name"]]
|
name_dict = Dictionary.from_vector [["TABLE_CAT", "Database"], ["TABLE_SCHEM", "Schema"], ["TABLE_NAME", "Name"], ["TABLE_TYPE", "Type"], ["REMARKS", "Description"], ["TYPE_CAT", "Type Database"], ["TYPE_SCHEM", "Type Schema"], ["TYPE_NAME", "Type Name"]]
|
||||||
result = self.jdbc_connection.with_metadata metadata->
|
result = self.jdbc_connection.with_metadata metadata->
|
||||||
table = Managed_Resource.bracket (metadata.getTables database schema name_like types_vector) .close result_set->
|
table = Managed_Resource.bracket (metadata.getTables database schema name_like types_vector) .close result_set->
|
||||||
result_set_to_table result_set self.dialect.get_type_mapping.make_column_fetcher
|
result_set_to_table result_set self.dialect.get_type_mapping.make_column_fetcher
|
||||||
renamed = table.rename_columns name_map
|
renamed = table.rename_columns name_dict
|
||||||
if all_fields then renamed else
|
if all_fields then renamed else
|
||||||
renamed.select_columns ["Database", "Schema", "Name", "Type", "Description"]
|
renamed.select_columns ["Database", "Schema", "Name", "Type", "Description"]
|
||||||
case include_hidden of
|
case include_hidden of
|
||||||
|
@ -544,17 +544,17 @@ type DB_Table
|
|||||||
> Example
|
> Example
|
||||||
Rename the "Alpha" column to "Beta"
|
Rename the "Alpha" column to "Beta"
|
||||||
|
|
||||||
table.rename_columns (Map.from_vector [["Alpha", "Beta"]])
|
table.rename_columns (Dictionary.from_vector [["Alpha", "Beta"]])
|
||||||
|
|
||||||
> Example
|
> Example
|
||||||
Rename the last column to "LastColumn"
|
Rename the last column to "LastColumn"
|
||||||
|
|
||||||
table.rename_columns (Map.from_vector [[-1, "LastColumn"]])
|
table.rename_columns (Dictionary.from_vector [[-1, "LastColumn"]])
|
||||||
|
|
||||||
> Example
|
> Example
|
||||||
Rename the "Alpha" column to "Beta" and last column to "LastColumn"
|
Rename the "Alpha" column to "Beta" and last column to "LastColumn"
|
||||||
|
|
||||||
table.rename_columns (Map.from_vector [["Alpha", "Beta"], [-1, "LastColumn"]])
|
table.rename_columns (Dictionary.from_vector [["Alpha", "Beta"], [-1, "LastColumn"]])
|
||||||
|
|
||||||
> Example
|
> Example
|
||||||
Rename the first column to "FirstColumn"
|
Rename the first column to "FirstColumn"
|
||||||
@ -569,12 +569,12 @@ type DB_Table
|
|||||||
> Example
|
> Example
|
||||||
For all columns starting with the prefix `name=`, replace it with `key:`.
|
For all columns starting with the prefix `name=`, replace it with `key:`.
|
||||||
|
|
||||||
table.rename_columns (Map.from_vector [["name=(.*)".to_regex, "key:$1"]])
|
table.rename_columns (Dictionary.from_vector [["name=(.*)".to_regex, "key:$1"]])
|
||||||
@column_map Widget_Helpers.make_rename_name_vector_selector
|
@column_map Widget_Helpers.make_rename_name_vector_selector
|
||||||
rename_columns : Map (Text | Integer | Regex) Text | Vector Text | Vector Vector -> Case_Sensitivity -> Boolean -> Problem_Behavior -> DB_Table ! Missing_Input_Columns | Ambiguous_Column_Rename | Too_Many_Column_Names_Provided | Invalid_Column_Names | Duplicate_Output_Column_Names
|
rename_columns : Table | Dictionary (Text | Integer | Regex) Text | Vector Text | Vector Vector -> Case_Sensitivity -> Boolean -> Problem_Behavior -> DB_Table ! Missing_Input_Columns | Ambiguous_Column_Rename | Too_Many_Column_Names_Provided | Invalid_Column_Names | Duplicate_Output_Column_Names
|
||||||
rename_columns self (column_map:(Table | Map | Vector)=["Column"]) (case_sensitivity:Case_Sensitivity=..Default) (error_on_missing_columns:Boolean=True) (on_problems:Problem_Behavior=..Report_Warning) = case column_map of
|
rename_columns self (column_map:(Table | Dictionary | Vector)=["Column"]) (case_sensitivity:Case_Sensitivity=..Default) (error_on_missing_columns:Boolean=True) (on_problems:Problem_Behavior=..Report_Warning) = case column_map of
|
||||||
_ : Table ->
|
_ : Table ->
|
||||||
resolved = Table_Helpers.read_name_map_from_table column_map
|
resolved = Table_Helpers.read_name_mapping_from_table column_map
|
||||||
self.rename_columns resolved case_sensitivity error_on_missing_columns on_problems
|
self.rename_columns resolved case_sensitivity error_on_missing_columns on_problems
|
||||||
_ ->
|
_ ->
|
||||||
new_names = Table_Helpers.rename_columns self.column_naming_helper self.internal_columns column_map case_sensitivity error_on_missing_columns on_problems
|
new_names = Table_Helpers.rename_columns self.column_naming_helper self.internal_columns column_map case_sensitivity error_on_missing_columns on_problems
|
||||||
@ -1035,26 +1035,26 @@ type DB_Table
|
|||||||
Warning.set result []
|
Warning.set result []
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
A helper that creates a two-column table from a Map.
|
A helper that creates a two-column table from a Dictionary.
|
||||||
|
|
||||||
The keys of the `Map` become the first column, with name
|
The keys of the `Dictionary` become the first column, with name
|
||||||
`key_column_name`, and the values of the `Map` become the second column,
|
`key_column_name`, and the values become the second column, with name
|
||||||
with name `value_column_name`.
|
`value_column_name`.
|
||||||
|
|
||||||
For the in-memory database, the `Map` can be empty. For the database
|
For the in-memory database, the `Dictionary` can be empty. For the
|
||||||
backends, it must not be empty.
|
database backends, it must not be empty.
|
||||||
|
|
||||||
Arguments:
|
Arguments:
|
||||||
- map: The `Map` to create the table from.
|
- dict: The `Dictionary` to create the table from.
|
||||||
- key_column_name: The name to use for the first column.
|
- key_column_name: The name to use for the first column.
|
||||||
- value_column_name: The name to use for the second column.
|
- value_column_name: The name to use for the second column.
|
||||||
make_table_from_map : Map Any Any -> Text -> Text -> Table
|
make_table_from_dictionary : Dictionary Any Any -> Text -> Text -> Table
|
||||||
make_table_from_map self map key_column_name value_column_name =
|
make_table_from_dictionary self dict key_column_name value_column_name =
|
||||||
total_size = map.size * 2
|
total_size = dict.size * 2
|
||||||
|
|
||||||
if map.is_empty then Error.throw (Illegal_Argument.Error "Map argument cannot be empty") else
|
if dict.is_empty then Error.throw (Illegal_Argument.Error "Dictionary cannot be empty") else
|
||||||
if total_size > MAX_LITERAL_ELEMENT_COUNT then Error.throw (Illegal_Argument.Error "Map argument is too large ("+map.size.to_text+" entries): materialize a table into the database instead") else
|
if total_size > MAX_LITERAL_ELEMENT_COUNT then Error.throw (Illegal_Argument.Error "Dictionary is too large ("+dict.size.to_text+" entries): materialize a table into the database instead") else
|
||||||
keys_and_values = map.to_vector
|
keys_and_values = dict.to_vector
|
||||||
self.make_table_from_vectors [keys_and_values.map .first, keys_and_values.map .second] [key_column_name, value_column_name]
|
self.make_table_from_vectors [keys_and_values.map .first, keys_and_values.map .second] [key_column_name, value_column_name]
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
@ -1683,8 +1683,8 @@ type DB_Table
|
|||||||
@columns (Widget_Helpers.make_column_name_multi_selector add_regex=True add_by_type=True)
|
@columns (Widget_Helpers.make_column_name_multi_selector add_regex=True add_by_type=True)
|
||||||
@from_column Widget.Text_Input
|
@from_column Widget.Text_Input
|
||||||
@to_column Widget.Text_Input
|
@to_column Widget.Text_Input
|
||||||
replace : (DB_Table | Map) -> Vector (Integer | Text | Regex | By_Type) | Text | Integer | Regex | By_Type -> (Text | Integer | Nothing) -> (Text | Integer | Nothing) -> Boolean -> Problem_Behavior -> DB_Table ! Missing_Input_Columns | Non_Unique_Key | Unmatched_Rows_In_Lookup
|
replace : (DB_Table | Dictionary) -> Vector (Integer | Text | Regex | By_Type) | Text | Integer | Regex | By_Type -> (Text | Integer | Nothing) -> (Text | Integer | Nothing) -> Boolean -> Problem_Behavior -> DB_Table ! Missing_Input_Columns | Non_Unique_Key | Unmatched_Rows_In_Lookup
|
||||||
replace self lookup_table:(DB_Table | Map) columns:(Vector (Integer | Text | Regex | By_Type) | Text | Integer | Regex | By_Type) from_column:(Text | Integer | Nothing)=Nothing to_column:(Text | Integer | Nothing)=Nothing allow_unmatched_rows:Boolean=True on_problems:Problem_Behavior=..Report_Warning =
|
replace self lookup_table:(DB_Table | Dictionary) columns:(Vector (Integer | Text | Regex | By_Type) | Text | Integer | Regex | By_Type) from_column:(Text | Integer | Nothing)=Nothing to_column:(Text | Integer | Nothing)=Nothing allow_unmatched_rows:Boolean=True on_problems:Problem_Behavior=..Report_Warning =
|
||||||
Replace_Helpers.replace self lookup_table columns from_column to_column allow_unmatched_rows on_problems
|
Replace_Helpers.replace self lookup_table columns from_column to_column allow_unmatched_rows on_problems
|
||||||
|
|
||||||
## ALIAS join by row position
|
## ALIAS join by row position
|
||||||
|
@ -17,33 +17,32 @@ from project.Internal.IR.Operation_Metadata import Row_Number_Metadata
|
|||||||
type Dialect_Operations
|
type Dialect_Operations
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
|
Operations supported by a particular SQL dialect and how they are
|
||||||
Operations supported by a particular SQL dialect and how they are implemeneted.
|
implemented.
|
||||||
|
|
||||||
Arguments:
|
Arguments:
|
||||||
- operation_map: The mapping which maps operation names to their
|
- operations_dict: Dictionary mapping operation names to their
|
||||||
implementations; each implementation is a function which takes SQL
|
implementations; each implementation is a function which takes SQL
|
||||||
builders for the arguments, and optionally an additional metadata
|
builders for the arguments, and optionally an additional metadata
|
||||||
argument, and should return a SQL builder yielding code for the whole
|
argument, and should return a SQL builder yielding code for the whole
|
||||||
operation.
|
operation.
|
||||||
Value (operation_map:(Map Text (Vector (SQL_Builder->SQL_Builder))))
|
Value (operations_dict:(Dictionary Text (Vector (SQL_Builder->SQL_Builder))))
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
|
|
||||||
Creates a copy of the dialect that supports additional operations or
|
Creates a copy of the dialect that supports additional operations or
|
||||||
overrides existing ones.
|
overrides existing ones.
|
||||||
|
|
||||||
# extend_with : Vector [Text, Vector SQL_Builder -> SQL_Builder] -> Dialect_Operations
|
# extend_with : Vector [Text, Vector SQL_Builder -> SQL_Builder] -> Dialect_Operations
|
||||||
extend_with : Vector Any -> Dialect_Operations
|
extend_with : Vector Any -> Dialect_Operations
|
||||||
extend_with self mappings =
|
extend_with self mappings =
|
||||||
new_map = mappings.fold self.operation_map (m -> el -> m.insert (el.at 0) (el.at 1))
|
new_dict = mappings.fold self.operations_dict (m -> el -> m.insert (el.at 0) (el.at 1))
|
||||||
Dialect_Operations.Value new_map
|
Dialect_Operations.Value new_dict
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
Checks if an operation is supported by the dialect.
|
Checks if an operation is supported by the dialect.
|
||||||
is_supported : Text -> Boolean
|
is_supported : Text -> Boolean
|
||||||
is_supported self operation =
|
is_supported self operation =
|
||||||
self.operation_map.contains_key operation
|
self.operations_dict.contains_key operation
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
|
|
||||||
@ -200,8 +199,8 @@ base_dialect_operations =
|
|||||||
contains = [["IS_IN", make_is_in], ["IS_IN_COLUMN", make_is_in_column]]
|
contains = [["IS_IN", make_is_in], ["IS_IN_COLUMN", make_is_in_column]]
|
||||||
types = [simple_cast]
|
types = [simple_cast]
|
||||||
windows = [["ROW_NUMBER", make_row_number], ["ROW_NUMBER_IN_GROUP", make_row_number_in_group]]
|
windows = [["ROW_NUMBER", make_row_number], ["ROW_NUMBER_IN_GROUP", make_row_number_in_group]]
|
||||||
base_map = Map.from_vector (arith + logic + compare + functions + agg + counts + text + nulls + contains + types + windows)
|
base_dict = Dictionary.from_vector (arith + logic + compare + functions + agg + counts + text + nulls + contains + types + windows)
|
||||||
Dialect_Operations.Value base_map
|
Dialect_Operations.Value base_dict
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
is_empty = lift_unary_op "IS_EMPTY" arg->
|
is_empty = lift_unary_op "IS_EMPTY" arg->
|
||||||
@ -311,7 +310,7 @@ generate_expression dialect expr = case expr of
|
|||||||
escaped = value.replace "'" "''"
|
escaped = value.replace "'" "''"
|
||||||
SQL_Builder.code ("'" + escaped + "'")
|
SQL_Builder.code ("'" + escaped + "'")
|
||||||
SQL_Expression.Operation kind arguments metadata ->
|
SQL_Expression.Operation kind arguments metadata ->
|
||||||
op = dialect.dialect_operations.operation_map.get kind (Error.throw <| Unsupported_Database_Operation.Error kind)
|
op = dialect.dialect_operations.operations_dict.get kind (Error.throw <| Unsupported_Database_Operation.Error kind)
|
||||||
parsed_args = arguments.map (generate_expression dialect)
|
parsed_args = arguments.map (generate_expression dialect)
|
||||||
result = op parsed_args
|
result = op parsed_args
|
||||||
# If the function expects more arguments, we pass the metadata as the last argument.
|
# If the function expects more arguments, we pass the metadata as the last argument.
|
||||||
|
@ -70,11 +70,10 @@ type Join_Subquery_Setup
|
|||||||
## PRIVATE
|
## PRIVATE
|
||||||
Creates a mapping from names of columns in the original table to
|
Creates a mapping from names of columns in the original table to
|
||||||
corresponding columns in the created subquery.
|
corresponding columns in the created subquery.
|
||||||
column_mapping : Map Text Internal_Column
|
column_mapping : Dictionary Text Internal_Column
|
||||||
column_mapping self =
|
column_mapping self =
|
||||||
Map.from_vector <|
|
Dictionary.from_vector <|
|
||||||
self.old_columns.zip self.new_columns old-> new->
|
self.old_columns.zip self.new_columns old->new->[old.name, new]
|
||||||
[old.name, new]
|
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
prepare_subqueries : Connection -> DB_Table -> DB_Table -> Boolean -> Boolean -> Pair Join_Subquery_Setup
|
prepare_subqueries : Connection -> DB_Table -> DB_Table -> Boolean -> Boolean -> Pair Join_Subquery_Setup
|
||||||
|
@@ -116,7 +116,7 @@ type Postgres_Dialect
         inner_table_alias = table_name_deduplicator.make_unique table.name+"_inner"
         setup = table.context.as_subquery inner_table_alias [table.internal_columns]
         new_columns = setup.new_columns.first
-        column_mapping = Map.from_vector <| new_columns.map c-> [c.name, c]
+        column_mapping = Dictionary.from_vector <| new_columns.map c-> [c.name, c]
         new_key_columns = key_columns.map c-> column_mapping.at c.name
         type_mapping = self.get_type_mapping
         distinct_expressions = new_key_columns.map column->
|
|||||||
x ++ " - FLOOR(CAST(" ++ x ++ " AS decimal) / CAST(" ++ y ++ " AS decimal)) * " ++ y
|
x ++ " - FLOOR(CAST(" ++ x ++ " AS decimal) / CAST(" ++ y ++ " AS decimal)) * " ++ y
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
supported_replace_params : Set Replace_Params
|
supported_replace_params : Hashset Replace_Params
|
||||||
supported_replace_params =
|
supported_replace_params =
|
||||||
e0 = [Replace_Params.Value Text Case_Sensitivity.Default False, Replace_Params.Value Text Case_Sensitivity.Default True, Replace_Params.Value Text Case_Sensitivity.Sensitive False]
|
e0 = [Replace_Params.Value Text Case_Sensitivity.Default False, Replace_Params.Value Text Case_Sensitivity.Default True, Replace_Params.Value Text Case_Sensitivity.Sensitive False]
|
||||||
e1 = [Replace_Params.Value Text Case_Sensitivity.Sensitive True, Replace_Params.Value Text Case_Sensitivity.Insensitive False, Replace_Params.Value Text Case_Sensitivity.Insensitive True]
|
e1 = [Replace_Params.Value Text Case_Sensitivity.Sensitive True, Replace_Params.Value Text Case_Sensitivity.Insensitive False, Replace_Params.Value Text Case_Sensitivity.Insensitive True]
|
||||||
e2 = [Replace_Params.Value Regex Case_Sensitivity.Default False, Replace_Params.Value Regex Case_Sensitivity.Default True, Replace_Params.Value Regex Case_Sensitivity.Sensitive False]
|
e2 = [Replace_Params.Value Regex Case_Sensitivity.Default False, Replace_Params.Value Regex Case_Sensitivity.Default True, Replace_Params.Value Regex Case_Sensitivity.Sensitive False]
|
||||||
e3 = [Replace_Params.Value Regex Case_Sensitivity.Sensitive True, Replace_Params.Value Regex Case_Sensitivity.Insensitive False, Replace_Params.Value Regex Case_Sensitivity.Insensitive True]
|
e3 = [Replace_Params.Value Regex Case_Sensitivity.Sensitive True, Replace_Params.Value Regex Case_Sensitivity.Insensitive False, Replace_Params.Value Regex Case_Sensitivity.Insensitive True]
|
||||||
e4 = [Replace_Params.Value DB_Column Case_Sensitivity.Default False, Replace_Params.Value DB_Column Case_Sensitivity.Sensitive False]
|
e4 = [Replace_Params.Value DB_Column Case_Sensitivity.Default False, Replace_Params.Value DB_Column Case_Sensitivity.Sensitive False]
|
||||||
Set.from_vector <| e0 + e1 + e2 + e3 + e4
|
Hashset.from_vector <| e0 + e1 + e2 + e3 + e4
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
replace : Vector SQL_Builder -> Any -> SQL_Builder
|
replace : Vector SQL_Builder -> Any -> SQL_Builder
|
||||||
|
@ -126,14 +126,14 @@ type Postgres_Type_Mapping
|
|||||||
Column_Fetcher_Module.default_fetcher_for_value_type value_type
|
Column_Fetcher_Module.default_fetcher_for_value_type value_type
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
simple_types_map = Map.from_vector <|
|
simple_types_map = Dictionary.from_vector <|
|
||||||
ints = [[Types.SMALLINT, Value_Type.Integer Bits.Bits_16], [Types.BIGINT, Value_Type.Integer Bits.Bits_64], [Types.INTEGER, Value_Type.Integer Bits.Bits_32]]
|
ints = [[Types.SMALLINT, Value_Type.Integer Bits.Bits_16], [Types.BIGINT, Value_Type.Integer Bits.Bits_64], [Types.INTEGER, Value_Type.Integer Bits.Bits_32]]
|
||||||
floats = [[Types.DOUBLE, Value_Type.Float Bits.Bits_64], [Types.REAL, Value_Type.Float Bits.Bits_32]]
|
floats = [[Types.DOUBLE, Value_Type.Float Bits.Bits_64], [Types.REAL, Value_Type.Float Bits.Bits_32]]
|
||||||
other = [[Types.DATE, Value_Type.Date], [Types.TIME, Value_Type.Time]]
|
other = [[Types.DATE, Value_Type.Date], [Types.TIME, Value_Type.Time]]
|
||||||
ints + floats + other
|
ints + floats + other
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
complex_types_map = Map.from_vector <|
|
complex_types_map = Dictionary.from_vector <|
|
||||||
make_decimal sql_type =
|
make_decimal sql_type =
|
||||||
Value_Type.Decimal sql_type.precision sql_type.scale
|
Value_Type.Decimal sql_type.precision sql_type.scale
|
||||||
make_varchar sql_type =
|
make_varchar sql_type =
|
||||||
|
@@ -125,7 +125,7 @@ type SQLite_Dialect
         inner_table_alias = table_name_deduplicator.make_unique table.name+"_inner"
         setup = table.context.as_subquery inner_table_alias [table.internal_columns]
         new_columns = setup.new_columns.first
-        column_mapping = Map.from_vector <| new_columns.map c-> [c.name, c]
+        column_mapping = Dictionary.from_vector <| new_columns.map c-> [c.name, c]
         new_key_columns = key_columns.map c-> column_mapping.at c.name
         type_mapping = self.get_type_mapping
         distinct_expressions = new_key_columns.map column->
@@ -447,10 +447,10 @@ mod_op = Base_Generator.lift_binary_op "MOD" x-> y->
     x ++ " - FLOOR(CAST(" ++ x ++ " AS REAL) / CAST(" ++ y ++ " AS REAL)) * " ++ y

 ## PRIVATE
-supported_replace_params : Set Replace_Params
+supported_replace_params : Hashset Replace_Params
 supported_replace_params =
     e = [Replace_Params.Value Text Case_Sensitivity.Default False, Replace_Params.Value Text Case_Sensitivity.Sensitive False, Replace_Params.Value Text Case_Sensitivity.Default True, Replace_Params.Value Text Case_Sensitivity.Sensitive True, Replace_Params.Value Text Case_Sensitivity.Insensitive True]
-    Set.from_vector e
+    Hashset.from_vector e

 ## PRIVATE
 replace : Vector SQL_Builder -> Any -> SQL_Builder
@@ -96,7 +96,7 @@ type SQLite_Type_Mapping
     infer_return_type : (SQL_Expression -> SQL_Type_Reference) -> Text -> Vector -> SQL_Expression -> SQL_Type_Reference
     infer_return_type infer_from_database_callback op_name arguments expression =
         _ = [infer_from_database_callback, expression]
-        handler = operations_map.get op_name (_ -> Error.throw (Illegal_State.Error "Impossible: Unknown operation "+op_name+". This is a bug in the Database library."))
+        handler = operations_dict.get op_name (_ -> Error.throw (Illegal_State.Error "Impossible: Unknown operation "+op_name+". This is a bug in the Database library."))
         sql_type = handler arguments
         SQL_Type_Reference.from_constant sql_type

@@ -127,7 +127,7 @@ type SQLite_Type_Mapping

    For types like dates - we map them to unsupported type, because date
    operations in SQLite are currently not supported due to their weird storage.
-simple_types_map = Map.from_vector <|
+simple_types_map = Dictionary.from_vector <|
     ints = [Types.TINYINT, Types.SMALLINT, Types.BIGINT, Types.INTEGER] . map x-> [x, default_integer]
     floats = [Types.DOUBLE, Types.REAL, Types.FLOAT] . map x-> [x, default_float]
     # We treat numeric as a float, since that is what really sits in SQLite under the hood.
@@ -142,13 +142,13 @@ simple_types_map = Map.from_vector <|
        https://www.sqlite.org/datatype3.html#affinity_name_examples
    However, with this the date-time columns will be mapped to the numeric type.
    Instead, we want to treat such columns as Text, so we override the mapping.
-name_based_workarounds = Map.from_vector <|
+name_based_workarounds = Dictionary.from_vector <|
     ["TIME", "DATE", "DATETIME", "TIMESTAMP"] . map x-> [x, default_text]

 ## PRIVATE
    Maps operation names to functions that infer its result type.
-operations_map : Map Text (Vector -> SQL_Type)
-operations_map =
+operations_dict : Dictionary Text (Vector -> SQL_Type)
+operations_dict =
     find_type arg = case arg of
         column : DB_Column -> column.value_type
         internal_column : Internal_Column ->
@@ -198,7 +198,7 @@ operations_map =
     arithmetic_ops = ["ADD_NUMBER", "-", "*", "^", "%", "SUM"]
     merge_input_types_ops = ["ROW_MAX", "ROW_MIN", "MAX", "MIN", "FILL_NULL", "COALESCE"]
     others = [["IIF", handle_iif], ["CAST", handle_cast], ["CASE", handle_case], ["RUNTIME_ERROR", handle_runtime_error]]
-    Map.from_vector <|
+    Dictionary.from_vector <|
         v1 = always_boolean_ops.map [_, const SQLite_Types.boolean]
         v2 = always_floating_ops.map [_, const SQLite_Types.real]
         v3 = always_integer_ops.map [_, const SQLite_Types.integer]
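`operations_dict` is a classic dictionary-of-handlers dispatch table: look up a function by operation name, with a fallback for unknown keys. A hedged standalone sketch:

```enso
from Standard.Base import all

# Dispatch through a dictionary of handler functions, with a default.
example_dispatch op_name =
    handlers = Dictionary.from_vector [["MIN", args-> args.first], ["MAX", args-> args.last]]
    handler = handlers.get op_name (_-> "unknown operation")
    handler [1, 2, 3]
```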
@@ -190,8 +190,8 @@ resolve_primary_key structure primary_key = case primary_key of
      if key.is_a Text then key else
          Error.throw (Illegal_Argument.Error ("Primary key must be a vector of column names, instead got a " + (Meta.type_of key . to_display_text)))
  validated.if_not_error <|
-     column_names = Set.from_vector (structure.map .name)
-     missing_columns = (Set.from_vector primary_key).difference column_names
+     column_names = Hashset.from_vector (structure.map .name)
+     missing_columns = (Hashset.from_vector primary_key).difference column_names
      if missing_columns.not_empty then Error.throw (Missing_Input_Columns.Error missing_columns.to_vector) else
          primary_key
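Note: this validation is a pattern that recurs throughout the change — build a `Hashset` of known column names and use `difference` to find what is missing. The idiom in isolation, with hypothetical column names:

    from Standard.Base import all

    example_missing_columns =
        # Columns available on a hypothetical table.
        column_names = Hashset.from_vector ["id", "name", "price"]
        # Requested key columns; "created_at" does not exist.
        requested = Hashset.from_vector ["id", "created_at"]
        missing = requested.difference column_names
        # Yields ["created_at"].
        missing.to_vector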
@@ -470,13 +470,13 @@ check_update_arguments_structure_match source_table target_table key_columns upd
      if source_type.can_be_widened_to target_type then [Inexact_Type_Coercion.Warning source_type target_type unavailable=False] else
          Error.throw (Column_Type_Mismatch.Error source_column.name target_type source_type)

- source_columns = Set.from_vector source_table.column_names
- target_columns = Set.from_vector target_table.column_names
+ source_columns = Hashset.from_vector source_table.column_names
+ target_columns = Hashset.from_vector target_table.column_names
  extra_columns = source_columns.difference target_columns
  if extra_columns.not_empty then Error.throw (Unmatched_Columns.Error extra_columns.to_vector) else
      missing_columns = target_columns.difference source_columns
      if missing_columns.not_empty && error_on_missing_columns then Error.throw (Missing_Input_Columns.Error missing_columns.to_vector "the source table") else
-         key_set = Set.from_vector key_columns
+         key_set = Hashset.from_vector key_columns
          missing_source_key_columns = key_set.difference source_columns
          missing_target_key_columns = key_set.difference target_columns
          if missing_source_key_columns.not_empty then Error.throw (Missing_Input_Columns.Error missing_source_key_columns.to_vector "the source table") else

@@ -600,10 +600,10 @@ type Delete_Rows_Source
  check_delete_rows_arguments target_table key_values_to_delete key_columns ~continuation =
      check_target_table_for_update target_table <|
          if key_columns.is_empty then Error.throw (Illegal_Argument.Error "One or more key columns must be provided to correlate the rows to be deleted.") else
-             key_set = Set.from_vector key_columns
-             missing_target_key_columns = key_set . difference (Set.from_vector target_table.column_names)
+             key_set = Hashset.from_vector key_columns
+             missing_target_key_columns = key_set . difference (Hashset.from_vector target_table.column_names)
              if missing_target_key_columns.not_empty then Error.throw (Missing_Input_Columns.Error missing_target_key_columns.to_vector "the target table") else
-                 missing_source_key_columns = key_set . difference (Set.from_vector key_values_to_delete.column_names)
+                 missing_source_key_columns = key_set . difference (Hashset.from_vector key_values_to_delete.column_names)
                  if missing_source_key_columns.not_empty then Error.throw (Missing_Input_Columns.Error missing_source_key_columns.to_vector "the key values to delete table") else
                      continuation
@@ -112,9 +112,9 @@ json_object = json.first
  list : List
  list = List.Cons 1 (List.Cons 2 (List.Cons 3 List.Nil))

- ## A simple map that contains some numbers mapped to their word equivalents.
- map : Map
- map = Map.empty . insert 1 "one" . insert 3 "three" . insert 5 "five"
+ ## A simple dictionary that contains some numbers mapped to their word equivalents.
+ dictionary : Dictionary
+ dictionary = Dictionary.empty . insert 1 "one" . insert 3 "three" . insert 5 "five"

  ## A dummy type that is used for example purposes.
  type No_Methods
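Note: a quick sketch of looking keys up in a dictionary built this way, assuming `at` and `get`-with-default behave as they do elsewhere in this diff:

    from Standard.Base import all

    example_lookup =
        dictionary = Dictionary.empty . insert 1 "one" . insert 3 "three" . insert 5 "five"
        # Returns "three".
        three = dictionary.at 3
        # Returns the provided default, since 2 is not a key.
        missing = dictionary.get 2 "unknown"
        [three, missing]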
@@ -119,7 +119,7 @@ type Snowflake_Dialect
      inner_table_alias = table_name_deduplicator.make_unique table.name+"_inner"
      setup = (Internals_Access.get_context table).as_subquery inner_table_alias [Internals_Access.internal_columns table]
      new_columns = setup.new_columns.first
-     column_mapping = Map.from_vector <| new_columns.map c-> [c.name, c]
+     column_mapping = Dictionary.from_vector <| new_columns.map c-> [c.name, c]
      new_key_columns = key_columns.map c-> column_mapping.at c.name
      type_mapping = self.get_type_mapping
      distinct_expressions = new_key_columns.map column->
@@ -464,14 +464,14 @@ decimal_mod = Base_Generator.lift_binary_op "DECIMAL_MOD" x-> y->
      x ++ " - FLOOR(CAST(" ++ x ++ " AS decimal) / CAST(" ++ y ++ " AS decimal)) * " ++ y

  ## PRIVATE
- supported_replace_params : Set Replace_Params
+ supported_replace_params : Hashset Replace_Params
  supported_replace_params =
      e0 = [Replace_Params.Value Text Case_Sensitivity.Default False, Replace_Params.Value Text Case_Sensitivity.Default True, Replace_Params.Value Text Case_Sensitivity.Sensitive False]
      e1 = [Replace_Params.Value Text Case_Sensitivity.Sensitive True, Replace_Params.Value Text Case_Sensitivity.Insensitive False, Replace_Params.Value Text Case_Sensitivity.Insensitive True]
      e2 = [Replace_Params.Value Regex Case_Sensitivity.Default False, Replace_Params.Value Regex Case_Sensitivity.Default True, Replace_Params.Value Regex Case_Sensitivity.Sensitive False]
      e3 = [Replace_Params.Value Regex Case_Sensitivity.Sensitive True, Replace_Params.Value Regex Case_Sensitivity.Insensitive False, Replace_Params.Value Regex Case_Sensitivity.Insensitive True]
      e4 = [Replace_Params.Value DB_Column Case_Sensitivity.Default False, Replace_Params.Value DB_Column Case_Sensitivity.Sensitive False]
-     Set.from_vector <| e0 + e1 + e2 + e3 + e4
+     Hashset.from_vector <| e0 + e1 + e2 + e3 + e4

  ## PRIVATE
  replace : Vector SQL_Builder -> Any -> SQL_Builder
@@ -118,14 +118,14 @@ type Snowflake_Type_Mapping
      _ -> Column_Fetcher_Module.default_fetcher_for_value_type value_type

  ## PRIVATE
- simple_types_map = Map.from_vector <|
+ simple_types_map = Dictionary.from_vector <|
      ints = [[Types.TINYINT, Value_Type.Byte], [Types.SMALLINT, Value_Type.Integer Bits.Bits_16], [Types.BIGINT, Value_Type.Integer Bits.Bits_64], [Types.INTEGER, Value_Type.Integer Bits.Bits_32]]
      floats = [[Types.DOUBLE, Value_Type.Float Bits.Bits_64], [Types.REAL, Value_Type.Float Bits.Bits_32]]
      other = [[Types.DATE, Value_Type.Date], [Types.TIME, Value_Type.Time], [Types.BOOLEAN, Value_Type.Boolean]]
      ints + floats + other

  ## PRIVATE
- complex_types_map = Map.from_vector <|
+ complex_types_map = Dictionary.from_vector <|
      make_decimal sql_type =
          Value_Type.Decimal sql_type.precision sql_type.scale
      make_varchar sql_type =
@@ -1716,7 +1716,7 @@ type Column
          True ->
              run_vectorized_binary_op self op_name as_vector expected_result_type=Value_Type.Boolean skip_nulls=False new_name=result_name
          False ->
-             set = Set.from_vector as_vector error_on_duplicates=False
+             set = Hashset.from_vector as_vector error_on_duplicates=False
              apply_unary_map self result_name set.contains_relational Value_Type.Boolean nothing_unchanged=False

  ## GROUP Standard.Base.Conversions
@@ -20,12 +20,12 @@ Convertible_To_Columns.from (that:JS_Object) =
      Convertible_To_Columns.Value that.field_names (that.get _)

  ## PRIVATE
- Convertible_To_Columns.from (that:Map) =
+ Convertible_To_Columns.from (that:Dictionary) =
      pairs = that.keys.map k-> [k.to_text, k]
-     field_map = pairs.if_not_error <|
-         Map.from_vector pairs error_on_duplicates=True . catch Illegal_Argument _->
+     field_dict = pairs.if_not_error <|
+         Dictionary.from_vector pairs error_on_duplicates=True . catch Illegal_Argument _->
              Error.throw (Illegal_Argument.Error "Cannot convert "+that.to_display_text+" to a set of columns, because its keys are duplicated when converted to text.")
-     Convertible_To_Columns.Value field_map.keys (k-> that.get (field_map.get k))
+     Convertible_To_Columns.Value field_dict.keys (k-> that.get (field_dict.get k))

  ## PRIVATE
  Convertible_To_Columns.from (that:Pair) =

@@ -67,5 +67,5 @@ Convertible_To_Columns.from (that:XML_Element) =
      has_child_nodes = that_children.any (_.is_a XML_Element)
      children = if that_children.is_empty.not && has_child_nodes then [["Children", that_children]] else []
      value = if that_children.is_empty.not && has_child_nodes.not then [["Value", that.text]] else []
-     as_map = Map.from_vector (name + tags + children + value)
-     Convertible_To_Columns.from as_map
+     as_dict = Dictionary.from_vector (name + tags + children + value)
+     Convertible_To_Columns.from as_dict
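Note: the `error_on_duplicates=True` flag used above turns duplicated keys into an `Illegal_Argument` error instead of silently keeping one entry. A small sketch of both behaviours, with made-up pairs:

    from Standard.Base import all

    example_duplicates =
        pairs = [["a", 1], ["a", 2]]
        # One entry survives per key (which one is an implementation detail).
        lenient = Dictionary.from_vector pairs error_on_duplicates=False
        # Fails with Illegal_Argument; `catch` turns the error into a fallback.
        strict = Dictionary.from_vector pairs error_on_duplicates=True . catch Illegal_Argument _-> "duplicate keys"
        [lenient.size, strict]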
@@ -54,7 +54,7 @@ Convertible_To_Rows.from that:Pair = Convertible_To_Rows.Value that.length (that
  Convertible_To_Rows.from that:Date_Range = Convertible_To_Rows.Value that.length (that.get _)

  ## PRIVATE
- Convertible_To_Rows.from that:Map =
+ Convertible_To_Rows.from that:Dictionary =
      vals = that.to_vector.map p-> Key_Value.Pair p.first p.second
      Convertible_To_Rows.Value vals.length (vals.get _) ["Key", "Value"]
@@ -755,7 +755,7 @@ type Truncated_Column_Names
  ## PRIVATE
     Indicates that some column names were truncated to fit the maximum length
     supported by the backend.
-     Warning (names_map : Map Text Text)
+     Warning (names_map : Dictionary Text Text)

  ## PRIVATE
     Get the original column names.
@@ -97,47 +97,46 @@ create_table_from_objects (value : Convertible_To_Rows) (fields : Vector | Nothi

  Java_Problems.with_problem_aggregator Problem_Behavior.Report_Warning java_problem_aggregator->
      preset_fields = fields.is_nothing.not
-     initial_map = if preset_fields.not then Map.empty else
-         Map.from_vector (fields.map field_name-> [field_name, Java_Exports.make_inferred_builder len java_problem_aggregator]) error_on_duplicates=False
+     initial_dict = if preset_fields.not then Dictionary.empty else
+         Dictionary.from_vector (fields.map field_name-> [field_name, Java_Exports.make_inferred_builder len java_problem_aggregator]) error_on_duplicates=False

      # This is used to ensure that field names in the resulting table are in the order they were encountered.
      discovered_field_names = Builder.new

-     builder_map = case preset_fields of
+     builder_dict = case preset_fields of
          # Just get the queried keys from each object.
          True ->
              0.up_to len . each idx->
                  v = (value.getter idx).to Convertible_To_Columns
-                 initial_map.each_with_key field_name-> builder->
+                 initial_dict.each_with_key field_name-> builder->
                      builder.append (v.getter field_name)

-             initial_map
+             initial_dict

          # In this mode we are discovering the key set as we go.
          False ->
-             0.up_to len . fold initial_map current_map-> idx->
+             0.up_to len . fold initial_dict current_dict-> idx->
                  v = (value.getter idx).to Convertible_To_Columns
-                 v.field_names.fold current_map inner_current_map-> f->
-                     existing_builder = inner_current_map.get f Nothing
+                 v.field_names.fold current_dict inner_current_dict-> f->
+                     existing_builder = inner_current_dict.get f Nothing
                      builder = existing_builder.if_nothing <|
                          discovered_field_names.append f
                          Java_Exports.make_inferred_builder len java_problem_aggregator
                      builder.fillUpToSize idx
                      builder.append (v.getter f)
-                     new_map = if existing_builder.is_nothing.not then inner_current_map else
-                         inner_current_map.insert f builder
-                     new_map
+                     if existing_builder.is_nothing.not then inner_current_dict else
+                         inner_current_dict.insert f builder

      # Seal all builders and create columns
-     column_map = builder_map.map_with_key name-> builder->
+     column_dict = builder_dict.map_with_key name-> builder->
          builder.fillUpToSize len
          Column.from_storage name builder.seal

-     column_map.if_not_error <|
+     column_dict.if_not_error <|
          # Create a vector of columns, preserving the original order if it was specified.
          columns = case preset_fields of
-             True -> fields.distinct.map column_map.get
+             True -> fields.distinct.map column_dict.get
              False ->
                  if discovered_field_names.is_empty then Error.throw (Illegal_Argument.Error "Unable to generate column names as all inputs had no fields.") else
-                     discovered_field_names.to_vector.map column_map.get
+                     discovered_field_names.to_vector.map column_dict.get
          Table.new columns
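Note: the `False` branch is a discover-as-you-go accumulation — fold over the rows, and the first time a field name appears, create an entry for it so insertion order matches discovery order. A stripped-down sketch of the same pattern on hypothetical records, accumulating empty placeholders instead of column builders:

    from Standard.Base import all

    example_discover_fields =
        records = [Dictionary.from_vector [["a", 1]], Dictionary.from_vector [["a", 2], ["b", 3]]]
        # Insert a placeholder the first time each field name is seen.
        records.fold Dictionary.empty current-> record->
            record.keys.fold current inner-> key->
                if inner.contains_key key then inner else inner.insert key []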
@@ -172,7 +172,7 @@ fan_out_to_rows_and_columns_fixed input_storage function at_least_one_row:Boolea
  fan_out_to_rows_and_columns_dynamic : Any -> (Any -> Vector (Vector Any)) -> Boolean -> (Any -> Text) -> (Integer -> Any) -> Problem_Builder -> Vector
  fan_out_to_rows_and_columns_dynamic input_storage function at_least_one_row column_names_for_row column_builder problem_builder =
      # Accumulates the outputs of the function.
-     column_map = Ref.new Map.empty
+     column_dict = Ref.new Dictionary.empty
      output_column_builders = Builder.new

      # Guess that most of the time, we'll get at least one value for each input.

@@ -180,7 +180,7 @@ fan_out_to_rows_and_columns_dynamic input_storage function at_least_one_row colu

      # Column Builder add function
      add_column n current_length =
-         column_map.put (column_map.get.insert n output_column_builders.length)
+         column_dict.put (column_dict.get.insert n output_column_builders.length)
          builder = column_builder num_input_rows
          builder.appendNulls current_length
          output_column_builders.append builder

@@ -200,11 +200,11 @@ fan_out_to_rows_and_columns_dynamic input_storage function at_least_one_row colu

      # Add any missing columns.
      row_column_names.each n->
-         if column_map.get.contains_key n . not then
+         if column_dict.get.contains_key n . not then
              add_column n order_mask_positions.length

      # Append each group of values to the builder.
-     current_columns = column_map.get
+     current_columns = column_dict.get
      output_values.each row_unchecked->
          row = uniform_length row_column_names.length row_unchecked problem_builder
          row_column_names.each_with_index i->n->

@@ -220,7 +220,7 @@ fan_out_to_rows_and_columns_dynamic input_storage function at_least_one_row colu
          repeat_each output_values.length <| order_mask_positions.append i

      # Build the output column
-     output_columns = column_map.get.to_vector.sort on=_.second . map pair->
+     output_columns = column_dict.get.to_vector.sort on=_.second . map pair->
          Column.from_storage pair.first (output_column_builders.at pair.second . seal)

      [output_columns, order_mask_positions]
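Note: the dictionary is immutable, so it lives in a `Ref` and each insert replaces the whole value; it maps a column name to that column's index among the builders. A minimal sketch of this mutation pattern with made-up names (assuming `Ref` is in scope from `Standard.Base`):

    from Standard.Base import all

    example_ref_dictionary =
        indices = Ref.new Dictionary.empty
        # Assign the next free index to a name.
        add name = indices.put (indices.get.insert name indices.get.size)
        add "a"
        add "b"
        # Returns 1, the index assigned to "b".
        indices.get.at "b"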
@@ -50,7 +50,7 @@ prepare_columns_for_lookup base_table lookup_table key_columns_selector add_new_

      problems_to_add = Builder.new
      key_columns.if_not_error <| lookup_table_key_columns.if_not_error <|
-         key_set = Set.from_vector key_columns
+         key_set = Hashset.from_vector key_columns
          my_updated_columns = base_table.columns.map on_problems=No_Wrap base_column->
              base_column_name = base_column.name
              is_key = key_set.contains base_column_name

@@ -67,7 +67,7 @@ prepare_columns_for_lookup base_table lookup_table key_columns_selector add_new_
              Nothing -> Lookup_Column.Keep_Column base_column
              _ -> merge_columns base_column lookup_column allow_unmatched_rows

-         my_column_set = Set.from_vector base_table.column_names
+         my_column_set = Hashset.from_vector base_table.column_names
          extra_columns = lookup_table.columns.filter col->
              is_new = my_column_set.contains col.name . not
              is_new
@@ -10,7 +10,7 @@ import project.Value_Type.By_Type
  from project.Errors import Missing_Input_Columns, No_Such_Column, Non_Unique_Key, Unmatched_Rows_In_Lookup

  ## PRIVATE
- replace : Table -> (Table | Map) -> (Text | Integer | By_Type | Vector (Text | Integer | By_Type)) -> (Text | Integer | Nothing) -> (Text | Integer | Nothing) -> Boolean -> Problem_Behavior -> Table ! Missing_Input_Columns | Non_Unique_Key | Unmatched_Rows_In_Lookup
+ replace : Table -> (Table | Dictionary) -> (Text | Integer | By_Type | Vector (Text | Integer | By_Type)) -> (Text | Integer | Nothing) -> (Text | Integer | Nothing) -> Boolean -> Problem_Behavior -> Table ! Missing_Input_Columns | Non_Unique_Key | Unmatched_Rows_In_Lookup
  replace base_table lookup_table columns:(Text | Integer | By_Type | Vector (Text | Integer | By_Type)) from_column:(Text | Integer | Nothing)=Nothing to_column:(Text | Integer | Nothing)=Nothing allow_unmatched_rows:Boolean=True on_problems:Problem_Behavior=..Report_Warning =
      case columns of
          _ : Vector ->

@@ -20,8 +20,8 @@ replace base_table lookup_table columns:(Text | Integer | By_Type | Vector (Text
          _ ->
              column = columns
              case lookup_table of
-                 _ : Map ->
-                     if from_column.is_nothing.not || to_column.is_nothing.not then Error.throw (Illegal_Argument.Error "If a Map is provided as the lookup_table, then from_column and to_column should not also be specified.") else
+                 _ : Dictionary ->
+                     if from_column.is_nothing.not || to_column.is_nothing.not then Error.throw (Illegal_Argument.Error "If a Dictionary is provided as the lookup_table, then from_column and to_column should not also be specified.") else
                          handle_empty_lookup_table ~action =
                              if lookup_table.is_empty.not then action else
                                  ## If the lookup table is empty but the base table is

@@ -33,7 +33,7 @@ replace base_table lookup_table columns:(Text | Integer | By_Type | Vector (Text
                                  a_key_value = base_table.at column . at 0
                                  Error.throw (Unmatched_Rows_In_Lookup.Error [a_key_value])
                          handle_empty_lookup_table <|
-                             base_table.replace (base_table.make_table_from_map lookup_table 'from' 'to') column 'from' 'to' allow_unmatched_rows=allow_unmatched_rows on_problems=on_problems
+                             base_table.replace (base_table.make_table_from_dictionary lookup_table 'from' 'to') column 'from' 'to' allow_unmatched_rows=allow_unmatched_rows on_problems=on_problems
                  _ ->
                      from_column_resolved = from_column.if_nothing 0
                      to_column_resolved = to_column.if_nothing 1
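Note: in user code the dictionary form of `replace` reads like a lookup table inlined at the call site. A hedged sketch, assuming an in-memory table with a hypothetical "status" column:

    from Standard.Base import all
    from Standard.Table import all

    example_replace =
        table = Table.new [["status", ["A", "C", "A"]]]
        mapping = Dictionary.from_vector [["A", "active"], ["C", "closed"]]
        # Each value in "status" is replaced through the dictionary.
        table.replace mapping "status"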
@@ -52,7 +52,7 @@ replace base_table lookup_table columns:(Text | Integer | By_Type | Vector (Text
      ## Create a lookup table with just `to_column` and `from_column`,
         renamed to match the base table's `column` and its duplicate,
         respectively.
-     lookup_table_renamed = selected_lookup_columns . rename_columns (Map.from_vector [[from_column_resolved, duplicate_key_column_name], [to_column_resolved, column]])
+     lookup_table_renamed = selected_lookup_columns . rename_columns (Dictionary.from_vector [[from_column_resolved, duplicate_key_column_name], [to_column_resolved, column]])

      warn_if_empty result_table = if lookup_table_renamed.row_count != 0 then result_table else Warning.attach (Empty_Error.Error "lookup_table") result_table
@@ -91,7 +91,7 @@ regex_to_column_names pattern original_column_name =
      group_nums_to_names = pattern.group_nums_to_names

      unnamed_group_numbers = 1.up_to pattern.group_count . filter i-> group_nums_to_names.contains_key i . not
-     group_number_to_column_name_suffix = Map.from_vector <| unnamed_group_numbers.zip (0.up_to unnamed_group_numbers.length)
+     group_number_to_column_name_suffix = Dictionary.from_vector <| unnamed_group_numbers.zip (0.up_to unnamed_group_numbers.length)

      Vector.new (pattern.group_count-1) i->
          # explicit groups start at 1
@@ -112,7 +112,7 @@ type Table_Column_Helper
      remove_columns self (selectors:(Text | Integer | Regex | Vector)) (case_sensitivity:Case_Sensitivity) (error_on_missing_columns:Boolean) (on_problems:Problem_Behavior) =
          problem_builder = Problem_Builder.new error_on_missing_columns=error_on_missing_columns
          selection = self.select_columns_helper selectors case_sensitivity False problem_builder
-         selected_names = Map.from_vector (selection.map column-> [column.name, True])
+         selected_names = Dictionary.from_vector (selection.map column-> [column.name, True])
          result = self.columns.filter column->
              should_be_removed = selected_names.get column.name False
              should_be_removed.not

@@ -160,7 +160,7 @@ type Table_Column_Helper
          problem_builder = Problem_Builder.new error_on_missing_columns=error_on_missing_columns
          selection = self.select_columns_helper selectors case_sensitivity True problem_builder
          problem_builder.attach_problems_before on_problems <|
-             selected_names = Map.from_vector (selection.map column-> [column.name, True])
+             selected_names = Dictionary.from_vector (selection.map column-> [column.name, True])
              other_columns = self.columns.filter column->
                  is_selected = selected_names.get column.name False
                  is_selected.not

@@ -203,8 +203,8 @@ type Table_Column_Helper
              _ -> [selectors]
          selected_columns = vector.map resolve_selector . flatten
          if reorder then selected_columns.distinct on=_.name else
-             map = Map.from_vector (selected_columns.map column-> [column.name, True]) error_on_duplicates=False
-             self.columns.filter column-> map.contains_key column.name
+             dict = Dictionary.from_vector (selected_columns.map column-> [column.name, True]) error_on_duplicates=False
+             self.columns.filter column-> dict.contains_key column.name

  ## PRIVATE
     A helper function which selects a single column from the table.
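Note: using a `Dictionary Text Boolean` as a membership test (instead of a `Hashset`) keeps the `get name False` default at hand. The idiom in isolation, with hypothetical names:

    from Standard.Base import all

    example_membership =
        selected = Dictionary.from_vector (["a", "b"].map name-> [name, True])
        # Keeps only selected names; missing keys default to False.
        ["a", "b", "c"].filter name-> selected.get name False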
@@ -289,14 +289,14 @@ type Table_Column_Helper
     A helper function which takes a Table like object and a Table for a name
     mapping and returns a new Table with the columns renamed according to the
     mapping.
- read_name_map_from_table : Table -> Vector | Map ! Illegal_Argument
- read_name_map_from_table column_map:Table = case column_map.column_count of
+ read_name_mapping_from_table : Table -> Vector | Dictionary ! Illegal_Argument
+ read_name_mapping_from_table column_map:Table = case column_map.column_count of
      1 ->
          col = column_map.first_column
          if col.value_type.is_text then col.to_vector else
              Error.throw (Illegal_Argument.Error "Expected a table with one or two columns of text values.")
      2 ->
-         if column_map.first_column.value_type.is_text && (column_map.at 1).value_type.is_text then Map.from_vector column_map.rows else
+         if column_map.first_column.value_type.is_text && (column_map.at 1).value_type.is_text then Dictionary.from_vector column_map.rows else
              Error.throw (Illegal_Argument.Error "Expected a table with one or two columns of text values.")
      _ -> Error.throw (Illegal_Argument.Error "Expected a table with one or two columns of text values.")
@@ -322,9 +322,9 @@ read_name_map_from_table column_map:Table = case column_map.column_count of
        operation. By default, a warning is issued, but the operation proceeds.
        If set to `Report_Error`, the operation fails with a dataflow error.
        If set to `Ignore`, the operation proceeds without errors or warnings.
- rename_columns : Column_Naming_Helper -> Vector -> Map (Text | Integer | Regex) Text | Vector Text -> Case_Sensitivity -> Boolean -> Problem_Behavior -> Map Text Text
- rename_columns (naming_helper : Column_Naming_Helper) (internal_columns:Vector) (mapping:(Map | Vector)) (case_sensitivity:Case_Sensitivity) (error_on_missing_columns:Boolean) (on_problems:Problem_Behavior) =
-     ## Convert Vector of Pairs to Map
+ rename_columns : Column_Naming_Helper -> Vector -> Dictionary (Text | Integer | Regex) Text | Vector Text -> Case_Sensitivity -> Boolean -> Problem_Behavior -> Dictionary Text Text
+ rename_columns (naming_helper : Column_Naming_Helper) (internal_columns:Vector) (mapping:(Dictionary | Vector)) (case_sensitivity:Case_Sensitivity) (error_on_missing_columns:Boolean) (on_problems:Problem_Behavior) =
+     ## Convert Vector of Pairs to Dictionary
      is_vec_pairs = mapping.is_a Vector && mapping.length > 0 && (mapping.first.is_a Text . not)
      case is_vec_pairs of
          True ->

@@ -333,8 +333,8 @@ rename_columns (naming_helper : Column_Naming_Helper) (internal_columns:Vector)
              is_valid_key k = k.is_a Integer || k.is_a Text || k.is_a Regex
              all_pairs = mapping.all p-> (is_valid_row p) && p.length == 2 && (is_valid_key p.first) && p.second.is_a Text
              if all_pairs.not then Error.throw (Illegal_Argument.Error "mapping is not a Vector of old name to new name.") else
-                 ## Attempt to treat as Map
-                 map = Map.from_vector mapping error_on_duplicates=False
+                 ## Attempt to treat as Dictionary
+                 map = Dictionary.from_vector mapping error_on_duplicates=False
                  if map.length == mapping.length then rename_columns naming_helper internal_columns map case_sensitivity error_on_missing_columns on_problems else
                      duplicates = mapping.duplicates on=_.first . map p->p.first.to_text . distinct
                      duplicate_text = if duplicates.length < 5 then duplicates.to_vector . join ", " else
@@ -356,7 +356,7 @@ rename_columns (naming_helper : Column_Naming_Helper) (internal_columns:Vector)
                  problem_builder.report_other_warning (Too_Many_Column_Names_Provided.Error (vec.drop (..First col_count)))
                  vec.take (..First col_count)
              internal_columns.take good_names.length . zip good_names
-         _ : Map ->
+         _ : Dictionary ->
              resolve_rename selector replacement = case selector of
                  ix : Integer -> if is_index_valid internal_columns.length ix then [Pair.new (internal_columns.at ix) replacement] else
                      problem_builder.report_oob_indices [ix]

@@ -378,7 +378,7 @@ rename_columns (naming_helper : Column_Naming_Helper) (internal_columns:Vector)
      naming_helper.validate_many_column_names all_new_names <|
          ## Resolves actual new names for renamed columns.
             If a column received ambiguous new names, an error is raised.
-         name_map = columns_with_new_names.fold Map.empty current-> pair->
+         name_dict = columns_with_new_names.fold Dictionary.empty current-> pair->
              old_name = pair.first.name
              case current.contains_key old_name of
                  False -> current.insert old_name pair.second

@@ -387,12 +387,12 @@ rename_columns (naming_helper : Column_Naming_Helper) (internal_columns:Vector)
                      Error.throw (Ambiguous_Column_Rename.Error old_name new_names)

          ## Renamed names take precedence, so we first deduplicate those.
-         resolved_name_map = name_map.map unique.make_unique
+         resolved_name_dict = name_dict.map unique.make_unique

          ## Then we ensure that the names of not-renamed columns are also unique and
             return the effective column names for each column.
-         not_renamed = internal_columns.filter c-> resolved_name_map.contains_key c.name . not
-         new_column_names = not_renamed.fold resolved_name_map current-> column->
+         not_renamed = internal_columns.filter c-> resolved_name_dict.contains_key c.name . not
+         new_column_names = not_renamed.fold resolved_name_dict current-> column->
              new_name = unique.make_unique column.name
              current.insert column.name new_name
@@ -443,7 +443,7 @@ select_indices_reordering vector indices =
     The elements are returned in the same order as they appeared in the original
     vector.
  select_indices_preserving_order vector indices =
-     indices_to_keep = Map.from_vector (indices.map i-> [i, True])
+     indices_to_keep = Dictionary.from_vector (indices.map i-> [i, True])
      vector.filter_with_index ix-> _->
          indices_to_keep.get ix False
@@ -98,10 +98,10 @@ type Unique_Name_Strategy

  ## PRIVATE
     A mapping of original names to their truncated counterpart.
- truncated_names : Map Text Text
+ truncated_names : Dictionary Text Text
  truncated_names self =
      entries = Vector.from_polyglot_array self.deduplicator.getTruncatedNames
-     Map.from_vector <| entries.map e-> [e.getLeft, e.getRight]
+     Dictionary.from_vector <| entries.map e-> [e.getLeft, e.getRight]

  ## PRIVATE
  ADVANCED
|
@ -142,7 +142,7 @@ type Column_Set
|
|||||||
## PRIVATE
|
## PRIVATE
|
||||||
Returns a map indicating in how many tables did a column with a given name appear.
|
Returns a map indicating in how many tables did a column with a given name appear.
|
||||||
find_column_counts tables =
|
find_column_counts tables =
|
||||||
tables.fold Map.empty current->table->
|
tables.fold Dictionary.empty current->table->
|
||||||
table.columns.fold current counts-> column->
|
table.columns.fold current counts-> column->
|
||||||
name=column.name
|
name=column.name
|
||||||
new_count = counts.get name 0 + 1
|
new_count = counts.get name 0 + 1
|
||||||
@ -153,7 +153,7 @@ find_column_counts tables =
|
|||||||
starting from the first table.
|
starting from the first table.
|
||||||
distinct_columns_in_appearance_order tables =
|
distinct_columns_in_appearance_order tables =
|
||||||
Vector.build names_builder->
|
Vector.build names_builder->
|
||||||
tables.fold Map.empty current-> table->
|
tables.fold Dictionary.empty current-> table->
|
||||||
table.columns.fold current seen_names-> column->
|
table.columns.fold current seen_names-> column->
|
||||||
name = column.name
|
name = column.name
|
||||||
if seen_names.contains_key name then seen_names else
|
if seen_names.contains_key name then seen_names else
|
||||||
|
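Note: `find_column_counts` is the standard fold-into-a-dictionary counter — read the current count with a default of 0, then insert the incremented value. The same idiom on a plain vector of made-up names:

    from Standard.Base import all

    example_counts =
        names = ["a", "b", "a"]
        # Yields a Dictionary mapping "a" to 2 and "b" to 1.
        names.fold Dictionary.empty counts-> name->
            counts.insert name (counts.get name 0 + 1)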
@@ -53,11 +53,11 @@ type Row

  ## GROUP Standard.Base.Conversions
     ICON convert
-    Gets the row as a Map.
- to_map : Map
- to_map self =
+    Gets the row as a Dictionary.
+ to_dictionary : Dictionary
+ to_dictionary self =
      pairs = self.table.column_names.map n-> [n, self.get n]
-     Map.from_vector pairs
+     Dictionary.from_vector pairs

  ## PRIVATE
     Converts this row into a JS_Object.
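Note: a hedged usage sketch of the renamed conversion, on a hypothetical one-row table:

    from Standard.Base import all
    from Standard.Table import all

    example_row_to_dictionary =
        table = Table.new [["Name", ["Ada"]], ["Age", [36]]]
        row = table.rows.first
        # A Dictionary from column name to this row's value.
        row.to_dictionary . at "Name"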
@@ -680,17 +680,17 @@ type Table
      > Example
        Rename the "Alpha" column to "Beta"

-           table.rename_columns (Map.from_vector [["Alpha", "Beta"]])
+           table.rename_columns (Dictionary.from_vector [["Alpha", "Beta"]])

      > Example
        Rename the last column to "LastColumn"

-           table.rename_columns (Map.from_vector [[-1, "LastColumn"]])
+           table.rename_columns (Dictionary.from_vector [[-1, "LastColumn"]])

      > Example
        Rename the "Alpha" column to "Beta" and last column to "LastColumn"

-           table.rename_columns (Map.from_vector [["Alpha", "Beta"], [-1, "LastColumn"]])
+           table.rename_columns (Dictionary.from_vector [["Alpha", "Beta"], [-1, "LastColumn"]])

      > Example
        Rename the first column to "FirstColumn"

@@ -705,12 +705,12 @@ type Table
      > Example
        For all columns starting with the prefix `name=`, replace it with `key:`.

-           table.rename_columns (Map.from_vector [["name=(.*)".to_regex, "key:$1"]])
+           table.rename_columns (Dictionary.from_vector [["name=(.*)".to_regex, "key:$1"]])
      @column_map Widget_Helpers.make_rename_name_vector_selector
-     rename_columns : Map (Text | Integer | Regex) Text | Vector Text | Vector Vector -> Case_Sensitivity -> Boolean -> Problem_Behavior -> Table ! Missing_Input_Columns | Ambiguous_Column_Rename | Too_Many_Column_Names_Provided | Invalid_Column_Names | Duplicate_Output_Column_Names
-     rename_columns self (column_map:(Table | Map | Vector)=["Column"]) (case_sensitivity:Case_Sensitivity=..Default) (error_on_missing_columns:Boolean=True) (on_problems:Problem_Behavior=..Report_Warning) = case column_map of
+     rename_columns : Table | Dictionary (Text | Integer | Regex) Text | Vector Text | Vector Vector -> Case_Sensitivity -> Boolean -> Problem_Behavior -> Table ! Missing_Input_Columns | Ambiguous_Column_Rename | Too_Many_Column_Names_Provided | Invalid_Column_Names | Duplicate_Output_Column_Names
+     rename_columns self (column_map:(Table | Dictionary | Vector)=["Column"]) (case_sensitivity:Case_Sensitivity=..Default) (error_on_missing_columns:Boolean=True) (on_problems:Problem_Behavior=..Report_Warning) = case column_map of
          _ : Table ->
-             resolved = Table_Helpers.read_name_map_from_table column_map
+             resolved = Table_Helpers.read_name_mapping_from_table column_map
              self.rename_columns resolved case_sensitivity error_on_missing_columns on_problems
          _ ->
              new_names = Table_Helpers.rename_columns self.column_naming_helper self.columns column_map case_sensitivity error_on_missing_columns on_problems
@@ -1123,9 +1123,9 @@ type Table
              no_columns_problem_behavior = case error_on_missing_columns of
                  True -> Problem_Behavior.Report_Error
                  False -> on_problems
-             no_columns_problem_behavior.attach_problem_before No_Input_Columns_Selected Map.empty
+             no_columns_problem_behavior.attach_problem_before No_Input_Columns_Selected Dictionary.empty
          False ->
-             Map.from_vector <| selected_columns.map c-> [c.name, True]
+             Dictionary.from_vector <| selected_columns.map c-> [c.name, True]

      new_columns = self.columns.map on_problems=No_Wrap column-> if selected_column_names.contains_key column.name . not then column else
          Value_Type.expect_text column <|

@@ -1222,9 +1222,9 @@ type Table
              no_columns_problem_behavior = case error_on_missing_columns of
                  True -> Problem_Behavior.Report_Error
                  False -> on_problems
-             no_columns_problem_behavior.attach_problem_before No_Input_Columns_Selected Map.empty
+             no_columns_problem_behavior.attach_problem_before No_Input_Columns_Selected Dictionary.empty
          False ->
-             Map.from_vector <| selected_columns.map c-> [c.name, True]
+             Dictionary.from_vector <| selected_columns.map c-> [c.name, True]

      new_columns = self.columns.map column-> if selected_column_names.contains_key column.name . not then column else
          column.format format locale
@@ -1887,22 +1887,22 @@ type Table
      Warning.set result []

  ## PRIVATE
-    A helper that creates a two-column table from a Map.
+    A helper that creates a two-column table from a Dictionary.

-    The keys of the `Map` become the first column, with name
-    `key_column_name`, and the values of the `Map` become the second column,
-    with name `value_column_name`.
+    The keys of the `Dictionary` become the first column, with name
+    `key_column_name`, and the values become the second column, with name
+    `value_column_name`.

-    For the in-memory database, the `Map` can be empty. For the database
-    backends, it must not be empty.
+    For the in-memory database, the `Dictionary` can be empty. For the
+    database backends, it must not be empty.

     Arguments:
-    - map: The `Map` to create the table from.
+    - dict: The `Dictionary` to create the table from.
     - key_column_name: The name to use for the first column.
     - value_column_name: The name to use for the second column.
- make_table_from_map : Map Any Any -> Text -> Text -> Table
- make_table_from_map self map key_column_name value_column_name =
-     keys_and_values = map.to_vector
+ make_table_from_dictionary : Dictionary Any Any -> Text -> Text -> Table
+ make_table_from_dictionary self dict key_column_name value_column_name =
+     keys_and_values = dict.to_vector
      self.make_table_from_vectors [keys_and_values.map .first, keys_and_values.map .second] [key_column_name, value_column_name]

  ## PRIVATE
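Note: the helper is private, but its observable result is easy to describe — one row per key-value pair. A sketch of the same construction done by hand, mirroring the vector shapes the helper uses:

    from Standard.Base import all
    from Standard.Table import all

    example_dictionary_to_table =
        dict = Dictionary.from_vector [["a", 1], ["b", 2]]
        keys_and_values = dict.to_vector
        # Keys become the first column, values the second.
        Table.new [["key", keys_and_values.map .first], ["value", keys_and_values.map .second]]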
@@ -2283,8 +2283,8 @@ type Table
      @columns (Widget_Helpers.make_column_name_multi_selector add_regex=True add_by_type=True)
      @from_column Widget.Text_Input
      @to_column Widget.Text_Input
-     replace : (Table | Map) -> (Text | Integer | Regex | By_Type | Vector (Text | Integer | Regex | By_Type)) -> (Text | Integer | Nothing) -> (Text | Integer | Nothing) -> Boolean -> Problem_Behavior -> Table ! Missing_Input_Columns | Non_Unique_Key | Unmatched_Rows_In_Lookup
-     replace self lookup_table:(Table | Map) columns:(Text | Integer | Regex | By_Type | Vector (Text | Integer | Regex | By_Type)) from_column:(Text | Integer | Nothing)=Nothing to_column:(Text | Integer | Nothing)=Nothing allow_unmatched_rows:Boolean=True on_problems:Problem_Behavior=..Report_Warning =
+     replace : (Table | Dictionary) -> (Text | Integer | Regex | By_Type | Vector (Text | Integer | Regex | By_Type)) -> (Text | Integer | Nothing) -> (Text | Integer | Nothing) -> Boolean -> Problem_Behavior -> Table ! Missing_Input_Columns | Non_Unique_Key | Unmatched_Rows_In_Lookup
+     replace self lookup_table:(Table | Dictionary) columns:(Text | Integer | Regex | By_Type | Vector (Text | Integer | Regex | By_Type)) from_column:(Text | Integer | Nothing)=Nothing to_column:(Text | Integer | Nothing)=Nothing allow_unmatched_rows:Boolean=True on_problems:Problem_Behavior=..Report_Warning =
          Replace_Helpers.replace self lookup_table columns from_column to_column allow_unmatched_rows on_problems

  ## ALIAS join by row position
@@ -2566,7 +2566,7 @@ type Table

      id_columns = columns_helper.select_columns_helper key_columns Case_Sensitivity.Default False problem_builder

-     selected_names = Map.from_vector (id_columns.map column-> [column.name, True])
+     selected_names = Dictionary.from_vector (id_columns.map column-> [column.name, True])

      data = columns_helper.internal_columns.filter column->(selected_names.get column.name False . not)
      java_data = data.map c->c.java_column
@@ -3183,13 +3183,6 @@ concat_columns column_set all_tables result_type result_row_count needs_cast on_
      sealed_storage = storage_builder.seal
      Column.from_storage column_set.name sealed_storage

- ## PRIVATE
-    A helper that creates a two-column table from a map.
- map_to_lookup_table : Map Any Any -> Text -> Text -> Table
- map_to_lookup_table map key_column value_column =
-     keys_and_values = map.to_vector
-     Table.new [[key_column, keys_and_values.map .first], [value_column, keys_and_values.map .second]]
-
  ## PRIVATE
     Conversion method to a Table from a Column.
  Table.from (that:Column) = that.to_table
@@ -98,7 +98,7 @@ print_single_result (test_result : Test_Result) (config : Suite_Config) =
  print_report : Vector Test_Result -> Suite_Config -> (StringBuilder | Nothing) -> Nothing
  print_report (test_results : Vector Test_Result) (config : Suite_Config) (builder : (StringBuilder | Nothing)) =
      distinct_group_names = test_results.map (_.group_name) . distinct
-     results_per_group = distinct_group_names.fold Map.empty acc-> group_name->
+     results_per_group = distinct_group_names.fold Dictionary.empty acc-> group_name->
          group_results = test_results.filter res->
              res.group_name == group_name
          assert (group_results.length > 0)
|
@ -173,7 +173,7 @@ limit_data limit data = case limit of
|
|||||||
bounds = case data.fold_with_index (Extreme.Value first first first first) update_extreme of
|
bounds = case data.fold_with_index (Extreme.Value first first first first) update_extreme of
|
||||||
Extreme.Value min_x max_x min_y max_y -> [min_x, max_x, min_y, max_y]
|
Extreme.Value min_x max_x min_y max_y -> [min_x, max_x, min_y, max_y]
|
||||||
_ -> []
|
_ -> []
|
||||||
extreme = Map.from_vector bounds error_on_duplicates=False . values
|
extreme = Dictionary.from_vector bounds error_on_duplicates=False . values
|
||||||
|
|
||||||
if limit <= extreme.length then extreme.take (..First limit) else
|
if limit <= extreme.length then extreme.take (..First limit) else
|
||||||
extreme + data.take (..Sample (limit - extreme.length))
|
extreme + data.take (..Sample (limit - extreme.length))
|
||||||
|
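`limit_data` keeps relying on `error_on_duplicates=False`, so coinciding extreme points collapse into a single entry before `values` is taken. A small illustrative sketch of that deduplication (made-up values; the spec file added later in this commit confirms that the later pair wins on duplicate keys):

```
from Standard.Base import all

main =
    # Two extremes share the same key; with error_on_duplicates=False the
    # later pair silently replaces the earlier one instead of raising
    # Illegal_Argument.
    pairs = [[0, "min_x"], [0, "max_x"], [7, "max_y"]]
    Dictionary.from_vector pairs error_on_duplicates=False . values
    # => ["max_x", "max_y"]
```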
@@ -27,9 +27,9 @@ prepare_visualization y max_rows=1000 =
     result = case x of
         _ : Vector -> make_json_for_vector x max_rows
         _ : Array -> prepare_visualization x.to_vector max_rows
-        _ : Map -> make_json_for_map x max_rows
+        _ : Dictionary -> make_json_for_dictionary x max_rows
         _ : JS_Object -> make_json_for_js_object x max_rows
-        _ : Row -> make_json_for_map x.to_map max_rows "column"
+        _ : Row -> make_json_for_dictionary x.to_dictionary max_rows "column"
         _ : Column -> prepare_visualization x.to_table max_rows
         _ : Table ->
             dataframe = x.take max_rows
@@ -98,7 +98,7 @@ make_json_for_object_matrix current vector idx=0 = if idx == vector.length then
         _ : Date_Time -> False
         _ : Duration -> False
         _ : Period -> False
-        _ : Map ->
+        _ : Dictionary ->
             pairs = row.keys.map k-> [k.to_text, make_json_for_value (row.get k)]
             JS_Object.from_pairs pairs
         _ : Row ->
@@ -115,13 +115,13 @@ make_json_for_object_matrix current vector idx=0 = if idx == vector.length then
         @Tail_Call make_json_for_object_matrix current vector idx+1

 ## PRIVATE
-   Render Map to JSON
-make_json_for_map : Map -> Integer -> Text -> JS_Object
-make_json_for_map map max_items key_name="key" =
+   Render Dictionary to JSON
+make_json_for_dictionary : Dictionary -> Integer -> Text -> JS_Object
+make_json_for_dictionary dict max_items key_name="key" =
     header = ["header", [key_name, "value"]]
-    all_rows = ["all_rows_count", map.size]
-    map_vector = Warning.clear (map.to_vector.take max_items)
-    mapped = map_vector . map p-> [p.first.to_text, make_json_for_value p.second]
+    all_rows = ["all_rows_count", dict.size]
+    as_vector = Warning.clear (dict.to_vector.take max_items)
+    mapped = as_vector . map p-> [p.first.to_text, make_json_for_value p.second]
     data = ["data", [mapped.map .first, mapped.map .second]]
     JS_Object.from_pairs [header, data, all_rows, ["type", "Map"]]

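Note that the `"type"` tag in the emitted JSON stays `"Map"` even after the rename, so existing visualizations keep recognizing the payload. For a dictionary built as `Dictionary.from_vector [["a", 1]]`, the function above would produce roughly the following payload (a sketch assembled from the fields in the code, not captured output):

```
{ "header": ["key", "value"],
  "data": [["a"], [1]],
  "all_rows_count": 1,
  "type": "Map" }
```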
@@ -187,7 +187,7 @@ make_json_for_value val level=0 = case val of
         prepared = if val.length > 5 then truncated + ["… " + (val.length - 5).to_text+ " items"] else truncated
         "[" + (prepared.join ", ") + "]"
     _ : Array -> make_json_for_value val.to_vector level
-    _ : Map ->
+    _ : Dictionary ->
         if level != 0 then "{… "+val.size.to_text+" items}" else
             truncated = val.keys.take 5 . map k-> k.to_text + ": " + (make_json_for_value (val.get k) level+1).to_text
             prepared = if val.length > 5 then truncated + ["… " + (val.length - 5).to_text+ " items"] else truncated
@@ -64,12 +64,12 @@ public class ConversionMethodTests {
     String src =
         """
        polyglot java import java.util.Map as Java_Map
-       import Standard.Base.Data.Map.Map
+       import Standard.Base.Data.Dictionary.Dictionary

        type Foo
            Mk_Foo data

-       Foo.from (that:Map) = Foo.Mk_Foo that
+       Foo.from (that:Dictionary) = Foo.Mk_Foo that

        main =
            jmap = Java_Map.of "A" 1 "B" 2 "C" 3
@@ -83,7 +83,7 @@ public class ConversionMethodTests {
   public void testDispatchOnJSMap() {
     String src =
         """
-       import Standard.Base.Data.Map.Map
+       import Standard.Base.Data.Dictionary.Dictionary

        foreign js js_map = '''
            let m = new Map()
@@ -94,7 +94,7 @@ public class ConversionMethodTests {
        type Foo
            Mk_Foo data

-       Foo.from (that:Map) = Foo.Mk_Foo that
+       Foo.from (that:Dictionary) = Foo.Mk_Foo that

        main =
            Foo.from js_map . data . size
@@ -285,66 +285,7 @@ main = Nothing
         .invokeMember(MethodNames.Module.EVAL_EXPRESSION, "sn");
     var sb = new StringBuilder();
     for (var v : g.allValues()) {
-      var simpleName = sn.execute(v).asString();
-      if (v.isNumber()) {
-        var ok =
-            switch (simpleName) {
-              case "Integer", "Float" -> true;
-              default -> false;
-            };
-        assertTrue("Unexpected simple name for number: " + simpleName, ok);
-        continue;
-      }
-      var meta = v.getMetaObject();
-      var metaName = meta != null ? meta.getMetaSimpleName() : "null";
-      if (!simpleName.equals(metaName)) {
-        if (v.isHostObject()) {
-          if (v.hasArrayElements()) {
-            assertEquals("Array", simpleName);
-            continue;
-          }
-          if (v.hasHashEntries()) {
-            assertEquals("Map", simpleName);
-            continue;
-          }
-        }
-        if (v.isString()) {
-          assertEquals("Text", simpleName);
-          continue;
-        }
-        if (v.isDuration()) {
-          assertEquals("Duration", simpleName);
-          continue;
-        }
-        if (v.isDate() && v.isTime()) {
-          assertEquals("Date_Time", simpleName);
-          continue;
-        }
-        if (v.isTimeZone()) {
-          assertEquals("Time_Zone", simpleName);
-          continue;
-        }
-        if (v.isDate()) {
-          assertEquals("Date", simpleName);
-          continue;
-        }
-        if (v.isTime()) {
-          assertEquals("Time_Of_Day", simpleName);
-          continue;
-        }
-        if (v.isNull()) {
-          assertEquals("Nothing", simpleName);
-          continue;
-        }
-
-        sb.append("\n")
-            .append("Simple names shall be the same for ")
-            .append(v)
-            .append(" get_simple_type_name: ")
-            .append(simpleName)
-            .append(" getMetaSimpleName: ")
-            .append(metaName);
-      }
+      compareQualifiedNameOfValue(sn, v, sb);
     }
     if (!sb.isEmpty()) {
       var lines = sb.toString().lines().count() - 1;
@@ -353,6 +294,77 @@ main = Nothing
     }
   }

+  private boolean compareQualifiedNameOfValue(Value sn, Value v, StringBuilder sb) {
+    var simpleName = sn.execute(v).asString();
+    if (v.isNumber()) {
+      var ok =
+          switch (simpleName) {
+            case "Integer", "Float" -> true;
+            default -> false;
+          };
+      assertTrue("Unexpected simple name for number: " + simpleName, ok);
+      return true;
+    }
+    var meta = v.getMetaObject();
+    var metaName = meta != null ? meta.getMetaSimpleName() : "null";
+    if (!simpleName.equals(metaName)) {
+      if (v.isHostObject()) {
+        if (v.hasArrayElements()) {
+          assertEquals("Array", simpleName);
+          return true;
+        }
+        if (v.hasHashEntries()) {
+          assertEquals("Dictionary", simpleName);
+          return true;
+        }
+      }
+      if (v.hasMembers() && v.getMember("__proto__") != null) {
+        if (v.hasHashEntries()) {
+          assertEquals("JavaScript hash map is called Map", "Map", metaName);
+          assertEquals(
+              "JavaScript hash map is seen as Dictionary by Enso", "Dictionary", simpleName);
+          return true;
+        }
+      }
+      if (v.isString()) {
+        assertEquals("Text", simpleName);
+        return true;
+      }
+      if (v.isDuration()) {
+        assertEquals("Duration", simpleName);
+        return true;
+      }
+      if (v.isDate() && v.isTime()) {
+        assertEquals("Date_Time", simpleName);
+        return true;
+      }
+      if (v.isTimeZone()) {
+        assertEquals("Time_Zone", simpleName);
+        return true;
+      }
+      if (v.isDate()) {
+        assertEquals("Date", simpleName);
+        return true;
+      }
+      if (v.isTime()) {
+        assertEquals("Time_Of_Day", simpleName);
+        return true;
+      }
+      if (v.isNull()) {
+        assertEquals("Nothing", simpleName);
+        return true;
+      }
+      sb.append("\n")
+          .append("Simple names shall be the same for ")
+          .append(v)
+          .append(" get_simple_type_name: ")
+          .append(simpleName)
+          .append(" getMetaSimpleName: ")
+          .append(metaName);
+    }
+    return false;
+  }
+
   @Test
   public void compareQualifiedAndSimpleTypeNameForTypes() throws Exception {
     var g = generator();
@@ -303,9 +303,13 @@ public final class ValuesGenerator {
   }

   public Value typeMap() {
-    return v("typeMap", """
-        import Standard.Base.Data.Map.Map
-        """, "Map").type();
+    return v(
+            "typeMap",
+            """
+        import Standard.Base.Data.Dictionary.Dictionary
+        """,
+            "Dictionary")
+        .type();
   }

   public Value typeWarning() {
@@ -679,28 +683,28 @@ public final class ValuesGenerator {
     if (languages.contains(Language.ENSO)) {
       var imports =
           """
-          import Standard.Base.Data.Map.Map
+          import Standard.Base.Data.Dictionary.Dictionary
           import Standard.Base.Nothing.Nothing
           """;
       for (var expr :
           List.of(
-              "Map.empty",
-              "Map.singleton Nothing Nothing",
-              "Map.singleton Nothing 'my_value'",
-              "Map.singleton 'my_value' Nothing",
-              "Map.singleton 1 1",
-              "Map.singleton 'C' 3",
-              "Map.singleton 'C' 43",
-              "Map.empty.insert 'A' 10 . insert 'B' 20",
+              "Dictionary.empty",
+              "Dictionary.singleton Nothing Nothing",
+              "Dictionary.singleton Nothing 'my_value'",
+              "Dictionary.singleton 'my_value' Nothing",
+              "Dictionary.singleton 1 1",
+              "Dictionary.singleton 'C' 3",
+              "Dictionary.singleton 'C' 43",
+              "Dictionary.empty.insert 'A' 10 . insert 'B' 20",
               // ((int) 'A') + ((int) 'B') = 131 ; codePoint(131) = \203
-              "Map.singleton '\203' 30",
-              "Map.singleton Map.empty 1",
-              "Map.singleton Map.empty Map.empty",
-              "Map.empty.insert 1 1 . insert 2 2",
-              "Map.empty.insert Nothing 'val' . insert 'key' 42",
-              "Map.empty.insert 'A' 1 . insert 'B' 2 . insert 'C' 3",
-              "Map.empty.insert 'C' 3 . insert 'B' 2 . insert 'A' 1")) {
-        collect.add(v("maps-" + expr, imports, expr, "Map").type());
+              "Dictionary.singleton '\203' 30",
+              "Dictionary.singleton Dictionary.empty 1",
+              "Dictionary.singleton Dictionary.empty Dictionary.empty",
+              "Dictionary.empty.insert 1 1 . insert 2 2",
+              "Dictionary.empty.insert Nothing 'val' . insert 'key' 42",
+              "Dictionary.empty.insert 'A' 1 . insert 'B' 2 . insert 'C' 3",
+              "Dictionary.empty.insert 'C' 3 . insert 'B' 2 . insert 'A' 1")) {
+        collect.add(v("maps-" + expr, imports, expr, "Dictionary").type());
       }
     }
     if (languages.contains(Language.JAVA)) {
@@ -358,7 +358,10 @@ public abstract class InvokeConversionNode extends BaseNode {
       @Shared("conversionResolverNode") @Cached ConversionResolverNode conversionResolverNode) {
     Function function =
         conversionResolverNode.expectNonNull(
-            thatMap, extractType(self), EnsoContext.get(this).getBuiltins().map(), conversion);
+            thatMap,
+            extractType(self),
+            EnsoContext.get(this).getBuiltins().dictionary(),
+            conversion);
     return invokeFunctionNode.execute(function, frame, state, arguments);
   }

@@ -637,7 +637,7 @@ public abstract class InvokeMethodNode extends BaseNode {
       @Shared("warnings") @CachedLibrary(limit = "10") WarningsLibrary warnings,
       @Shared("methodResolverNode") @Cached MethodResolverNode methodResolverNode) {
     var ctx = EnsoContext.get(this);
-    var hashMapType = ctx.getBuiltins().map();
+    var hashMapType = ctx.getBuiltins().dictionary();
     var function = methodResolverNode.expectNonNull(self, hashMapType, symbol);
     arguments[0] = self;
     return invokeFunctionNode.execute(function, frame, state, arguments);
@@ -104,7 +104,7 @@ public final class Builtins {
   private final Builtin text;
   private final Builtin array;
   private final Builtin vector;
-  private final Builtin map;
+  private final Builtin dictionary;
   private final Builtin dataflowError;
   private final Builtin ref;
   private final Builtin managedResource;
@@ -155,7 +155,7 @@ public final class Builtins {
     text = builtins.get(Text.class);
     array = builtins.get(Array.class);
     vector = builtins.get(Vector.class);
-    map = builtins.get(org.enso.interpreter.node.expression.builtin.Map.class);
+    dictionary = builtins.get(org.enso.interpreter.node.expression.builtin.Dictionary.class);
     dataflowError = builtins.get(org.enso.interpreter.node.expression.builtin.Error.class);
     ref = builtins.get(Ref.class);
     managedResource = builtins.get(ManagedResource.class);
@@ -691,8 +691,8 @@ public final class Builtins {
     return vector.getType();
   }

-  public Type map() {
-    return map.getType();
+  public Type dictionary() {
+    return dictionary.getType();
   }

   /**
@@ -35,7 +35,7 @@ import org.enso.interpreter.runtime.library.dispatch.TypesLibrary;
  */
 @ExportLibrary(TypesLibrary.class)
 @ExportLibrary(InteropLibrary.class)
-@Builtin(stdlibName = "Standard.Base.Data.Map.Map", name = "Map")
+@Builtin(stdlibName = "Standard.Base.Data.Dictionary.Dictionary", name = "Dictionary")
 public final class EnsoHashMap implements EnsoObject {
   private final EnsoHashMapBuilder mapBuilder;
   private final int generation;
@@ -150,7 +150,7 @@ public final class EnsoHashMap implements EnsoObject {

   @ExportMessage(library = TypesLibrary.class)
   Type getType(@Bind("$node") Node node) {
-    return EnsoContext.get(node).getBuiltins().map();
+    return EnsoContext.get(node).getBuiltins().dictionary();
   }

   @ExportMessage
@@ -160,7 +160,7 @@ public final class EnsoHashMap implements EnsoObject {

   @ExportMessage
   Type getMetaObject(@Bind("$node") Node node) {
-    return EnsoContext.get(node).getBuiltins().map();
+    return EnsoContext.get(node).getBuiltins().dictionary();
   }

   @ExportMessage
@@ -9,7 +9,7 @@ import com.oracle.truffle.api.nodes.Node;
 import org.enso.interpreter.dsl.BuiltinMethod;

 @BuiltinMethod(
-    type = "Map",
+    type = "Dictionary",
     name = "contains_key",
     description =
         """
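This builtin backs `Dictionary.contains_key`. A minimal usage sketch (values are illustrative; the behaviour matches the specs added later in this commit):

```
from Standard.Base import all

main =
    d = Dictionary.singleton 2 3
    d.contains_key 2    # True
    d.contains_key 1    # False
```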
@@ -19,11 +19,11 @@ import org.enso.interpreter.runtime.EnsoContext;
 import org.enso.interpreter.runtime.state.State;

 @BuiltinMethod(
-    type = "Map",
+    type = "Dictionary",
     name = "get_builtin",
     description =
         """
-        Gets a value from the map on the specified key, or the given default.
+        Gets a value from the dictionary on the specified key, or the given default.
         """,
     autoRegister = false,
     inlineable = true)
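The builtin backs `Dictionary.get`, whose second argument is the fallback returned when the key is missing. A short sketch (illustrative values, mirroring the `should support get` spec below):

```
from Standard.Base import all

main =
    d = Dictionary.singleton 2 3
    d.get 2 0     # 3
    d.get 1 10    # 10 - the key is absent, so the default is returned
```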
@@ -18,7 +18,7 @@ import org.enso.interpreter.runtime.data.text.Text;
 import org.enso.interpreter.runtime.error.PanicException;

 @BuiltinMethod(
-    type = "Map",
+    type = "Dictionary",
     name = "insert",
     description =
         """
@@ -20,7 +20,7 @@ import org.enso.interpreter.runtime.EnsoContext;
 import org.enso.interpreter.runtime.error.DataflowError;

 @BuiltinMethod(
-    type = "Map",
+    type = "Dictionary",
     name = "remove_builtin",
     description = """
         Removes an entry from this map specified with the key.
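The builtin backs `Dictionary.remove`, which returns a new dictionary without the entry and fails with `No_Such_Key` when the key is absent. A brief sketch (illustrative values, consistent with the removal specs below):

```
from Standard.Base import all

main =
    d = Dictionary.singleton "A" 1 . insert "B" 2
    d.remove "B" . to_vector    # [["A", 1]]
    d.remove "C"                # dataflow error: No_Such_Key
```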
@@ -12,7 +12,7 @@ import org.enso.interpreter.runtime.data.text.Text;
 import org.enso.interpreter.runtime.error.PanicException;

 @BuiltinMethod(
-    type = "Map",
+    type = "Dictionary",
     name = "size",
     description = "Returns the number of entries in this hash map",
     autoRegister = false)
@@ -13,7 +13,7 @@ import org.enso.interpreter.dsl.BuiltinMethod;
 import org.enso.interpreter.runtime.EnsoContext;

 @BuiltinMethod(
-    type = "Map",
+    type = "Dictionary",
     name = "to_text",
     description = """
         Returns text representation of this hash map
@@ -18,7 +18,7 @@ import org.enso.interpreter.runtime.data.vector.ArrayLikeHelpers;
 import org.enso.interpreter.runtime.error.PanicException;

 @BuiltinMethod(
-    type = "Map",
+    type = "Dictionary",
     name = "to_vector",
     description =
         """
@@ -114,7 +114,7 @@ public abstract class TypeOfNode extends Node {

   @Specialization(guards = {"type.isMap()"})
   Type doPolygotMap(Interop type, Object value) {
-    return EnsoContext.get(this).getBuiltins().map();
+    return EnsoContext.get(this).getBuiltins().dictionary();
   }

   @Specialization(guards = {"type.isString()"})
@@ -169,7 +169,7 @@ add_specs suite_builder =

     suite_builder.group "S3.head (bucket)" pending=api_pending group_builder->
         group_builder.specify "should be able to head a bucket" <|
-            S3.head bucket_name credentials=test_credentials . should_be_a Map
+            S3.head bucket_name credentials=test_credentials . should_be_a Dictionary
             S3.head not_a_bucket_name credentials=test_credentials . should_fail_with S3_Bucket_Not_Found

     suite_builder.group "S3.read_bucket" pending=api_pending group_builder->
test/Base_Tests/src/Data/Dictionary_Spec.enso (new file, 633 lines)
@@ -0,0 +1,633 @@
from Standard.Base import all
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Errors.No_Such_Key.No_Such_Key

from Standard.Test import all

polyglot java import java.util.Map as JavaMap
polyglot java import org.enso.base.file_system.File_Utils

## Type that violates reflexivity
type My_Nan
    Value comment:Text

type My_Nan_Comparator
    compare _ _ = Nothing
    hash _ = 0

Comparable.from (_:My_Nan) = My_Nan_Comparator

type My_Key
    Value hash_code:Integer value:Text idx:Integer

type My_Key_Comparator
    # Comparison ignores idx field
    compare x y =
        if x.hash_code != y.hash_code then Nothing else
            if x.value == y.value then Ordering.Equal else Nothing

    hash x = x.hash_code

Comparable.from (_:My_Key) = My_Key_Comparator


foreign js js_str str = """
    return new String(str)

foreign js js_null = """
    return null

foreign js js_empty_dict = """
    return new Map()

foreign python py_empty_dict = """
    return {}

foreign js js_dict_from_vec vec = """
    dict = new Map()
    for (let i = 0; i < vec.length; i += 2) {
        dict.set(vec[i], vec[i+1])
    }
    return dict

foreign python py_none = """
    return None

foreign python py_dict_from_vec vec = """
    d = {}
    for i in range(0, len(vec), 2):
        d[vec[i]] = vec[i + 1]
    return d

foreign python py_dict_from_map map = """
    d = dict()
    for key in map.__iter__():
        d[key] = map[key]
    return d

foreign python py_vec_from_map map = """
    vec = []
    for key in map.__iter__():
        value = map[key]
        vec.append([key, value])
    return vec

# Should throw error - updating immutable map from Enso
foreign python py_update_dict map key val = """
    map[key] = val

foreign python py_wrapper obj = """
    class MyClass:
        def __init__(self, obj):
            self.data = obj
    return MyClass(obj)

pending_python_missing = if Polyglot.is_language_installed "python" then Nothing else "Can't run Python tests, Python is not installed."

type Child
    Value data

type Parent
    Value child

type GrandParent
    Value parent

add_specs suite_builder =
    languages = Vector.build builder->
        builder . append ["Enso", _-> Dictionary.empty, Nothing]
        builder . append ["Java", _-> JavaMap.of, Nothing]
        builder . append ["JavaScript", _-> js_empty_dict, Nothing]
        builder . append ["Python", _-> py_empty_dict, pending_python_missing]
    languages.each entry->
        lang = entry.get 0
        empty_dict_fn = entry.get 1
        pending = entry.get 2
        add_common_specs suite_builder lang pending empty_dict_fn

    suite_builder.group "Enso Dictionaries" group_builder->
        group_builder.specify "should use proper hash code for keys" <|
            single_key_dict key = Dictionary.singleton key 42
            grand_parent_1 = GrandParent.Value (Parent.Value (Child.Value 2))
            grand_parent_2 = GrandParent.Value (Parent.Value (Child.Value 2.0))

            (single_key_dict 2 . at 2.0) . should_equal 42
            (single_key_dict -2 . at -2.0) . should_equal 42
            (single_key_dict 'ś' . at 's\u0301') . should_equal 42
            (single_key_dict 's\u0301' . at 'ś') . should_equal 42
            (single_key_dict 'éabc' . at 'e\u0301abc') . should_equal 42
            (single_key_dict 'e\u0301abc' . at 'éabc') . should_equal 42
            (single_key_dict grand_parent_1 . at grand_parent_2) . should_equal 42
            (single_key_dict (Json.parse '{"a": 1}') . at (Json.parse '{"a": 1}')) . should_equal 42
            (single_key_dict (Child.Value 1) . at (Child.Value 1.0)) . should_equal 42


        group_builder.specify "should support another Dictionary with NaN keys as key" <|
            Dictionary.singleton (Dictionary.singleton Number.nan 1) 42 . size . should_equal 1
            Dictionary.singleton (Dictionary.singleton Number.nan 1) 42 . keys . at 0 . keys . to_text . should_equal "[NaN]"
            Dictionary.singleton (Dictionary.singleton Number.nan 1) 42 . keys . at 0 . get Number.nan . should_equal 1
            Dictionary.singleton (Dictionary.singleton Number.nan 1) 42 . at (Dictionary.singleton Number.nan 1) . should_equal 42

        group_builder.specify "should support atoms with custom comparators that violate reflexivity as keys" <|
            k = My_Nan.Value "foo"
            k2 = My_Nan.Value "foo"
            (k==k).should_be_true
            (k==k2).should_be_false
            Meta.is_same_object k k2 . should_be_false
            Meta.is_same_object k k . should_be_true
            m = Dictionary.singleton k 10
            m.contains_key k . should_be_true
            m.get k . should_equal 10
            m.contains_key k2 . should_be_false

            m2 = m.insert k2 20
            m2.get k . should_equal 10
            m2.get k2 . should_equal 20
            m2.size . should_equal 2

            m3 = m2.insert k 30
            m3.size . should_equal 2
            m3.get k . should_equal 30

        group_builder.specify "should support atom with custom comparators with complicated hash method" <|
            keys = 0.up_to 500 . map ix->
                value = ["A", "B", "C", "D", "E"].at (ix % 5)
                hash_code = Comparable.from value . hash value
                My_Key.Value hash_code value ix
            distinct_keys = keys.fold Dictionary.empty acc_dict->
                item->
                    acc_dict.insert item True
            distinct_keys.size . should_equal 5
            distinct_key_values = keys.map (_.value) . fold Dictionary.empty acc_dict->
                item->
                    acc_dict.insert item True
            distinct_key_values.size . should_equal 5

        group_builder.specify "should not drop warnings from keys" <|
            key = Warning.attach "my_warn" "my_key"
            dict = Dictionary.singleton key 42
            (Warning.get_all (dict.keys.at 0)).length . should_equal 1

        group_builder.specify "should not drop warnings from values" <|
            val = Warning.attach "my_warn" "my_val"
            dict = Dictionary.singleton 42 val
            (Warning.get_all (dict.values.at 0)).length . should_equal 1

        group_builder.specify "should convert the whole Dictionary to a vector" <|
            m = Dictionary.empty . insert 0 0 . insert 3 -5 . insert 1 2
            m.to_vector.sort on=_.first . should_equal [[0, 0], [1, 2], [3, -5]]

        group_builder.specify "should allow building the Dictionary from two vectors" <|
            expected = Dictionary.empty . insert 0 0 . insert 3 -5 . insert 1 2
            Dictionary.from_keys_and_values [0, 3, 1] [0, -5, 2] . should_equal expected

        group_builder.specify "should allow building the Dictionary from vector like things" <|
            expected = Dictionary.empty . insert 0 0 . insert 1 -5 . insert 2 2
            Dictionary.from_keys_and_values (0.up_to 3) [0, -5, 2] . should_equal expected

        group_builder.specify "should not allow building with duplicate keys unless explicitly allowed" <|
            expected = Dictionary.empty . insert 0 0 . insert 3 -5 . insert 1 2
            Dictionary.from_keys_and_values [0, 3, 1, 0] [3, -5, 2, 0] . should_fail_with Illegal_Argument
            Dictionary.from_keys_and_values [0, 3, 1, 0] [3, -5, 2, 0] error_on_duplicates=False . should_equal expected

        group_builder.specify "should not allow different length vectors when building" <|
            Dictionary.from_keys_and_values [0, 3, 1] [3, -5, 2, 0] . should_fail_with Illegal_Argument

        group_builder.specify "should allow building the Dictionary from a vector" <|
            expected = Dictionary.empty . insert 0 0 . insert 3 -5 . insert 1 2
            vec = [[0, 0], [3, -5], [1, 2]]
            Dictionary.from_vector vec . should_equal expected

        group_builder.specify "should fail when building the Dictionary from wrong vector" <|
            Dictionary.from_vector [["A", 1, "B", 2]] . should_fail_with Illegal_Argument

        group_builder.specify "should not allow duplicates when building the Dictionary from a vector, unless explicitly allowed" <|
            vec = [[0, 0], [3, -5], [1, 2], [0, 1]]
            d1 = Dictionary.from_vector vec
            d1.should_fail_with Illegal_Argument
            d1.catch.message . should_equal "`Dictionary.from_vector` encountered duplicate key: 0"

            d2 = Dictionary.from_vector vec error_on_duplicates=False
            Problems.assume_no_problems d2
            d2.get 0 . should_equal 1
            d2.get 3 . should_equal -5

        group_builder.specify "should disallow duplicate keys when transforming the Dictionary" <|
            d = Dictionary.from_vector [[1, 2], [11, 3]]
            d2 = d.transform (k -> v -> [k % 10, v*2])
            d2.should_fail_with Illegal_Argument
            d2.catch.message . should_equal "`Dictionary.transform` encountered duplicate key: 1"

        group_builder.specify "should allow mapping over values" <|
            d = Dictionary.empty . insert 1 2 . insert 2 4
            expected = Dictionary.empty . insert 1 4 . insert 2 8
            d.map (v -> v*2) . should_equal expected

        group_builder.specify "should allow mapping over keys" <|
            d = Dictionary.empty . insert 1 2 . insert 2 4
            expected = Dictionary.empty . insert 2 2 . insert 4 4
            d.map_keys (k -> k*2) . should_equal expected

        group_builder.specify "should allow mapping with keys" <|
            d = Dictionary.empty . insert 1 2 . insert 2 4
            expected = Dictionary.empty . insert 1 3 . insert 2 6
            d.map_with_key (k -> v -> k + v) . should_equal expected

        group_builder.specify "should allow iterating over each value" <|
            d = Dictionary.empty . insert 1 2 . insert 2 4
            expected_vec = [2, 4]
            vec = Vector.build builder->
                d.each (v -> builder.append v)
            vec . should_equal expected_vec

        group_builder.specify "should allow iterating over each key-value pair" <|
            d = Dictionary.empty . insert 1 2 . insert 2 4
            expected_vec = [3, 6]
            vec = Vector.build builder->
                d.each_with_key (k -> v -> builder.append (k+v))
            vec . should_equal expected_vec

        group_builder.specify "should allow folding over the values" <|
            d = Dictionary.empty . insert 1 2 . insert 2 4
            d.fold 0 (+) . should_equal 6

        group_builder.specify "should allow folding over the key-value pairs" <|
            d = Dictionary.empty . insert 1 2 . insert 2 4
            d.fold_with_key 0 (l -> k -> v -> l + k + v) . should_equal 9

        group_builder.specify "should be able to add a Nothing key to a Dictionary of Text" <|
            m = Dictionary.empty . insert "A" 2 . insert Nothing 1 . insert "B" 3
            m.at "A" . should_equal 2
            m.at "B" . should_equal 3
            m.at Nothing . should_equal 1

        group_builder.specify "should be able to add a Nothing key to a Dictionary of Integer" <|
            m = Dictionary.empty . insert 100 2 . insert Nothing 1 . insert 200 3
            m.at 100 . should_equal 2
            m.at 200 . should_equal 3
            m.at Nothing . should_equal 1

    suite_builder.group "Polyglot keys and values" group_builder->
        group_builder.specify "should support polyglot keys" <|
            dict = Dictionary.singleton (js_str "A") 42
            dict.size.should_equal 1
            dict.get "A" . should_equal 42
            dict.get (js_str "A") . should_equal 42

        group_builder.specify "should support host objects as keys" <|
            # java.nio.path.Path has proper implementation of hashCode
            dict = Dictionary.singleton (File_Utils.toPath "/home/user/file.txt") 42
            dict.get "X" . should_equal Nothing
            dict.get "A" . should_equal Nothing
            dict.get (File_Utils.toPath "/home/user/file.txt") . should_equal 42

        group_builder.specify "should support Python objects as keys" pending=pending_python_missing <|
            py_obj = py_wrapper 42
            dict = Dictionary.singleton py_obj "Value"
            dict.get py_obj . should_equal "Value"

        group_builder.specify "should support Python objects as values" pending=pending_python_missing <|
            dict = Dictionary.singleton "A" (py_wrapper 42)
            dict.get "A" . data . should_equal 42

        group_builder.specify "should insert entries to a polyglot map" pending=pending_python_missing <|
            dict = py_dict_from_vec ["A", 1, "B", 2]
            dict.insert "C" 3 . keys . sort . should_equal ["A", "B", "C"]

        group_builder.specify "should remove entries from a polyglot map" pending=pending_python_missing <|
            dict = py_dict_from_vec ["A", 1, "B", 2]
            dict.remove "B" . to_vector . should_equal [["A", 1]]

    suite_builder.group "non-linear inserts" group_builder->
        group_builder.specify "should handle inserts with different keys" <|
            d1 = Dictionary.singleton "A" 1
            d2 = d1.insert "B" 2
            d3 = d1.insert "C" 3
            d2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]]
            d3.to_vector.sort on=_.first . should_equal [["A", 1], ["C", 3]]

        group_builder.specify "should handle inserts with same keys (1)" <|
            d1 = Dictionary.singleton "A" 1
            d2 = d1.insert "A" 2
            d3 = d1.insert "A" 3
            d4 = d1.insert "B" 4
            d2.to_vector.sort on=_.first . should_equal [["A", 2]]
            d3.to_vector.sort on=_.first . should_equal [["A", 3]]
            d4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 4]]

        group_builder.specify "should handle inserts with same keys (2)" <|
            d1 = Dictionary.singleton "foo" 1
            d2 = d1.insert "baz" 2
            d3 = d2.insert "foo" 3
            d1.to_vector.sort on=_.first . should_equal [['foo', 1]]
            d2.to_vector.sort on=_.first . should_equal [['baz', 2], ['foo', 1]]
            d3.to_vector.sort on=_.first . should_equal [['baz', 2], ['foo', 3]]

        group_builder.specify "should handle inserts with same keys (3)" <|
            d1 = Dictionary.singleton "A" 1
            d2 = d1.insert "B" 2
            d3 = d2.insert "A" 3
            d4 = d2.insert "C" 4
            d1.to_vector.sort on=_.first . should_equal [["A", 1]]
            d2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]]
            d3.to_vector.sort on=_.first . should_equal [["A", 3], ["B", 2]]
            d4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 4]]

        group_builder.specify "should handle inserts with same keys (4)" <|
            d1 = Dictionary.singleton "A" 1
            d2 = d1.insert "B" 2
            d3 = d2.insert "C" 3
            d4 = d2.insert "D" 4
            d2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]]
            d3.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]]
            d4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["D", 4]]

        group_builder.specify "should handle inserts with same keys (5)" <|
            d1 = Dictionary.singleton "A" 1
            d2 = d1.insert "B" 2
            d3 = d2.insert "A" 3
            d4 = d2.insert "A" 4
            d2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]]
            d3.to_vector.sort on=_.first . should_equal [["A", 3], ["B", 2]]
            d4.to_vector.sort on=_.first . should_equal [["A", 4], ["B", 2]]

        group_builder.specify "should handle inserts with same keys (6)" <|
            d1 = Dictionary.singleton "A" 1
            d2 = d1.insert "B" 2
            d3 = d2.insert "C" 3
            d4 = d2.insert "A" 4
            d2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]]
            d3.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]]
            d4.to_vector.sort on=_.first . should_equal [["A", 4], ["B", 2]]

        group_builder.specify "should handle inserts with same keys (7)" <|
            d1 = Dictionary.singleton "A" 1
            d2 = d1.insert "B" 2
            d3 = d2.insert "C" 3
            d4 = d3.insert "D" 4
            d5 = d2.insert "A" 5
            d2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]]
            d3.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]]
            d4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3], ["D", 4]]
            d5.to_vector.sort on=_.first . should_equal [["A", 5], ["B", 2]]

        group_builder.specify "should handle inserts with same keys (8)" <|
            d1 = Dictionary.singleton "A" 1
            d2 = d1.insert "B" 2
            d3 = d2.insert "C" 3
            d4 = d3.insert "A" 4
            d5 = d2.insert "A" 5
            d2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]]
            d3.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]]
            d4.to_vector.sort on=_.first . should_equal [["A", 4], ["B", 2], ["C", 3]]
            d5.to_vector.sort on=_.first . should_equal [["A", 5], ["B", 2]]

        group_builder.specify "should handle inserts with same keys (9)" <|
            d1 = Dictionary.singleton "A" 1
            d2 = d1.insert "B" 2
            d3 = d2.insert "A" 3
            d4 = d2.insert "B" 4
            d5 = d2.insert "C" 5
            d2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]]
            d3.to_vector.sort on=_.first . should_equal [["A", 3], ["B", 2]]
            d4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 4]]
            d5.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 5]]

        group_builder.specify "should handle inserts with same keys (10)" <|
            d1 = Dictionary.singleton "A" 1
            d2 = d1.insert "B" 2
            d3 = d2.insert "C" 3
            d4 = d2.insert "D" 4
            d5 = d2.insert "E" 5
            d2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]]
            d3.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]]
            d4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["D", 4]]
            d5.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["E", 5]]

    suite_builder.group "Polyglot hash maps" group_builder->
        group_builder.specify "should pass Dictionaries as immutable maps to other languages" pending=pending_python_missing <|
            dict = Dictionary.singleton "A" 1
            # Python's KeyError should be raised
            Test.expect_panic_with (py_update_dict dict "A" 2) Any
            dict.get "A" . should_equal 1

        group_builder.specify "should treat JavaScript maps as Enso Dictionaries" <|
            js_dict = js_dict_from_vec ["A", 1, "B", 2]
            dict = js_dict.insert "C" 3
            js_dict.to_vector.should_equal [["A", 1], ["B", 2]]
            dict.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]]

        group_builder.specify "should treat Java Map as Enso Dictionary" <|
            sort_by_keys vec = vec.sort by=x-> y-> Ordering.compare x.first y.first
            dict = JavaMap.of "A" 1 "B" 2
            (sort_by_keys dict.to_vector) . should_equal [["A", 1], ["B", 2]]
            (sort_by_keys (dict.insert "C" 3 . to_vector)) . should_equal [["A", 1], ["B", 2], ["C", 3]]

        group_builder.specify "should treat Python dicts as Enso Dictionaries" pending=pending_python_missing <|
            py_dict = py_dict_from_vec ["A", 1, "B", 2]
            dict = py_dict.insert "C" 3
            py_dict.not_empty . should_be_true
            py_dict.to_vector . should_contain_the_same_elements_as [["A", 1], ["B", 2]]
            dict.to_vector . should_contain_the_same_elements_as [["A", 1], ["B", 2], ["C", 3]]
            py_empty_dict.is_empty.should_be_true
            py_empty_dict.insert "A" 1 . insert "A" 2 . get "A" . should_equal 2

        group_builder.specify "should be able to remove entries" pending=pending_python_missing <|
            py_dict_from_vec ["A", 1, "B", 2] . remove "A" . size . should_equal 1
            py_dict_from_vec ["A", 1, "B", 2] . remove "A" . get "B" . should_equal 2

        group_builder.specify "should be able to remove NaN keys" pending=pending_python_missing <|
            py_dict_from_vec [Number.nan, 1] . remove Number.nan . size . should_equal 0

        group_builder.specify "should pass Dictionaries with null keys to Python and back" pending=pending_python_missing <|
            # Python supports None as keys, Enso supports Nothing as keys
            py_dict = py_dict_from_map (Dictionary.singleton Nothing 42)
            py_dict.get Nothing . should_equal 42
            py_dict.insert "A" 23 . get Nothing . should_equal 42
            py_dict.insert Nothing 23 . get Nothing . should_equal 23

        group_builder.specify "should treat Enso Dictionaries as Python dicts when passed to Python" pending=pending_python_missing <|
            dict1 = Dictionary.singleton "A" 1 . insert "B" 2
            py_vec_from_map dict1 . should_contain_the_same_elements_as [["A", 1], ["B", 2]]
            dict2 = Dictionary.singleton "A" 1 . insert Nothing 2
            py_vec_from_map dict2 . should_contain_the_same_elements_as [["A", 1], [Nothing, 2]]


add_common_specs suite_builder prefix:Text (pending : (Text | Nothing)) (empty_dict_fn : (Nothing -> Dictionary)) =
    # Not on a single line - empty_dict is a method, not a variable
    empty_dict =
        empty_dict_fn Nothing

    suite_builder.group prefix+": Common polyglot Dictionary operations" pending=pending group_builder->
        group_builder.specify "should get the default comparator for polyglot maps" <|
            Comparable.from empty_dict . should_equal Default_Comparator

        group_builder.specify "should compare two hash maps" <|
            (empty_dict.insert "a" 1).should_equal (empty_dict.insert "a" 1)
            (empty_dict.insert "b" 2).should_not_equal (empty_dict.insert "a" 1)
            empty_dict.should_equal empty_dict
            empty_dict.should_not_equal (empty_dict.insert "a" 1)
            (empty_dict.insert "a" 1 . insert "b" 2).should_equal (empty_dict.insert "b" 2 . insert "a" 1)

        group_builder.specify "should allow checking for non emptiness" <|
            non_empty = empty_dict . insert "foo" 1234
            empty_dict.not_empty . should_be_false
            non_empty.not_empty . should_be_true

        group_builder.specify "should allow checking its size" <|
            non_empty = empty_dict.insert "a" "b" . insert "x" "y"
            empty_dict.size . should_equal 0
            non_empty.size . should_equal 2

        group_builder.specify "should allow checking for emptiness" <|
            non_empty = empty_dict . insert "foo" 1234
            empty_dict.is_empty . should_be_true
            non_empty.is_empty . should_be_false

        group_builder.specify "should handle incomparable values as keys" <|
            empty_dict.insert Number.nan 1 . insert Number.nan 2 . get Number.nan . should_equal 2

        group_builder.specify "should handle Nothing as values" <|
            empty_dict.insert 1 Nothing . at 1 . should_equal Nothing
            empty_dict.insert Nothing Nothing . at Nothing . should_equal Nothing

        group_builder.specify "should support rewriting values with same keys" <|
            dict = Dictionary.singleton "a" 1 . insert "a" 42
            dict.size.should_equal 1
            dict.get "a" . should_equal 42

        group_builder.specify "should allow storing atoms as values" <|
            json = Json.parse '{"a": 1}'
            pair = Pair.new "first" "second"
            dict = Dictionary.singleton 0 json . insert 1 pair
            dict.get 0 . should_equal json
            dict.get 1 . should_equal pair

        group_builder.specify "should support NaN as keys" <|
            empty_dict.insert Number.nan 1 . contains_key Number.nan . should_be_true
            empty_dict.insert Number.nan 1 . values . should_equal [1]
            empty_dict.insert Number.nan 1 . insert Number.nan 2 . contains_key Number.nan . should_be_true
            empty_dict.insert Number.nan 1 . insert Number.nan 2 . values . should_equal [2]
            empty_dict.insert Number.nan 1 . insert "key" 2 . insert Number.nan 3 . contains_key Number.nan . should_be_true
            empty_dict.insert Number.nan 1 . insert "key" 2 . insert Number.nan 3 . contains_key "key" . should_be_true
            empty_dict.insert Number.nan 1 . insert "key" 2 . insert Number.nan 3 . at Number.nan . should_equal 3
            empty_dict.insert Number.nan 1 . insert "key" 2 . insert Number.nan 3 . at "key" . should_equal 2
            empty_dict.insert Number.nan 1 . insert Number.nan Number.nan . at Number.nan . to_text . should_equal "NaN"
            empty_dict.insert Number.nan 1 . insert Number.nan Number.nan . remove Number.nan . size . should_equal 0

        group_builder.specify "should support arbitrary atoms as keys" <|
            dict = empty_dict . insert (Pair.new "one" "two") 42
            (dict.get (Pair.new "one" "two")).should_equal 42
            (dict.get (Pair.new "A" "B")).should_equal Nothing
            (dict.get (Pair.new "two" "two")).should_equal Nothing

        group_builder.specify "should support vectors as keys" <|
            dict = empty_dict . insert [1, "a", 2] "Value"
            dict.size.should_equal 1
            dict.get [1, "a", 2] . should_equal "Value"

        group_builder.specify "should support dates as keys" <|
            dict = empty_dict.insert (Date.new 1993) 1 . insert (Date.new 1993 2 5) 2 . insert (Date_Time.new 1993 2 5 13 45) 3
            dict.size.should_equal 3
            dict.get (Date.new 1993 6 7) . should_equal Nothing
            dict.get (Date.new 1993) . should_equal 1
            dict.get (Date_Time.new 1993) . should_equal Nothing
            dict.get (Date.new 1993 2 5) . should_equal 2
            dict.get (Date_Time.new 1993 2 5) . should_equal Nothing
            dict.get (Date_Time.new 1993 2 5 13 45) . should_equal 3

        group_builder.specify "should support another hash map as key" <|
            keys = empty_dict.insert (Pair.new "one" "two") 42
            dict = empty_dict.insert keys 23
            dict.size.should_equal 1
            (dict.get "A").should_equal Nothing
            (dict.get keys).should_equal 23
            (dict.get dict).should_equal Nothing

        group_builder.specify "should handle keys with standard equality semantics" <|
            dict = empty_dict.insert 2 "Hello"
            (dict.get 2).should_equal "Hello"
            (dict.get 2.0).should_equal "Hello"
            (empty_dict.insert 2 "Hello").should_equal (empty_dict.insert 2.0 "Hello")

        group_builder.specify "should handle Nothing as keys" <|
            empty_dict.insert Nothing 3 . get Nothing . should_equal 3
            empty_dict.insert Nothing 1 . insert Nothing 2 . get Nothing . should_equal 2
            empty_dict.insert Nothing 1 . should_equal (empty_dict.insert Nothing 1)
            empty_dict.insert Nothing 1 . insert Nothing 2 . at Nothing . should_equal 2

        group_builder.specify "should handle JavaScript null as keys" <|
            empty_dict.insert js_null 1 . at Nothing . should_equal 1

        group_builder.specify "should handle Python None as keys" pending=pending_python_missing <|
            empty_dict.insert py_none 1 . at Nothing . should_equal 1

        group_builder.specify "should define a well-defined text conversion" <|
            d = empty_dict . insert 0 0 . insert 3 -5 . insert 1 2
            d.to_text . should_contain "0=0"
            d.to_text . should_contain "3=-5"
            d.to_text . should_contain "1=2"

        group_builder.specify "should define structural equality" <|
            dict_1 = empty_dict . insert "1" 2 . insert "2" "1"
            dict_2 = empty_dict . insert "1" 2 . insert "2" "1"
            dict_3 = empty_dict
            dict_1==dict_2 . should_be_true
            dict_1==dict_3 . should_be_false
            dict_2==dict_3 . should_be_false

        group_builder.specify "should allow inserting and looking up values" <|
            m = empty_dict . insert "foo" 134 . insert "bar" 654 . insert "baz" "spam"
            m.at "foo" . should_equal 134
            m.at "bar" . should_equal 654
            m.at "baz" . should_equal "spam"
            (m.at "nope").should_fail_with No_Such_Key

        group_builder.specify "should support get" <|
            m = empty_dict . insert 2 3
            m.get 2 0 . should_equal 3
            m.get 1 10 . should_equal 10
            m.get 2 (Panic.throw "missing") . should_equal 3

        group_builder.specify "should allow getting a vector of the keys" <|
            m = empty_dict . insert 1 2 . insert 2 4
            m.keys . should_equal [1, 2]

        group_builder.specify "should allow getting a vector of the values" <|
            m = empty_dict . insert 1 2 . insert 2 4
            m.values . should_equal [2, 4]

        group_builder.specify "should support contains_key" <|
            m = empty_dict . insert 2 3
            m.contains_key 2 . should_be_true
            m.contains_key 1 . should_be_false

        group_builder.specify "should allow transforming the dictionary" <|
            m = empty_dict . insert 1 2 . insert 2 4
            expected = empty_dict . insert "1" 4 . insert "2" 8
            m.transform (k -> v -> [k.to_text, v*2]) . should_equal expected

        group_builder.specify "should be able to remove entries (1)" <|
            m1 = empty_dict.insert "A" 1 . insert "B" 2
            m2 = m1.remove "B"
            m2.get "A" . should_equal 1
            m2.remove "A" . should_equal empty_dict
            m1.remove "foo" . should_fail_with No_Such_Key

        group_builder.specify "should be able to remove entries (2)" <|
            m1 = empty_dict.insert "A" 1
            m2 = m1.insert "B" 2
            m3 = m1.insert "C" 3
            m2.remove "A" . to_vector . should_equal [["B", 2]]
            m2.remove "B" . to_vector . should_equal [["A", 1]]
            m3.remove "A" . to_vector . should_equal [["C", 3]]
            m3.remove "C" . to_vector . should_equal [["A", 1]]

        group_builder.specify "should be able to remove entries (3)" <|
            m = empty_dict.insert "A" 1 . insert "B" 2 . insert "C" 3
            m.remove "B" . should_equal (empty_dict.insert "A" 1 . insert "C" 3)

main filter=Nothing =
    suite = Test.build suite_builder->
        add_specs suite_builder
    suite.run_with_filter filter
@@ -5,10 +5,11 @@ from Standard.Test import all


 add_specs suite_builder =
-    suite_builder.group "Enso Set" group_builder->
+    suite_builder.group "Enso Hashset" group_builder->
         group_builder.specify "should allow checking for emptiness" <|
-            empty_map = Set.empty
-            non_empty = Set.empty . insert "foo"
+            empty_map =
+                Hashset.empty
+            non_empty = Hashset.empty . insert "foo"
             empty_map.is_empty . should_be_true
             non_empty.is_empty . should_be_false

@ -16,34 +17,34 @@ add_specs suite_builder =
|
|||||||
non_empty.not_empty . should_be_true
|
non_empty.not_empty . should_be_true
|
||||||
|
|
||||||
group_builder.specify "should be constructed from a vector" <|
|
group_builder.specify "should be constructed from a vector" <|
|
||||||
s1 = Set.from_vector [1, 2, 3, 2]
|
s1 = Hashset.from_vector [1, 2, 3, 2]
|
||||||
s1.size . should_equal 3
|
s1.size . should_equal 3
|
||||||
s1.to_vector.sort . should_equal [1, 2, 3]
|
s1.to_vector.sort . should_equal [1, 2, 3]
|
||||||
|
|
||||||
r2 = Set.from_vector [1, 2, 2] error_on_duplicates=True
|
r2 = Hashset.from_vector [1, 2, 2] error_on_duplicates=True
|
||||||
r2.should_fail_with Illegal_Argument
|
r2.should_fail_with Illegal_Argument
|
||||||
|
|
||||||
group_builder.specify "should allow checking contains" <|
|
group_builder.specify "should allow checking contains" <|
|
||||||
s1 = Set.from_vector [1, 2, 3, 2]
|
s1 = Hashset.from_vector [1, 2, 3, 2]
|
||||||
s1.contains 1 . should_be_true
|
s1.contains 1 . should_be_true
|
||||||
s1.contains 2 . should_be_true
|
s1.contains 2 . should_be_true
|
||||||
s1.contains 3 . should_be_true
|
s1.contains 3 . should_be_true
|
||||||
s1.contains 4 . should_be_false
|
s1.contains 4 . should_be_false
|
||||||
|
|
||||||
group_builder.specify "should allow checking contains with relational NULL logic" <|
|
group_builder.specify "should allow checking contains with relational NULL logic" <|
|
||||||
Set.from_vector [1, 2] . contains_relational 1 . should_be_true
|
Hashset.from_vector [1, 2] . contains_relational 1 . should_be_true
|
||||||
Set.from_vector [1, 2] . contains_relational 3 . should_be_false
|
Hashset.from_vector [1, 2] . contains_relational 3 . should_be_false
|
||||||
Set.from_vector [1, 2, Nothing] . contains_relational 1 . should_be_true
|
Hashset.from_vector [1, 2, Nothing] . contains_relational 1 . should_be_true
|
||||||
Set.from_vector [1, 2, Nothing] . contains_relational 3 . should_equal Nothing
|
Hashset.from_vector [1, 2, Nothing] . contains_relational 3 . should_equal Nothing
|
||||||
Set.from_vector [1, 2, Nothing] . contains_relational Nothing . should_equal Nothing
|
Hashset.from_vector [1, 2, Nothing] . contains_relational Nothing . should_equal Nothing
|
||||||
Set.from_vector [1, 2] . contains_relational Nothing . should_equal Nothing
|
Hashset.from_vector [1, 2] . contains_relational Nothing . should_equal Nothing
|
||||||
Set.from_vector [Nothing] . contains_relational Nothing . should_equal Nothing
|
Hashset.from_vector [Nothing] . contains_relational Nothing . should_equal Nothing
|
||||||
Set.from_vector [] . contains_relational Nothing . should_be_false
|
Hashset.from_vector [] . contains_relational Nothing . should_be_false
|
||||||
|
|
||||||
group_builder.specify "should allow to compute a union, intersection and difference" <|
|
group_builder.specify "should allow to compute a union, intersection and difference" <|
|
||||||
s1 = Set.from_vector [1, 2]
|
s1 = Hashset.from_vector [1, 2]
|
||||||
s2 = Set.from_vector [2, 3]
|
s2 = Hashset.from_vector [2, 3]
|
||||||
s3 = Set.from_vector [3, 4]
|
s3 = Hashset.from_vector [3, 4]
|
||||||
|
|
||||||
(s1.union s2).to_vector.sort . should_equal [1, 2, 3]
|
(s1.union s2).to_vector.sort . should_equal [1, 2, 3]
|
||||||
(s1.union s3).to_vector.sort . should_equal [1, 2, 3, 4]
|
(s1.union s3).to_vector.sort . should_equal [1, 2, 3, 4]
|
||||||
@ -54,19 +55,19 @@ add_specs suite_builder =
|
|||||||
(s1.difference s1).to_vector . should_equal []
|
(s1.difference s1).to_vector . should_equal []
|
||||||
|
|
||||||
group_builder.specify "should allow to check for equality of two sets" <|
|
group_builder.specify "should allow to check for equality of two sets" <|
|
||||||
s1 = Set.from_vector [1, 2]
|
s1 = Hashset.from_vector [1, 2]
|
||||||
s2 = Set.from_vector [2, 1, 1]
|
s2 = Hashset.from_vector [2, 1, 1]
|
||||||
s3 = Set.from_vector [1, 2, 3]
|
s3 = Hashset.from_vector [1, 2, 3]
|
||||||
|
|
||||||
(s1 == s2) . should_be_true
|
(s1 == s2) . should_be_true
|
||||||
(s1 == s1) . should_be_true
|
(s1 == s1) . should_be_true
|
||||||
(s1 == s3) . should_be_false
|
(s1 == s3) . should_be_false
|
||||||
|
|
||||||
group_builder.specify "should be able to convert to text" <|
|
group_builder.specify "should be able to convert to text" <|
|
||||||
s1 = Set.from_vector ["1", "2", "3"]
|
s1 = Hashset.from_vector ["1", "2", "3"]
|
||||||
s2 = Set.from_vector [1, 2, 3]
|
s2 = Hashset.from_vector [1, 2, 3]
|
||||||
s1.to_text.should_equal "Set{'1', '2', '3'}"
|
s1.to_text.should_equal "Hashset{'1', '2', '3'}"
|
||||||
s2.to_text.should_equal "Set{1, 2, 3}"
|
s2.to_text.should_equal "Hashset{1, 2, 3}"
|
||||||
|
|
||||||
main filter=Nothing =
|
main filter=Nothing =
|
||||||
suite = Test.build suite_builder->
|
suite = Test.build suite_builder->
|
@ -85,13 +85,13 @@ add_specs suite_builder =
|
|||||||
Json.parse '{"constructor": "Skew", "population": true}' . into Statistic . should_equal (Statistic.Skew True)
|
Json.parse '{"constructor": "Skew", "population": true}' . into Statistic . should_equal (Statistic.Skew True)
|
||||||
Json.parse '{"constructor": "NotARealOne", "population": true}' . into Statistic . should_fail_with Illegal_Argument
|
Json.parse '{"constructor": "NotARealOne", "population": true}' . into Statistic . should_fail_with Illegal_Argument
|
||||||
|
|
||||||
group_builder.specify "should be able to convert a JS_Object into a Map using into" <|
|
group_builder.specify "should be able to convert a JS_Object into a Dictionary using into" <|
|
||||||
Json.parse '{"a": 15, "b": 20, "c": "X", "d": null}' . into Map . should_equal (Map.from_vector [["a", 15], ["b", 20], ["c", "X"], ["d", Nothing]])
|
Json.parse '{"a": 15, "b": 20, "c": "X", "d": null}' . into Dictionary . should_equal (Dictionary.from_vector [["a", 15], ["b", 20], ["c", "X"], ["d", Nothing]])
|
||||||
Json.parse '{}' . into Map . should_equal Map.empty
|
Json.parse '{}' . into Dictionary . should_equal Dictionary.empty
|
||||||
|
|
||||||
# [] parses as a vector/array which does not have the `into` method, that only works for {} objects:
|
# [] parses as a vector/array which does not have the `into` method, that only works for {} objects:
|
||||||
Test.expect_panic No_Such_Method <|
|
Test.expect_panic No_Such_Method <|
|
||||||
Json.parse '[]' . into Map
|
Json.parse '[]' . into Dictionary
|
||||||
|
|
||||||
group_builder.specify "should be able to deserialize Date" <|
|
group_builder.specify "should be able to deserialize Date" <|
|
||||||
'{"type": "Date", "constructor": "new", "year": 2018, "month": 7, "day": 3}'.should_parse_as (Date.new 2018 7 3)
|
'{"type": "Date", "constructor": "new", "year": 2018, "month": 7, "day": 3}'.should_parse_as (Date.new 2018 7 3)
|
||||||
|
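The Hashset hunks above show the renamed constructor and set algebra; a small hedged sketch of how they compose (`intersection` is named by the spec title but not shown in the excerpt, so treat it as an assumption):

    from Standard.Base import all

    main =
        s1 = Hashset.from_vector [1, 2]
        s2 = Hashset.from_vector [2, 3]
        IO.println (s1.union s2 . to_vector . sort)    # [1, 2, 3]
        IO.println (s1.difference s2 . to_vector)      # [1]
        # JS_Object values convert with `into`, mirroring the Json_Spec hunk:
        IO.println (Json.parse '{"a": 15}' . into Dictionary . at "a")   # 15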
@ -1,637 +0,0 @@
from Standard.Base import all
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Errors.No_Such_Key.No_Such_Key

from Standard.Test import all


polyglot java import java.util.Map as JavaMap
polyglot java import org.enso.base.file_system.File_Utils

## Type that violates reflexivity
type My_Nan
    Value comment:Text

type My_Nan_Comparator
    compare _ _ = Nothing
    hash _ = 0

Comparable.from (_:My_Nan) = My_Nan_Comparator

type My_Key
    Value hash_code:Integer value:Text idx:Integer

type My_Key_Comparator
    # Comparison ignores idx field
    compare x y =
        if x.hash_code != y.hash_code then Nothing else
            if x.value == y.value then Ordering.Equal else Nothing

    hash x = x.hash_code

Comparable.from (_:My_Key) = My_Key_Comparator


foreign js js_str str = """
    return new String(str)

foreign js js_null = """
    return null

foreign js js_empty_dict = """
    return new Map()

foreign python py_empty_dict = """
    return {}

foreign js js_dict_from_vec vec = """
    dict = new Map()
    for (let i = 0; i < vec.length; i += 2) {
        dict.set(vec[i], vec[i+1])
    }
    return dict

foreign python py_none = """
    return None

foreign python py_dict_from_vec vec = """
    d = {}
    for i in range(0, len(vec), 2):
        d[vec[i]] = vec[i + 1]
    return d

foreign python py_dict_from_map map = """
    d = dict()
    for key in map.__iter__():
        d[key] = map[key]
    return d

foreign python py_vec_from_map map = """
    vec = []
    for key in map.__iter__():
        value = map[key]
        vec.append([key, value])
    return vec

# Should throw error - updating immutable map from Enso
foreign python py_update_dict map key val = """
    map[key] = val

foreign python py_wrapper obj = """
    class MyClass:
        def __init__(self, obj):
            self.data = obj
    return MyClass(obj)

pending_python_missing = if Polyglot.is_language_installed "python" then Nothing else "Can't run Python tests, Python is not installed."

type Child
    Value data

type Parent
    Value child

type GrandParent
    Value parent

add_specs suite_builder =
    languages = Vector.build builder->
        builder . append ["Enso", _-> Map.empty, Nothing]
        builder . append ["Java", _-> JavaMap.of, Nothing]
        builder . append ["JavaScript", _-> js_empty_dict, Nothing]
        builder . append ["Python", _-> py_empty_dict, pending_python_missing]
    languages.each entry->
        lang = entry.get 0
        empty_map_fn = entry.get 1
        pending = entry.get 2
        add_common_specs suite_builder lang pending empty_map_fn

    suite_builder.group "Enso maps" group_builder->

        group_builder.specify "should use proper hash code for keys" <|
            single_key_map key = Map.singleton key 42
            grand_parent_1 = GrandParent.Value (Parent.Value (Child.Value 2))
            grand_parent_2 = GrandParent.Value (Parent.Value (Child.Value 2.0))

            (single_key_map 2 . at 2.0) . should_equal 42
            (single_key_map -2 . at -2.0) . should_equal 42
            (single_key_map 'ś' . at 's\u0301') . should_equal 42
            (single_key_map 's\u0301' . at 'ś') . should_equal 42
            (single_key_map 'éabc' . at 'e\u0301abc') . should_equal 42
            (single_key_map 'e\u0301abc' . at 'éabc') . should_equal 42
            (single_key_map grand_parent_1 . at grand_parent_2) . should_equal 42
            (single_key_map (Json.parse '{"a": 1}') . at (Json.parse '{"a": 1}')) . should_equal 42
            (single_key_map (Child.Value 1) . at (Child.Value 1.0)) . should_equal 42

        group_builder.specify "should support another hash map with NaN keys as key" <|
            Map.singleton (Map.singleton Number.nan 1) 42 . size . should_equal 1
            Map.singleton (Map.singleton Number.nan 1) 42 . keys . at 0 . keys . to_text . should_equal "[NaN]"
            Map.singleton (Map.singleton Number.nan 1) 42 . keys . at 0 . get Number.nan . should_equal 1
            Map.singleton (Map.singleton Number.nan 1) 42 . at (Map.singleton Number.nan 1) . should_equal 42

        group_builder.specify "should support atoms with custom comparators that violate reflexivity as keys" <|
            k = My_Nan.Value "foo"
            k2 = My_Nan.Value "foo"
            (k==k).should_be_true
            (k==k2).should_be_false
            Meta.is_same_object k k2 . should_be_false
            Meta.is_same_object k k . should_be_true
            m = Map.empty.insert k 10
            m.contains_key k . should_be_true
            m.get k . should_equal 10
            m.contains_key k2 . should_be_false

            m2 = m.insert k2 20
            m2.get k . should_equal 10
            m2.get k2 . should_equal 20
            m2.size . should_equal 2

            m3 = m2.insert k 30
            m3.size . should_equal 2
            m3.get k . should_equal 30

        group_builder.specify "should support atom with custom comparators with complicated hash method" <|
            keys = 0.up_to 500 . map ix->
                value = ["A", "B", "C", "D", "E"].at (ix % 5)
                hash_code = Comparable.from value . hash value
                My_Key.Value hash_code value ix
            distinct_keys = keys.fold Map.empty acc_map->
                item->
                    acc_map.insert item True
            distinct_keys.size . should_equal 5
            distinct_key_values = keys.map (_.value) . fold Map.empty acc_map->
                item->
                    acc_map.insert item True
            distinct_key_values.size . should_equal 5

        group_builder.specify "should not drop warnings from keys" <|
            key = Warning.attach "my_warn" "my_key"
            map = Map.singleton key 42
            (Warning.get_all (map.keys.at 0)).length . should_equal 1

        group_builder.specify "should not drop warnings from values" <|
            val = Warning.attach "my_warn" "my_val"
            map = Map.singleton 42 val
            (Warning.get_all (map.values.at 0)).length . should_equal 1

        group_builder.specify "should convert the whole map to a vector" <|
            m = Map.empty . insert 0 0 . insert 3 -5 . insert 1 2
            m.to_vector.sort on=_.first . should_equal [[0, 0], [1, 2], [3, -5]]

        group_builder.specify "should allow building the map from two vectors" <|
            expected = Map.empty . insert 0 0 . insert 3 -5 . insert 1 2
            Map.from_keys_and_values [0, 3, 1] [0, -5, 2] . should_equal expected

        group_builder.specify "should allow building the map from vector like things" <|
            expected = Map.empty . insert 0 0 . insert 1 -5 . insert 2 2
            Map.from_keys_and_values (0.up_to 3) [0, -5, 2] . should_equal expected

        group_builder.specify "should not allow building with duplicate keys unless explicitly allowed" <|
            expected = Map.empty . insert 0 0 . insert 3 -5 . insert 1 2
            Map.from_keys_and_values [0, 3, 1, 0] [3, -5, 2, 0] . should_fail_with Illegal_Argument
            Map.from_keys_and_values [0, 3, 1, 0] [3, -5, 2, 0] error_on_duplicates=False . should_equal expected

        group_builder.specify "should not allow different length vectors when building" <|
            Map.from_keys_and_values [0, 3, 1] [3, -5, 2, 0] . should_fail_with Illegal_Argument

        group_builder.specify "should allow building the map from a vector" <|
            expected = Map.empty . insert 0 0 . insert 3 -5 . insert 1 2
            vec = [[0, 0], [3, -5], [1, 2]]
            Map.from_vector vec . should_equal expected

        group_builder.specify "should fail when building the map from wrong vector" <|
            Map.from_vector [["A", 1, "B", 2]] . should_fail_with Illegal_Argument

        group_builder.specify "should not allow duplicates when building the map from a vector, unless explicitly allowed" <|
            vec = [[0, 0], [3, -5], [1, 2], [0, 1]]
            m1 = Map.from_vector vec
            m1.should_fail_with Illegal_Argument
            m1.catch.message . should_equal "`Map.from_vector` encountered duplicate key: 0"

            m2 = Map.from_vector vec error_on_duplicates=False
            Problems.assume_no_problems m2
            m2.get 0 . should_equal 1
            m2.get 3 . should_equal -5

        group_builder.specify "should disallow duplicate keys when transforming the map" <|
            m = Map.from_vector [[1, 2], [11, 3]]
            m2 = m.transform (k -> v -> [k % 10, v*2])
            m2.should_fail_with Illegal_Argument
            m2.catch.message . should_equal "`Map.transform` encountered duplicate key: 1"

        group_builder.specify "should allow mapping over values" <|
            m = Map.empty . insert 1 2 . insert 2 4
            expected = Map.empty . insert 1 4 . insert 2 8
            m.map (v -> v*2) . should_equal expected

        group_builder.specify "should allow mapping over keys" <|
            m = Map.empty . insert 1 2 . insert 2 4
            expected = Map.empty . insert 2 2 . insert 4 4
            m.map_keys (k -> k*2) . should_equal expected

        group_builder.specify "should allow mapping with keys" <|
            m = Map.empty . insert 1 2 . insert 2 4
            expected = Map.empty . insert 1 3 . insert 2 6
            m.map_with_key (k -> v -> k + v) . should_equal expected

        group_builder.specify "should allow iterating over each value" <|
            m = Map.empty . insert 1 2 . insert 2 4
            expected_vec = [2, 4]
            vec = Vector.build builder->
                m.each (v -> builder.append v)
            vec . should_equal expected_vec

        group_builder.specify "should allow iterating over each key-value pair" <|
            m = Map.empty . insert 1 2 . insert 2 4
            expected_vec = [3, 6]
            vec = Vector.build builder->
                m.each_with_key (k -> v -> builder.append (k+v))
            vec . should_equal expected_vec

        group_builder.specify "should allow folding over the values" <|
            m = Map.empty . insert 1 2 . insert 2 4
            m.fold 0 (+) . should_equal 6

        group_builder.specify "should allow folding over the key-value pairs" <|
            m = Map.empty . insert 1 2 . insert 2 4
            m.fold_with_key 0 (l -> k -> v -> l + k + v) . should_equal 9

        group_builder.specify "should be able to add a Nothing key to the map of Text" <|
            m = Map.empty . insert "A" 2 . insert Nothing 1 . insert "B" 3
            m.at "A" . should_equal 2
            m.at "B" . should_equal 3
            m.at Nothing . should_equal 1

        group_builder.specify "should be able to add a Nothing key to the map of Integer" <|
            m = Map.empty . insert 100 2 . insert Nothing 1 . insert 200 3
            m.at 100 . should_equal 2
            m.at 200 . should_equal 3
            m.at Nothing . should_equal 1

    suite_builder.group "Polyglot keys and values" group_builder->
        group_builder.specify "should support polyglot keys" <|
            map = Map.singleton (js_str "A") 42
            map.size.should_equal 1
            map.get "A" . should_equal 42
            map.get (js_str "A") . should_equal 42

        group_builder.specify "should support host objects as keys" <|
            # java.nio.path.Path has proper implementation of hashCode
            map = Map.singleton (File_Utils.toPath "/home/user/file.txt") 42
            map.get "X" . should_equal Nothing
            map.get "A" . should_equal Nothing
            map.get (File_Utils.toPath "/home/user/file.txt") . should_equal 42

        group_builder.specify "should support Python objects as keys" pending=pending_python_missing <|
            py_obj = py_wrapper 42
            map = Map.singleton py_obj "Value"
            map.get py_obj . should_equal "Value"

        group_builder.specify "should support Python objects as values" pending=pending_python_missing <|
            map = Map.singleton "A" (py_wrapper 42)
            map.get "A" . data . should_equal 42

        group_builder.specify "should insert entries to a polyglot map" pending=pending_python_missing <|
            dict = py_dict_from_vec ["A", 1, "B", 2]
            dict.insert "C" 3 . keys . sort . should_equal ["A", "B", "C"]

        group_builder.specify "should remove entries from a polyglot map" pending=pending_python_missing <|
            dict = py_dict_from_vec ["A", 1, "B", 2]
            dict.remove "B" . to_vector . should_equal [["A", 1]]

    suite_builder.group "non-linear inserts" group_builder->
        group_builder.specify "should handle inserts with different keys" <|
            m1 = Map.singleton "A" 1
            m2 = m1.insert "B" 2
            m3 = m1.insert "C" 3
            m2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]]
            m3.to_vector.sort on=_.first . should_equal [["A", 1], ["C", 3]]

        group_builder.specify "should handle inserts with same keys (1)" <|
            m1 = Map.singleton "A" 1
            m2 = m1.insert "A" 2
            m3 = m1.insert "A" 3
            m4 = m1.insert "B" 4
            m2.to_vector.sort on=_.first . should_equal [["A", 2]]
            m3.to_vector.sort on=_.first . should_equal [["A", 3]]
            m4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 4]]

        group_builder.specify "should handle inserts with same keys (2)" <|
            m1 = Map.singleton "foo" 1
            m2 = m1.insert "baz" 2
            m3 = m2.insert "foo" 3
            m1.to_vector.sort on=_.first . should_equal [['foo', 1]]
            m2.to_vector.sort on=_.first . should_equal [['baz', 2], ['foo', 1]]
            m3.to_vector.sort on=_.first . should_equal [['baz', 2], ['foo', 3]]

        group_builder.specify "should handle inserts with same keys (3)" <|
            m1 = Map.singleton "A" 1
            m2 = m1.insert "B" 2
            m3 = m2.insert "A" 3
            m4 = m2.insert "C" 4
            m1.to_vector.sort on=_.first . should_equal [["A", 1]]
            m2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]]
            m3.to_vector.sort on=_.first . should_equal [["A", 3], ["B", 2]]
            m4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 4]]

        group_builder.specify "should handle inserts with same keys (4)" <|
            m1 = Map.singleton "A" 1
            m2 = m1.insert "B" 2
            m3 = m2.insert "C" 3
            m4 = m2.insert "D" 4
            m2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]]
            m3.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]]
            m4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["D", 4]]

        group_builder.specify "should handle inserts with same keys (5)" <|
            m1 = Map.singleton "A" 1
            m2 = m1.insert "B" 2
            m3 = m2.insert "A" 3
            m4 = m2.insert "A" 4
            m2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]]
            m3.to_vector.sort on=_.first . should_equal [["A", 3], ["B", 2]]
            m4.to_vector.sort on=_.first . should_equal [["A", 4], ["B", 2]]

        group_builder.specify "should handle inserts with same keys (6)" <|
            m1 = Map.singleton "A" 1
            m2 = m1.insert "B" 2
            m3 = m2.insert "C" 3
            m4 = m2.insert "A" 4
            m2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]]
            m3.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]]
            m4.to_vector.sort on=_.first . should_equal [["A", 4], ["B", 2]]

        group_builder.specify "should handle inserts with same keys (7)" <|
            m1 = Map.singleton "A" 1
            m2 = m1.insert "B" 2
            m3 = m2.insert "C" 3
            m4 = m3.insert "D" 4
            m5 = m2.insert "A" 5
            m2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]]
            m3.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]]
            m4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3], ["D", 4]]
            m5.to_vector.sort on=_.first . should_equal [["A", 5], ["B", 2]]

        group_builder.specify "should handle inserts with same keys (8)" <|
            m1 = Map.singleton "A" 1
            m2 = m1.insert "B" 2
            m3 = m2.insert "C" 3
            m4 = m3.insert "A" 4
            m5 = m2.insert "A" 5
            m2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]]
            m3.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]]
            m4.to_vector.sort on=_.first . should_equal [["A", 4], ["B", 2], ["C", 3]]
            m5.to_vector.sort on=_.first . should_equal [["A", 5], ["B", 2]]

        group_builder.specify "should handle inserts with same keys (9)" <|
            m1 = Map.singleton "A" 1
            m2 = m1.insert "B" 2
            m3 = m2.insert "A" 3
            m4 = m2.insert "B" 4
            m5 = m2.insert "C" 5
            m2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]]
            m3.to_vector.sort on=_.first . should_equal [["A", 3], ["B", 2]]
            m4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 4]]
            m5.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 5]]

        group_builder.specify "should handle inserts with same keys (10)" <|
            m1 = Map.singleton "A" 1
            m2 = m1.insert "B" 2
            m3 = m2.insert "C" 3
            m4 = m2.insert "D" 4
            m5 = m2.insert "E" 5
            m2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]]
            m3.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]]
            m4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["D", 4]]
            m5.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["E", 5]]

    suite_builder.group "Polyglot hash maps" group_builder->
        group_builder.specify "should pass maps as immutable maps to other langs" pending=pending_python_missing <|
            map = Map.singleton "A" 1
            # Python's KeyError should be raised
            Test.expect_panic_with (py_update_dict map "A" 2) Any
            map.get "A" . should_equal 1

        group_builder.specify "should treat JavaScript maps as Enso maps" <|
            js_dict = js_dict_from_vec ["A", 1, "B", 2]
            map = js_dict.insert "C" 3
            js_dict.to_vector.should_equal [["A", 1], ["B", 2]]
            map.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]]

        group_builder.specify "should treat Java Map as Enso map" <|
            sort_by_keys vec = vec.sort by=x-> y-> Ordering.compare x.first y.first
            jmap = JavaMap.of "A" 1 "B" 2
            (sort_by_keys jmap.to_vector) . should_equal [["A", 1], ["B", 2]]
            (sort_by_keys (jmap.insert "C" 3 . to_vector)) . should_equal [["A", 1], ["B", 2], ["C", 3]]

        group_builder.specify "should treat Python dicts as Enso maps" pending=pending_python_missing <|
            py_dict = py_dict_from_vec ["A", 1, "B", 2]
            map = py_dict.insert "C" 3
            py_dict.not_empty . should_be_true
            py_dict.to_vector . should_contain_the_same_elements_as [["A", 1], ["B", 2]]
            map.to_vector . should_contain_the_same_elements_as [["A", 1], ["B", 2], ["C", 3]]
            py_empty_dict.is_empty.should_be_true
            py_empty_dict.insert "A" 1 . insert "A" 2 . get "A" . should_equal 2

        group_builder.specify "should be able to remove entries" pending=pending_python_missing <|
            py_dict_from_vec ["A", 1, "B", 2] . remove "A" . size . should_equal 1
            py_dict_from_vec ["A", 1, "B", 2] . remove "A" . get "B" . should_equal 2

        group_builder.specify "should be able to remove NaN keys" pending=pending_python_missing <|
            py_dict_from_vec [Number.nan, 1] . remove Number.nan . size . should_equal 0

        group_builder.specify "should pass maps with null keys to Python and back" pending=pending_python_missing <|
            # Python supports None as keys, Enso support Nothing as keys
            py_dict = py_dict_from_map (Map.singleton Nothing 42)
            py_dict.get Nothing . should_equal 42
            py_dict.insert "A" 23 . get Nothing . should_equal 42
            py_dict.insert Nothing 23 . get Nothing . should_equal 23

        group_builder.specify "should treat Enso maps as Python dicts when passed to Python" pending=pending_python_missing <|
            map1 = Map.empty.insert "A" 1 . insert "B" 2
            py_vec_from_map map1 . should_contain_the_same_elements_as [["A", 1], ["B", 2]]
            map2 = Map.empty.insert "A" 1 . insert Nothing 2
            py_vec_from_map map2 . should_contain_the_same_elements_as [["A", 1], [Nothing, 2]]


add_common_specs suite_builder prefix:Text (pending : (Text | Nothing)) (empty_map_fn : (Nothing -> Map)) =
    # Not on a single line - empty_map is a method, not a variable
    empty_map =
        empty_map_fn Nothing

    suite_builder.group prefix+": Common polyglot Map operations" pending=pending group_builder->
        group_builder.specify "should get the default comparator for polyglot maps" <|
            Comparable.from empty_map . should_equal Default_Comparator

        group_builder.specify "should compare two hash maps" <|
            (empty_map.insert "a" 1).should_equal (empty_map.insert "a" 1)
            (empty_map.insert "b" 2).should_not_equal (empty_map.insert "a" 1)
            empty_map.should_equal empty_map
            empty_map.should_not_equal (empty_map.insert "a" 1)
            (empty_map.insert "a" 1 . insert "b" 2).should_equal (empty_map.insert "b" 2 . insert "a" 1)

        group_builder.specify "should allow checking for non emptiness" <|
            non_empty = empty_map . insert "foo" 1234
            empty_map.not_empty . should_be_false
            non_empty.not_empty . should_be_true

        group_builder.specify "should allow checking its size" <|
            non_empty = empty_map.insert "a" "b" . insert "x" "y"
            empty_map.size . should_equal 0
            non_empty.size . should_equal 2

        group_builder.specify "should allow checking for emptiness" <|
            non_empty = empty_map . insert "foo" 1234
            empty_map.is_empty . should_be_true
            non_empty.is_empty . should_be_false

        group_builder.specify "should handle incomparable values as keys" <|
            empty_map.insert Number.nan 1 . insert Number.nan 2 . get Number.nan . should_equal 2

        group_builder.specify "should handle Nothing as values" <|
            empty_map.insert 1 Nothing . at 1 . should_equal Nothing
            empty_map.insert Nothing Nothing . at Nothing . should_equal Nothing

        group_builder.specify "should support rewriting values with same keys" <|
            map = Map.empty.insert "a" 1 . insert "a" 42
            map.size.should_equal 1
            map.get "a" . should_equal 42

        group_builder.specify "should allow storing atoms as values" <|
            json = Json.parse '{"a": 1}'
            pair = Pair.new "first" "second"
            map = Map.empty.insert 0 json . insert 1 pair
            map.get 0 . should_equal json
            map.get 1 . should_equal pair

        group_builder.specify "should support NaN as keys" <|
            empty_map.insert Number.nan 1 . contains_key Number.nan . should_be_true
            empty_map.insert Number.nan 1 . values . should_equal [1]
            empty_map.insert Number.nan 1 . insert Number.nan 2 . contains_key Number.nan . should_be_true
            empty_map.insert Number.nan 1 . insert Number.nan 2 . values . should_equal [2]
            empty_map.insert Number.nan 1 . insert "key" 2 . insert Number.nan 3 . contains_key Number.nan . should_be_true
            empty_map.insert Number.nan 1 . insert "key" 2 . insert Number.nan 3 . contains_key "key" . should_be_true
            empty_map.insert Number.nan 1 . insert "key" 2 . insert Number.nan 3 . at Number.nan . should_equal 3
            empty_map.insert Number.nan 1 . insert "key" 2 . insert Number.nan 3 . at "key" . should_equal 2
            empty_map.insert Number.nan 1 . insert Number.nan Number.nan . at Number.nan . to_text . should_equal "NaN"
            empty_map.insert Number.nan 1 . insert Number.nan Number.nan . remove Number.nan . size . should_equal 0

        group_builder.specify "should support arbitrary atoms as keys" <|
            map = empty_map . insert (Pair.new "one" "two") 42
            (map.get (Pair.new "one" "two")).should_equal 42
            (map.get (Pair.new "A" "B")).should_equal Nothing
            (map.get (Pair.new "two" "two")).should_equal Nothing

        group_builder.specify "should support vectors as keys" <|
            map = empty_map . insert [1, "a", 2] "Value"
            map.size.should_equal 1
            map.get [1, "a", 2] . should_equal "Value"

        group_builder.specify "should support dates as keys" <|
            map = empty_map.insert (Date.new 1993) 1 . insert (Date.new 1993 2 5) 2 . insert (Date_Time.new 1993 2 5 13 45) 3
            map.size.should_equal 3
            map.get (Date.new 1993 6 7) . should_equal Nothing
            map.get (Date.new 1993) . should_equal 1
            map.get (Date_Time.new 1993) . should_equal Nothing
            map.get (Date.new 1993 2 5) . should_equal 2
            map.get (Date_Time.new 1993 2 5) . should_equal Nothing
            map.get (Date_Time.new 1993 2 5 13 45) . should_equal 3

        group_builder.specify "should support another hash map as key" <|
            key_map = empty_map.insert (Pair.new "one" "two") 42
            map = empty_map.insert key_map 23
            map.size.should_equal 1
            (map.get "A").should_equal Nothing
            (map.get key_map).should_equal 23
            (map.get map).should_equal Nothing

        group_builder.specify "should handle keys with standard equality semantics" <|
            map = empty_map.insert 2 "Hello"
            (map.get 2).should_equal "Hello"
            (map.get 2.0).should_equal "Hello"
            (empty_map.insert 2 "Hello").should_equal (empty_map.insert 2.0 "Hello")

        group_builder.specify "should handle Nothing as keys" <|
            empty_map.insert Nothing 3 . get Nothing . should_equal 3
            empty_map.insert Nothing 1 . insert Nothing 2 . get Nothing . should_equal 2
            empty_map.insert Nothing 1 . should_equal (empty_map.insert Nothing 1)
            empty_map.insert Nothing 1 . insert Nothing 2 . at Nothing . should_equal 2

        group_builder.specify "should handle JavaScript null as keys" <|
            empty_map.insert js_null 1 . at Nothing . should_equal 1

        group_builder.specify "should handle Python None as keys" pending=pending_python_missing <|
            empty_map.insert py_none 1 . at Nothing . should_equal 1

        group_builder.specify "should define a well-defined text conversion" <|
            m = empty_map . insert 0 0 . insert 3 -5 . insert 1 2
            m.to_text . should_contain "0=0"
            m.to_text . should_contain "3=-5"
            m.to_text . should_contain "1=2"

        group_builder.specify "should define structural equality" <|
            map_1 = empty_map . insert "1" 2 . insert "2" "1"
            map_2 = empty_map . insert "1" 2 . insert "2" "1"
            map_3 = empty_map
            map_1==map_2 . should_be_true
            map_1==map_3 . should_be_false
            map_2==map_3 . should_be_false

        group_builder.specify "should allow inserting and looking up values" <|
            m = empty_map . insert "foo" 134 . insert "bar" 654 . insert "baz" "spam"
            m.at "foo" . should_equal 134
            m.at "bar" . should_equal 654
            m.at "baz" . should_equal "spam"
            (m.at "nope").should_fail_with No_Such_Key

        group_builder.specify "should support get" <|
            m = empty_map . insert 2 3
            m.get 2 0 . should_equal 3
            m.get 1 10 . should_equal 10
            m.get 2 (Panic.throw "missing") . should_equal 3

        group_builder.specify "should allow getting a vector of the keys" <|
            m = empty_map . insert 1 2 . insert 2 4
            m.keys . should_equal [1, 2]

        group_builder.specify "should allow getting a vector of the values" <|
            m = empty_map . insert 1 2 . insert 2 4
            m.values . should_equal [2, 4]

        group_builder.specify "should support contains_key" <|
            m = empty_map . insert 2 3
            m.contains_key 2 . should_be_true
            m.contains_key 1 . should_be_false

        group_builder.specify "should allow transforming the map" <|
            m = empty_map . insert 1 2 . insert 2 4
            expected = empty_map . insert "1" 4 . insert "2" 8
            m.transform (k -> v -> [k.to_text, v*2]) . should_equal expected

        group_builder.specify "should be able to remove entries (1)" <|
            m1 = empty_map.insert "A" 1 . insert "B" 2
            m2 = m1.remove "B"
            m2.get "A" . should_equal 1
            m2.remove "A" . should_equal empty_map
            m1.remove "foo" . should_fail_with No_Such_Key

        group_builder.specify "should be able to remove entries (2)" <|
            m1 = empty_map.insert "A" 1
            m2 = m1.insert "B" 2
            m3 = m1.insert "C" 3
            m2.remove "A" . to_vector . should_equal [["B", 2]]
            m2.remove "B" . to_vector . should_equal [["A", 1]]
            m3.remove "A" . to_vector . should_equal [["C", 3]]
            m3.remove "C" . to_vector . should_equal [["A", 1]]

        group_builder.specify "should be able to remove entries (3)" <|
            m = empty_map.insert "A" 1 . insert "B" 2 . insert "C" 3
            m.remove "B" . should_equal (empty_map.insert "A" 1 . insert "C" 3)


main filter=Nothing =
    suite = Test.build suite_builder->
        add_specs suite_builder
    suite.run_with_filter filter
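The deleted spec above drives key hashing through custom comparators registered with `Comparable.from`; a hedged sketch of that pattern with hypothetical names (`Color_Key`, `Color_Key_Comparator`), mirroring `My_Key_Comparator`:

    from Standard.Base import all

    type Color_Key
        Value name:Text

    type Color_Key_Comparator
        # Keys compare equal on `name` only; `hash` must agree with `compare`.
        compare x y = if x.name == y.name then Ordering.Equal else Nothing
        hash x = x.name.length

    Comparable.from (_:Color_Key) = Color_Key_Comparator

    main =
        d = Dictionary.empty . insert (Color_Key.Value "red") 1
        IO.println (d.at (Color_Key.Value "red"))   # 1 - distinct atoms, equal keys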
@ -393,7 +393,7 @@ add_specs suite_builder =
        group_builder.specify "should provide access to info about group names" <|
            data.pattern.named_groups.sort . should_equal ["empty", "letters"]
-            data.pattern.group_nums_to_names . should_equal <| Map.from_vector [[2, "letters"],[4, "empty"]]
+            data.pattern.group_nums_to_names . should_equal <| Dictionary.from_vector [[2, "letters"],[4, "empty"]]

        group_builder.specify "should return the results of all named groups" <|
            groups = data.match.named_groups
@ -133,8 +133,8 @@ add_specs suite_builder =
        data.root.at 3 . attribute "does_not_exist" if_missing="if_missing" . should_equal "if_missing"

    group_builder.specify "Can get element an attribute map" <|
-        data.root.at 2 . attributes . should_equal (Map.from_vector [["studentId", "1000"], ["year", "2"]])
+        data.root.at 2 . attributes . should_equal (Dictionary.from_vector [["studentId", "1000"], ["year", "2"]])
-        data.root.at 3 . attributes . should_equal (Map.from_vector [["studentId", "1001"], ["year", "3"]])
+        data.root.at 3 . attributes . should_equal (Dictionary.from_vector [["studentId", "1001"], ["year", "3"]])

    group_builder.specify "Can get nodes via xpath" <|
        classes = data.root.get_xpath "/class"
@ -30,12 +30,13 @@ import project.Data.Array_Proxy_Spec
import project.Data.Bool_Spec
import project.Data.Base_64_Spec
import project.Data.Decimal_Spec
+import project.Data.Dictionary_Spec
import project.Data.Function_Spec
+import project.Data.Hashset_Spec
import project.Data.Interval_Spec
import project.Data.Json_Spec
import project.Data.List_Spec
import project.Data.Locale_Spec
-import project.Data.Map_Spec
import project.Data.Maybe_Spec
import project.Data.Numbers_Spec
import project.Data.Ordering_Spec

@ -47,7 +48,6 @@ import project.Data.Polyglot_Spec
import project.Data.Problems_Spec
import project.Data.Range_Spec
import project.Data.Regression_Spec
-import project.Data.Set_Spec
import project.Data.Statistics_Spec
import project.Data.Time.Spec as Time_Spec
import project.Data.Vector_Spec

@ -129,8 +129,8 @@ main filter=Nothing =
    Json_Spec.add_specs suite_builder
    List_Spec.add_specs suite_builder
    Locale_Spec.add_specs suite_builder
-    Map_Spec.add_specs suite_builder
+    Dictionary_Spec.add_specs suite_builder
-    Set_Spec.add_specs suite_builder
+    Hashset_Spec.add_specs suite_builder
    Maybe_Spec.add_specs suite_builder
    Meta_Spec.add_specs suite_builder
    Instrumentor_Spec.add_specs suite_builder
@ -42,7 +42,7 @@ add_specs suite_builder =
        req.body.should_equal (Request_Body.Json json)
        req.headers.should_equal [Header.application_json]
    group_builder.specify "should set form body" <|
-        body_form = Map.from_vector [["key", "val"]]
+        body_form = Dictionary.from_vector [["key", "val"]]
        req = Request.get test_uri . with_form body_form
        req.body.should_equal (Request_Body.Form_Data body_form)
        req.headers.should_equal [Header.application_x_www_form_urlencoded]
@ -288,14 +288,14 @@ add_specs suite_builder =

    group_builder.specify "Can perform a url-encoded form POST" <| Test.with_retries <|
        test_file = enso_project.data / "sample.txt"
-        form_data = Map.from_vector [["key", "val"], ["a_file", test_file]]
+        form_data = Dictionary.from_vector [["key", "val"], ["a_file", test_file]]
        response = Data.post url_post (Request_Body.Form_Data form_data url_encoded=True)
        response.at "headers" . at "Content-Type" . should_equal "application/x-www-form-urlencoded"
        response.at "data" . replace "%0D%" "%" . should_equal 'key=val&a_file=Cupcake+ipsum+dolor+sit+amet.+Caramels+tootsie+roll+cake+ice+cream.+Carrot+cake+apple+pie+gingerbread+chocolate+cake+pudding+tart+souffl%C3%A9+jelly+beans+gummies.%0A%0ATootsie+roll+chupa+chups+muffin+croissant+fruitcake+jujubes+danish+cotton+candy+danish.+Oat+cake+chocolate+fruitcake+halvah+icing+oat+cake+toffee+powder.+Pastry+drag%C3%A9e+croissant.+Ice+cream+candy+canes+dessert+muffin+sugar+plum+tart+jujubes.%0A'

    group_builder.specify "Can perform a multipart form POST" <| Test.with_retries <|
        test_file = enso_project.data / "sample.png"
-        form_data = Map.from_vector [["key", "val"], ["a_file", test_file]]
+        form_data = Dictionary.from_vector [["key", "val"], ["a_file", test_file]]
        response = Data.post url_post (Request_Body.Form_Data form_data)
        response_json = response
        response_json.at "headers" . at "Content-Type" . should_start_with "multipart/form-data; boundary="
@ -20,7 +20,7 @@ sum_recur n = if n == 0 then 0 else 1 + sum_recur n-1

build_map size =
    rand = Java_Random.new
-    0.up_to size . fold Map.empty (m -> i -> m.insert (rand.nextInt 10000) i)
+    0.up_to size . fold Dictionary.empty (m -> i -> m.insert (rand.nextInt 10000) i)

type Data
    Value ~list ~vec ~vec_float

@ -44,13 +44,13 @@ collect_benches = Bench.build builder->
    builder.group ("Enso_Hash_Map_" + n.to_text) options group_builder->
        # Scenario similar to what is done in distinct
        group_builder.specify "Enso_Incremental" <|
-            Scenario.Instance (_ -> Map.empty) . run_distinct data.ints
+            Scenario.Instance (_ -> Dictionary.empty) . run_distinct data.ints
        group_builder.specify "Java_Incremental" <|
            Scenario.Instance (_ -> JavaHashMapWrapper.new) . run_distinct data.ints

        # A scenario similar to what is done in add_row_number with grouping
        group_builder.specify "Enso_Replacement" <|
-            Scenario.Instance (_ -> Map.empty) . run_count_keys data.ints
+            Scenario.Instance (_ -> Dictionary.empty) . run_count_keys data.ints
        group_builder.specify "Java_Replacement" <|
            Scenario.Instance (_ -> JavaHashMapWrapper.new) . run_count_keys data.ints
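The benchmark's replacement scenario folds a dictionary over the input while updating per-key state; a hedged sketch of that counting pattern (`count_keys` is a hypothetical helper, not the benchmark's own code):

    from Standard.Base import all

    count_keys vec =
        # Increment a per-key counter, as in the Enso_Replacement scenario.
        vec.fold Dictionary.empty acc-> k->
            acc.insert k (acc.get k 0 + 1)

    main = IO.println (count_keys [1, 2, 2, 3] . at 2)   # 2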
@ -49,8 +49,8 @@ add_specs suite_builder = suite_builder.group "Examples" group_builder->
    group_builder.specify "should provide a basic cons list" <|
        Examples.list.length . should_equal 3

-    group_builder.specify "should provide a basic KV map" <|
+    group_builder.specify "should provide a basic KV dictionary" <|
-        Examples.map.size . should_equal 3
+        Examples.dictionary.size . should_equal 3

    group_builder.specify "should provide a type with no methods" <|
        Examples.No_Methods.should_be_a Examples.No_Methods
@ -583,14 +583,14 @@ add_snowflake_specs suite_builder create_connection_fn db_name =
    Common_Table_Operations.Main.add_specs suite_builder setup

## PRIVATE
-supported_replace_params : Set Replace_Params
+supported_replace_params : Hashset Replace_Params
supported_replace_params =
    e0 = [Replace_Params.Value Text Case_Sensitivity.Default False, Replace_Params.Value Text Case_Sensitivity.Default True, Replace_Params.Value Text Case_Sensitivity.Sensitive False]
    e1 = [Replace_Params.Value Text Case_Sensitivity.Sensitive True, Replace_Params.Value Text Case_Sensitivity.Insensitive False, Replace_Params.Value Text Case_Sensitivity.Insensitive True]
    e2 = [Replace_Params.Value Regex Case_Sensitivity.Default False, Replace_Params.Value Regex Case_Sensitivity.Default True, Replace_Params.Value Regex Case_Sensitivity.Sensitive False]
    e3 = [Replace_Params.Value Regex Case_Sensitivity.Sensitive True, Replace_Params.Value Regex Case_Sensitivity.Insensitive False, Replace_Params.Value Regex Case_Sensitivity.Insensitive True]
    e4 = [Replace_Params.Value DB_Column Case_Sensitivity.Default False, Replace_Params.Value DB_Column Case_Sensitivity.Sensitive False]
-    Set.from_vector <| e0 + e1 + e2 + e3 + e4
+    Hashset.from_vector <| e0 + e1 + e2 + e3 + e4

add_table_specs suite_builder =
    case create_connection_builder of
@ -1297,7 +1297,7 @@ add_specs suite_builder setup =
            input_type = Meta.type_of term
            params = Replace_Params.Value input_type case_sensitivity only_first
            supported_replace_params = setup.test_selection.supported_replace_params
-            supported_replace_params . should_be_a Set
+            supported_replace_params . should_be_a Hashset
            are_params_supported = supported_replace_params.contains params
            case are_params_supported of
                True -> column.text_replace term new_text case_sensitivity only_first . to_vector . should_equal expected
@ -186,7 +186,7 @@ add_specs suite_builder setup =
        problems = [Duplicate_Output_Column_Names.Error ["x Agg1", "y Agg1", "z Agg1"]]
        Problems.test_problem_handling action problems tester

-        table3 = data.table2.rename_columns (Map.from_vector [["Group", "x"]])
+        table3 = data.table2.rename_columns (Dictionary.from_vector [["Group", "x"]])
        action3 = table3.cross_tab ["x"] "Key" on_problems=_
        tester3 table =
            table.column_names . should_equal ["x", "x 1", "y", "z"]
@ -56,7 +56,7 @@ add_specs suite_builder setup =
|
|||||||
|
|
||||||
group_builder.specify "should be able to replace values via a lookup table provided as a Map" <|
|
group_builder.specify "should be able to replace values via a lookup table provided as a Map" <|
|
||||||
table = table_builder [['x', [1, 2, 3, 4, 2]], ['y', ['a', 'b', 'c', 'd', 'e']]]
|
table = table_builder [['x', [1, 2, 3, 4, 2]], ['y', ['a', 'b', 'c', 'd', 'e']]]
|
||||||
lookup_table = Map.from_vector [[2, 20], [1, 10], [4, 40], [3, 30]]
|
lookup_table = Dictionary.from_vector [[2, 20], [1, 10], [4, 40], [3, 30]]
|
||||||
expected = table_builder [['x', [10, 20, 20, 30, 40]], ['y', ['a', 'b', 'e', 'c', 'd']]]
|
expected = table_builder [['x', [10, 20, 20, 30, 40]], ['y', ['a', 'b', 'e', 'c', 'd']]]
|
||||||
result = table.replace lookup_table 'x' . sort ["x", "y"]
|
result = table.replace lookup_table 'x' . sort ["x", "y"]
|
||||||
result . should_equal expected
|
result . should_equal expected
|
||||||
@ -158,25 +158,25 @@ add_specs suite_builder setup =
|
|||||||

group_builder.specify "should accept an empty lookup map, if allow_unmatched_rows=True, but expect a warning" <|
table = table_builder [['x', [1, 2, 3, 4, 2]], ['y', ['a', 'b', 'c', 'd', 'e']]]
- t = table.replace Map.empty 'x'
+ t = table.replace Dictionary.empty 'x'
t . should_equal table
Problems.expect_warning (Empty_Error.Error "lookup_table") t

group_builder.specify "should throw an error on an empty lookup map and non-empty base table if allow_unmatched_rows=False" <|
table = table_builder [['x', [1, 2, 3, 4, 2]], ['y', ['a', 'b', 'c', 'd', 'e']]] . sort ['x']
- t = table.replace Map.empty 'x' allow_unmatched_rows=False
+ t = table.replace Dictionary.empty 'x' allow_unmatched_rows=False
t . should_fail_with Unmatched_Rows_In_Lookup
t.catch.example_key_values . should_equal [1]

group_builder.specify "should accept an empty lookup map if the base table is also empty, but expect a warning" <|
table = table_builder_typed [['x', []], ['z', []]] Value_Type.Integer
- t = table.replace Map.empty 'x'
+ t = table.replace Dictionary.empty 'x'
t . should_equal table
Problems.expect_warning (Empty_Error.Error "lookup_table") t

group_builder.specify "should not allow from/to_column to be specified if the argument is a Map" <|
table = table_builder [['x', [1, 2, 3, 4, 2]], ['y', ['a', 'b', 'c', 'd', 'e']]]
- lookup_table = Map.from_vector [[2, 20], [1, 10], [4, 40], [3, 30]]
+ lookup_table = Dictionary.from_vector [[2, 20], [1, 10], [4, 40], [3, 30]]
table.replace lookup_table 'x' from_column=8 . should_fail_with Illegal_Argument
table.replace lookup_table 'x' to_column=9 . should_fail_with Illegal_Argument
table.replace lookup_table 'x' from_column=8 to_column=9 . should_fail_with Illegal_Argument
@@ -28,7 +28,7 @@ type Data
add_specs suite_builder setup =
prefix = setup.prefix
create_connection_fn = setup.create_connection_func
- suite_builder.group prefix+"Table.make_table_from_map/vectors" group_builder->
+ suite_builder.group prefix+"Table.make_table_from_dictionary/vectors" group_builder->
data = Data.setup setup create_connection_fn

group_builder.teardown <|
@@ -66,21 +66,21 @@ add_specs suite_builder setup =
vecs2 = [[], [3, 4, 5], [6, 7, 8]]
data.dummy_table.make_table_from_vectors vecs2 ['x', 'y', 'z'] . read . should_fail_with Illegal_Argument

- group_builder.specify "should be able to create a literal table from a map" <|
+ group_builder.specify "should be able to create a literal table from a dictionary" <|
- map = Map.from_vector [['x', 1], ['y', 2], ['z', 3]]
+ map = Dictionary.from_vector [['x', 1], ['y', 2], ['z', 3]]
- t = data.dummy_table.make_table_from_map map 'k' 'v' . sort 'v'
+ t = data.dummy_table.make_table_from_dictionary map 'k' 'v' . sort 'v'
t.at 'k' . to_vector . should_equal ['x', 'y', 'z']
t.at 'v' . to_vector . should_equal [1, 2, 3]

if setup.is_database then
- group_builder.specify "should not be able to create a literal table from an empty map" <|
+ group_builder.specify "should not be able to create a literal table from an empty dictionary" <|
- map = Map.empty
+ map = Dictionary.empty
- data.dummy_table.make_table_from_map map 'k' 'v' . should_fail_with Illegal_Argument
+ data.dummy_table.make_table_from_dictionary map 'k' 'v' . should_fail_with Illegal_Argument

if setup.is_database.not then
- group_builder.specify "should be able to create a literal table from an empty map" <|
+ group_builder.specify "should be able to create a literal table from an empty dictionary" <|
- map = Map.empty
+ map = Dictionary.empty
- t = data.dummy_table.make_table_from_map map 'k' 'v'
+ t = data.dummy_table.make_table_from_dictionary map 'k' 'v'
t.row_count . should_equal 0

if setup.is_database then
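
make_table_from_map is renamed to make_table_from_dictionary; it still builds a two-column literal table from the dictionary's keys and values. A minimal sketch, where dummy_table stands in for any table on the target connection (an assumption; the tests above use data.dummy_table):

    kv = Dictionary.from_vector [['x', 1], ['y', 2], ['z', 3]]
    # 'k' and 'v' become the key and value column names of the new table.
    t = dummy_table.make_table_from_dictionary kv 'k' 'v'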
@@ -481,10 +481,10 @@ add_specs suite_builder setup =

t1 = table_builder [["alpha", [1]], ["name=123", [2]], ["name= foo bar", [3]]]
expect_column_names ["alpha", "key:123", "key: foo bar"] <|
- t1.rename_columns (Map.from_vector [["name=(.*)".to_regex, "key:$1"]])
+ t1.rename_columns (Dictionary.from_vector [["name=(.*)".to_regex, "key:$1"]])

group_builder.specify "should work by index" <|
- map = Map.from_vector [[0, "FirstColumn"], [-2, "Another"]]
+ map = Dictionary.from_vector [[0, "FirstColumn"], [-2, "Another"]]
expect_column_names ["FirstColumn", "beta", "Another", "delta"] <|
data.table.rename_columns map
@@ -504,12 +504,12 @@ add_specs suite_builder setup =
data.table.rename_columns vec

group_builder.specify "should work by name" <|
- map = Map.from_vector [["alpha", "FirstColumn"], ["delta", "Another"]]
+ map = Dictionary.from_vector [["alpha", "FirstColumn"], ["delta", "Another"]]
expect_column_names ["FirstColumn", "beta", "gamma", "Another"] <|
data.table.rename_columns map

group_builder.specify "should work by mixed Map" <|
- map = Map.from_vector [["alpha", "FirstColumn"], [-1, "Another"]]
+ map = Dictionary.from_vector [["alpha", "FirstColumn"], [-1, "Another"]]
expect_column_names ["FirstColumn", "beta", "gamma", "Another"] <|
data.table.rename_columns map
@@ -552,17 +552,17 @@ add_specs suite_builder setup =
fail_2.catch.message.should_contain "materialize"

group_builder.specify "should work by name case-insensitively" <|
- map = Map.from_vector [["ALPHA", "FirstColumn"], ["DELTA", "Another"]]
+ map = Dictionary.from_vector [["ALPHA", "FirstColumn"], ["DELTA", "Another"]]
expect_column_names ["FirstColumn", "beta", "gamma", "Another"] <|
data.table.rename_columns map Case_Sensitivity.Insensitive

group_builder.specify "should work by name using regex" <|
- map = Map.from_vector [["a.*".to_regex, "FirstColumn"]]
+ map = Dictionary.from_vector [["a.*".to_regex, "FirstColumn"]]
expect_column_names ["FirstColumn", "beta", "gamma", "delta"] <|
data.table.rename_columns map

group_builder.specify "should work by name using regex substitution" <|
- map = Map.from_vector [["a(.*)".to_regex, "$1"]]
+ map = Dictionary.from_vector [["a(.*)".to_regex, "$1"]]
expect_column_names ["lpha", "beta", "gamma", "delta"] <|
data.table.rename_columns map
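
As these hunks show, rename_columns now takes a Dictionary whose keys may be column indices, exact names, or regexes (with $n substitution available in the replacement name). A minimal sketch combining the three selector kinds, assuming a table with columns alpha, beta, gamma, delta:

    # Index 0, the exact name "delta", and a regex each select a column.
    mapping = Dictionary.from_vector [[0, "first"], ["delta", "last"], ["gam(.*)".to_regex, "g_$1"]]
    renamed = table.rename_columns mapping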
@@ -591,7 +591,7 @@ add_specs suite_builder setup =

group_builder.specify "should correctly handle problems: unmatched names" <|
weird_name = '.*?-!@#!"'
- map = Map.from_vector [["alpha", "FirstColumn"], ["omicron", "Another"], [weird_name, "Fixed"]]
+ map = Dictionary.from_vector [["alpha", "FirstColumn"], ["omicron", "Another"], [weird_name, "Fixed"]]
action = data.table.rename_columns map error_on_missing_columns=False on_problems=_
tester = expect_column_names ["FirstColumn", "beta", "gamma", "delta"]
err_checker err =
@@ -603,7 +603,7 @@ add_specs suite_builder setup =
err.should_fail_with Missing_Input_Columns

group_builder.specify "should correctly handle problems: out of bounds indices" <|
- map = Map.from_vector [[0, "FirstColumn"], [-1, "Another"], [100, "Boo"], [-200, "Nothing"], [300, "Here"]]
+ map = Dictionary.from_vector [[0, "FirstColumn"], [-1, "Another"], [100, "Boo"], [-200, "Nothing"], [300, "Here"]]
action = data.table.rename_columns map error_on_missing_columns=False on_problems=_
tester = expect_column_names ["FirstColumn", "beta", "gamma", "Another"]
err_checker err =
@@ -615,12 +615,12 @@ add_specs suite_builder setup =
err.should_fail_with Missing_Input_Columns

group_builder.specify "should correctly handle edge-cases: aliased indices" <|
- map1 = Map.from_vector [[1, "FirstColumn"], [-3, "FirstColumn"]]
+ map1 = Dictionary.from_vector [[1, "FirstColumn"], [-3, "FirstColumn"]]
t1 = data.table.rename_columns map1 on_problems=..Report_Error
Problems.assume_no_problems t1
expect_column_names ["alpha", "FirstColumn", "gamma", "delta"] t1

- map2 = Map.from_vector [[1, "FirstColumn"], [-3, "DifferentName!"]]
+ map2 = Dictionary.from_vector [[1, "FirstColumn"], [-3, "DifferentName!"]]
t2 = data.table.rename_columns map2 on_problems=..Report_Error
t2.should_fail_with Ambiguous_Column_Rename
err = t2.catch . inner_error
@@ -629,12 +629,12 @@ add_specs suite_builder setup =

group_builder.specify "should correctly handle edge-cases: aliased selectors" <|
t = table_builder [["alpha", [1,2,3]], ["bet", [4,5,6]]]
- map1 = Map.from_vector [["a.*".to_regex, "AA"], [".*a".to_regex, "AA"]]
+ map1 = Dictionary.from_vector [["a.*".to_regex, "AA"], [".*a".to_regex, "AA"]]
t1 = t.rename_columns map1 on_problems=..Report_Error
Problems.assume_no_problems t1
expect_column_names ["AA", "bet"] t1

- map2 = Map.from_vector [["a.*".to_regex, "StartsWithA"], [".*a".to_regex, "EndsWithA"]]
+ map2 = Dictionary.from_vector [["a.*".to_regex, "StartsWithA"], [".*a".to_regex, "EndsWithA"]]
t2 = t.rename_columns map2 on_problems=..Report_Error
t2.should_fail_with Ambiguous_Column_Rename
err = t2.catch . inner_error
@@ -647,13 +647,13 @@ add_specs suite_builder setup =
This is to show that even if distinct rename patterns match the
same column, if the resulting rename is unambiguous, no error is
raised.
- map3 = Map.from_vector [["a(.*)".to_regex, "$1A"], ["(.*)aa".to_regex, "$1aA"]]
+ map3 = Dictionary.from_vector [["a(.*)".to_regex, "$1A"], ["(.*)aa".to_regex, "$1aA"]]
t4 = t3.rename_columns map3 on_problems=..Report_Error
Problems.assume_no_problems t4
expect_column_names ["aaA", "bbb"] t4

group_builder.specify "should correctly handle problems: invalid names ''" <|
- map = Map.from_vector [[1, ""]]
+ map = Dictionary.from_vector [[1, ""]]
[Problem_Behavior.Ignore, Problem_Behavior.Report_Warning, Problem_Behavior.Report_Error].each pb->
r = data.table.rename_columns map on_problems=pb
r.should_fail_with Invalid_Column_Names
@@ -678,13 +678,13 @@ add_specs suite_builder setup =
Problems.test_problem_handling action problems tester

group_builder.specify "should correctly handle problems: new name is clashing with existing name of existing column" <|
- map = Map.from_vector [["alpha", "beta"]]
+ map = Dictionary.from_vector [["alpha", "beta"]]
action = data.table.rename_columns map on_problems=_
tester = expect_column_names ["beta", "beta 1", "gamma", "delta"]
problems = [Duplicate_Output_Column_Names.Error ["beta"]]
Problems.test_problem_handling action problems tester

- map2 = Map.from_vector [["beta", "alpha"]]
+ map2 = Dictionary.from_vector [["beta", "alpha"]]
action2 = data.table.rename_columns map2 on_problems=_
tester2 = expect_column_names ["alpha 1", "alpha", "gamma", "delta"]
problems2 = [Duplicate_Output_Column_Names.Error ["alpha"]]
@@ -211,7 +211,7 @@ postgres_specific_spec suite_builder create_connection_fn db_name setup =
column/table names and their lengths, this should not be a big
problem usually, so only a warning is issued. It may however lead
to data integrity issues in some very rare edge cases.
- unsupported_encodings = Set.from_vector <|
+ unsupported_encodings = Hashset.from_vector <|
["EUC_JIS_2004", "LATIN6", "LATIN8", "MULE_INTERNAL", "SHIFT_JIS_2004"]

known_encodings.each encoding_name->
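
unsupported_encodings is the same set as before, just constructed with Hashset.from_vector. A minimal sketch of typical use, assuming Hashset kept the old Set's contains method (only from_vector and difference actually appear in this diff):

    unsupported = Hashset.from_vector ["LATIN6", "LATIN8"]
    # Membership test drives which encodings get skipped.
    skip_latin6 = unsupported.contains "LATIN6"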
@@ -706,14 +706,14 @@ add_postgres_specs suite_builder create_connection_fn db_name =
Common_Table_Operations.Main.add_specs suite_builder setup

## PRIVATE
- supported_replace_params : Set Replace_Params
+ supported_replace_params : Hashset Replace_Params
supported_replace_params =
e0 = [Replace_Params.Value Text Case_Sensitivity.Default False, Replace_Params.Value Text Case_Sensitivity.Default True, Replace_Params.Value Text Case_Sensitivity.Sensitive False]
e1 = [Replace_Params.Value Text Case_Sensitivity.Sensitive True, Replace_Params.Value Text Case_Sensitivity.Insensitive False, Replace_Params.Value Text Case_Sensitivity.Insensitive True]
e2 = [Replace_Params.Value Regex Case_Sensitivity.Default False, Replace_Params.Value Regex Case_Sensitivity.Default True, Replace_Params.Value Regex Case_Sensitivity.Sensitive False]
e3 = [Replace_Params.Value Regex Case_Sensitivity.Sensitive True, Replace_Params.Value Regex Case_Sensitivity.Insensitive False, Replace_Params.Value Regex Case_Sensitivity.Insensitive True]
e4 = [Replace_Params.Value DB_Column Case_Sensitivity.Default False, Replace_Params.Value DB_Column Case_Sensitivity.Sensitive False]
- Set.from_vector <| e0 + e1 + e2 + e3 + e4
+ Hashset.from_vector <| e0 + e1 + e2 + e3 + e4

add_table_specs suite_builder =
db_name = Environment.get "ENSO_POSTGRES_DATABASE"
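
Only the annotation and constructor change here; the function still enumerates every Replace_Params combination the Postgres backend supports. A hedged sketch of how a caller might consult it (contains is an assumption, as in the note above):

    params = Replace_Params.Value Text Case_Sensitivity.Default False
    # True when this backend supports the given replace parameterization.
    is_supported = supported_replace_params.contains params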
@@ -356,10 +356,10 @@ sqlite_spec suite_builder prefix create_connection_func =

## PRIVATE
- supported_replace_params : Set Replace_Params
+ supported_replace_params : Hashset Replace_Params
supported_replace_params =
e = [Replace_Params.Value Text Case_Sensitivity.Default False, Replace_Params.Value Text Case_Sensitivity.Sensitive False, Replace_Params.Value Text Case_Sensitivity.Default True, Replace_Params.Value Text Case_Sensitivity.Sensitive True, Replace_Params.Value Text Case_Sensitivity.Insensitive True]
- Set.from_vector e
+ Hashset.from_vector e

## Reference to the database file that ensures the first test that uses it will
clean any leftover files from earlier runs.
@@ -123,8 +123,8 @@ add_specs suite_builder =

group_builder.specify "should be able to infer types for all supported operations" <|
dialect = Dialect.sqlite
- internal_mapping = dialect.dialect_operations.operation_map
+ internal_mapping = dialect.dialect_operations.operations_dict
- operation_type_mapping = SQLite_Type_Mapping.operations_map
+ operation_type_mapping = SQLite_Type_Mapping.operations_dict

operation_type_mapping.keys.sort . should_equal internal_mapping.keys.sort
@@ -562,7 +562,7 @@ add_specs suite_builder make_new_connection prefix persistent_connector=True =
e2.clashing_example_key_values.length . should_equal 1
x = e2.clashing_example_key_values.first
[1, 2, 3].should_contain x
- counts = Map.from_vector [[1, 2], [2, 4], [3, 2]]
+ counts = Dictionary.from_vector [[1, 2], [2, 4], [3, 2]]
e2.clashing_example_row_count . should_equal (counts.at x)
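
counts is now a Dictionary; both lookup styles used in this commit keep their shape. A minimal sketch, assuming get accepts a default for missing keys while at fails on them (consistent with the truncated_names.get calls elsewhere in this diff):

    counts = Dictionary.from_vector [[1, 2], [2, 4], [3, 2]]
    two = counts.at 2
    # get falls back to the supplied default instead of failing.
    zero = counts.get 7 0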
# Will not find clashes if they are not in the first 1000 rows, in Output disabled mode.
@@ -1201,14 +1201,14 @@ test_table_append group_builder (data : Data) source_table_builder target_table_

## If there are some additional tables, we add some timeout to allow
the database to do the cleaning up.
- additional_tables = (Set.from_vector tables_immediately_after).difference (Set.from_vector existing_tables)
+ additional_tables = (Hashset.from_vector tables_immediately_after).difference (Hashset.from_vector existing_tables)
if additional_tables.is_empty then Nothing else
additional_table = additional_tables.to_vector.first

wait_until_temporary_table_is_deleted_after_closing_connection data.connection additional_table
# After the wait we check again and now there should be no additional tables.
tables_after_wait = data.connection.base_connection.get_tables_advanced types=Nothing include_hidden=True . at "Name" . to_vector
- additional_tables_2 = (Set.from_vector tables_after_wait).difference (Set.from_vector existing_tables)
+ additional_tables_2 = (Hashset.from_vector tables_after_wait).difference (Hashset.from_vector existing_tables)
additional_tables_2.to_vector . should_equal []
@@ -116,7 +116,7 @@ add_specs suite_builder =
strategy.make_unique "abc" . should_equal "ab 10"
strategy.make_unique "abc" . should_equal "ab 11"

- strategy.truncated_names . should_be_a Map
+ strategy.truncated_names . should_be_a Dictionary
strategy.truncated_names.get "abcdefgh" . should_equal "abcde"
# abc will contain the entry for the last truncated case
strategy.truncated_names.get "abc" . should_equal "ab 11"
@@ -227,30 +227,30 @@ add_specs suite_builder =
table.expand_column "cols" . should_equal expected

group_builder.specify "will work even if keys are not Text" <|
- table = Table.new [["a", [1, 2]], ["b", [Map.from_vector [[1, "x"], [2, "y"]], Map.from_vector [[2, "z"], [3, "w"]]]]]
+ table = Table.new [["a", [1, 2]], ["b", [Dictionary.from_vector [[1, "x"], [2, "y"]], Dictionary.from_vector [[2, "z"], [3, "w"]]]]]
expected = Table.new [["a", [1, 2]], ["b 1", ["x", Nothing]], ["b 2", ["y", "z"]], ["b 3", [Nothing, "w"]]]
table.expand_column "b" . should_equal expected

- table2 = Table.new [["a", [1, 2]], ["b", [Map.from_vector [[My_Mod_Type.Value 12, "x"], [My_Mod_Type.Value 23, "y"]], Map.from_vector [[My_Mod_Type.Value 32, "z"]]]]]
+ table2 = Table.new [["a", [1, 2]], ["b", [Dictionary.from_vector [[My_Mod_Type.Value 12, "x"], [My_Mod_Type.Value 23, "y"]], Dictionary.from_vector [[My_Mod_Type.Value 32, "z"]]]]]
expected2 = Table.new [["a", [1, 2]], ["b x%10=3", ["y", Nothing]], ["b x%10=2", ["x", "z"]]]
table2.expand_column "b" . should_equal expected2
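
expand_column turns a column of dictionaries into one output column per distinct key, filling Nothing where a row's dictionary lacks that key (visible in the expected tables above). A minimal sketch with Text keys (values are illustrative):

    table = Table.new [["id", [1, 2]], ["attrs", [Dictionary.from_vector [["a", 10]], Dictionary.from_vector [["b", 20]]]]]
    # Produces columns "id", "attrs a", "attrs b", with Nothing for missing keys.
    expanded = table.expand_column "attrs"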

group_builder.specify "will fail if text representation of keys is not unique" <|
k1 = My_Mod_Type.Value 12
k2 = My_Mod_Type.Value 32
- m = Map.from_vector [[k1, "a"], [k2, "b"]]
+ m = Dictionary.from_vector [[k1, "a"], [k2, "b"]]
m.at k1 . should_equal "a"
m.at k2 . should_equal "b"
k1.to_text . should_equal "x%10=2"
k2.to_text . should_equal "x%10=2"

- table = Table.new [["a", [1, 2]], ["b", [Map.from_vector [[k1, "x"], [k2, "y"]] , Map.from_vector []]]]
+ table = Table.new [["a", [1, 2]], ["b", [Dictionary.from_vector [[k1, "x"], [k2, "y"]] , Dictionary.from_vector []]]]
r = table.expand_column "b"
r.should_fail_with Illegal_Argument
r.catch.to_display_text . should_contain "keys are duplicated when converted to text"

group_builder.specify "will error when all objects have no fields" <|
- table = Table.new [["aaa", [1, 2]], ["bbb", [Map.from_vector [], Map.from_vector []]], ["ccc", [5, 6]]]
+ table = Table.new [["aaa", [1, 2]], ["bbb", [Dictionary.from_vector [], Dictionary.from_vector []]], ["ccc", [5, 6]]]
r = table.expand_column "bbb"
r.should_fail_with Illegal_Argument
r.catch.message.should_contain "as all inputs had no fields"
@@ -337,7 +337,7 @@ add_specs suite_builder =
table.expand_to_rows "bbb" . should_equal expected

group_builder.specify "Can expand Map" <|
- values_to_expand = [Map.empty.insert "a" 10, Map.empty.insert "d" 40 . insert "b" 20, Map.empty.insert "c" 30]
+ values_to_expand = [Dictionary.singleton "a" 10, Dictionary.singleton "d" 40 . insert "b" 20, Dictionary.singleton "c" 30]
table = Table.new [["aaa", [1, 2, 3]], ["bbb", values_to_expand], ["ccc", [5, 6, 7]]]
expected = Table.new [["aaa", [1, 2, 2, 3]], ["bbb Key", ["a", "d", "b", "c"]], ["bbb", [10, 40, 20, 30]], ["ccc", [5, 6, 6, 7]]]
table.expand_to_rows "bbb" . should_equal expected
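
Note the idiom swap in this hunk: Map.empty.insert "a" 10 becomes Dictionary.singleton "a" 10, with additional entries still chained via insert. A minimal sketch:

    # A one-entry dictionary, then a second entry added.
    d = Dictionary.singleton "d" 40 . insert "b" 20
    d.at "b" . should_equal 20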
@@ -905,7 +905,7 @@ add_specs suite_builder =
if has_nulls then builder.append Nothing
if has_true then builder.append True
if has_false then builder.append False
- in_vector_set = Set.from_vector in_vector
+ in_vector_set = Hashset.from_vector in_vector

vectors = [[True, False, Nothing], [Nothing, Nothing, Nothing], [False, False, True], [True, True, True], [False, False, False], [Nothing, True, True], [False, Nothing, False]]
vectors.each column_vector->