Let JavaScript parse JSON and write JSON ... (#3987)

Use JavaScript to parse and serialise to JSON. Parses to native Enso object.
- `.to_json` now returns a `Text` of the JSON.
- Json methods now `parse`, `stringify` and `from_pairs`.
- New `JS_Object` type representing a JavaScript Object.
- `.to_js_object` allows types to customise serialisation, returning a `JS_Object`.
- Default JSON format for an Atom now has a `type` and `constructor` property (or a method to call as needed to deserialise).
- Removed `.into` support for now.
- Added JSON File Format and SPI to allow `Data.read` to work.
- Added `Data.fetch` API for easy Web download.
- Default visualization for `JS_Object` truncates, and the Vector default visualization now truncates its children too.

Fixes a defect where types with no constructor crashed on `to_json` (e.g. `Matching_Mode.Last.to_json`).
Adjusted default visualisation for Vector, so it doesn't serialise an array of arrays forever.
Likewise, JS_Object default visualisation is truncated to a small subset.

New convention:
- `.get` returns `Nothing` if a key or index is not present. Takes an `other` argument allowing control of default.
- `.at` error if key or index is not present.
- `Nothing` gains a `get` method allowing for easy propagation.
This commit is contained in:
James Dunkerley 2022-12-20 10:33:46 +00:00 committed by GitHub
parent dbe60d2466
commit ace459ed53
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
90 changed files with 1306 additions and 1944 deletions

View File

@ -262,6 +262,8 @@
- [Aligned `Text.match`/`Text.locate` API][3841]
- [Added `transpose` and `cross_tab` to the In-Memory Table.][3919]
- [Improvements to JSON, Pair, Statistics and other minor tweaks.][3964]
- [Overhauled the JSON support (now based on JavaScript), `Data.fetch` and other
  minor tweaks][3987]
[debug-shortcuts]:
https://github.com/enso-org/enso/blob/develop/app/gui/docs/product/shortcuts.md#debug
@ -412,6 +414,7 @@
[3950]: https://github.com/enso-org/enso/pull/3950
[3964]: https://github.com/enso-org/enso/pull/3964
[3967]: https://github.com/enso-org/enso/pull/3967
[3987]: https://github.com/enso-org/enso/pull/3987
#### Enso Compiler

View File

@ -13,13 +13,13 @@ component-groups:
- Input:
exports:
- Standard.Base.System.File.File.new
- Standard.Base.Data.read_file
- Standard.Base.Data.read_text_file
- Standard.Base.Data.read
- Standard.Base.Data.read_text
- Standard.Base.Data.list_directory
- Web:
exports:
- Standard.Base.Network.HTTP.HTTP.new
- Standard.Base.Network.HTTP.HTTP.fetch
- Standard.Base.Data.fetch
- Parse:
exports:
- Standard.Base.Data.Json.Json.parse

View File

@ -299,7 +299,7 @@ type Any
example_map_error =
my_map = Map.empty
error = my_map.get "x"
error = my_map.at "x"
error.map_error (_ -> Example_Error_Type "x is missing")
map_error : (Error -> Error) -> Any
map_error self _ = self

View File

@ -1,9 +1,17 @@
import project.Any.Any
import project.Data.Pair.Pair
import project.Data.Text.Encoding.Encoding
import project.Data.Text.Text
import project.Data.Vector.Vector
import project.Error.Common.Type_Error
import project.Error.Error
import project.Error.File_Error.File_Error
import project.Error.Problem_Behavior.Problem_Behavior
import project.Network.HTTP.Header.Header
import project.Network.HTTP.HTTP
import project.Network.HTTP.HTTP_Method.HTTP_Method
import project.Network.HTTP.Request.Request
import project.Network.HTTP.Request_Error
import project.Nothing.Nothing
import project.System.File.File
import project.System.File_Format.Auto_Detect
@ -41,8 +49,8 @@ from project.Data.Boolean import Boolean, True, False
import Standard.Examples
example_xls_to_table = Data.read Examples.xls (Excel (Worksheet 'Dates'))
read_file : File_Format -> Problem_Behavior -> Any ! File_Error
read_file path format=Auto_Detect (on_problems=Problem_Behavior.Report_Warning) =
read : File_Format -> Problem_Behavior -> Any ! File_Error
read path format=Auto_Detect (on_problems=Problem_Behavior.Report_Warning) =
File.new path . read format on_problems
## Open and read the file at the provided `path`.
@ -64,9 +72,9 @@ read_file path format=Auto_Detect (on_problems=Problem_Behavior.Report_Warning)
import Standard.Base.Data
import Standard.Examples
example_read = Data.read_text_file Examples.csv_path
read_text_file : (Text | File) -> Encoding -> Problem_Behavior -> Text
read_text_file path (encoding=Encoding.utf_8) (on_problems=Problem_Behavior.Report_Warning) =
example_read = Data.read_text Examples.csv_path
read_text : (Text | File) -> Encoding -> Problem_Behavior -> Text
read_text path (encoding=Encoding.utf_8) (on_problems=Problem_Behavior.Report_Warning) =
File.new path . read_text encoding on_problems
## Lists files contained in the provided directory.
@ -122,3 +130,30 @@ read_text_file path (encoding=Encoding.utf_8) (on_problems=Problem_Behavior.Repo
list_directory : (File | Text) -> Text -> Boolean -> Vector File
list_directory directory name_filter=Nothing recursive=False =
File.new directory . list name_filter=name_filter recursive=recursive
## Fetches from the provided URL and returns the response body.
Will error if the status code does not represent a successful response.
Arguments:
- url: The URL to fetch.
- method: The HTTP method to use. Defaults to `GET`.
- headers: The headers to send with the request. Defaults to an empty vector.
- parse: If successful should the body be parsed to an Enso native object.
fetch : (URI | Text) -> HTTP_Method -> Vector (Header | Pair Text Text) -> Boolean -> Any
fetch uri method=HTTP_Method.Get headers=[] parse=True =
parsed_headers = headers . map h-> case h of
_ : Vector -> Header.new (h.at 0) (h.at 1)
_ : Pair -> Header.new (h.at 0) (h.at 1)
_ : Header -> h
_ -> Error.throw (Type_Error.Error Header "Invalid header type (must be a Pair, Vector or Header).")
request = Request.new method uri parsed_headers
response = HTTP.new.request request
if response.code.is_success.not then Error.throw (Request_Error.Error "Status Code" ("Request failed with status code: " + response.code + ". " + response.body.to_text)) else
response_headers = response.headers
content_type = response_headers.find h-> "Content-Type".equals_ignore_case h.name
if (parse == False) || (content_type == Nothing) then response else
format = Auto_Detect.get_web_parser content_type.value uri
if format == Nothing then response else
format.read_web response

View File

@ -1,223 +1,55 @@
import project.Any.Any
import project.Data.Json.Internal
import project.Data.Array.Array
import project.Data.Array_Proxy.Array_Proxy
import project.Data.Json.Extensions
import project.Data.Map.Map
import project.Data.Map.No_Value_For_Key
import project.Data.Numbers.Integer
import project.Data.Numbers.Number
import project.Data.Pair.Pair
import project.Data.Range.Extensions
import project.Data.Text.Extensions
import project.Data.Text.Text
import project.Data.Vector.Vector
import project.Error.Common.Type_Error
import project.Error.Error
import project.Error.Illegal_Argument.Illegal_Argument
import project.Error.No_Such_Key.No_Such_Key
import project.Meta
import project.Nothing.Nothing
import project.Panic.Panic
import project.Warning.Warning
from project.Data.Boolean import Boolean, True, False
polyglot java import java.lang.Exception as JException
## Represents a JSON structure.
## Methods for serializing from and to JSON.
type Json
## A smart constructor, building an object representation based on a vector
of key-value pairs.
## Parse a Text value into a `JS_Object` or an Enso primitive value (like `Text`, `Number`, `Boolean`, `Nothing`), or a `Vector` of values.
parse : Text -> JS_Object | Boolean | Number | Nothing | Text | Vector ! Invalid_JSON
parse json =
error_handler js_exception =
Error.throw (Invalid_JSON.Error js_exception.payload.message)
Arguments:
- contents: A vector of key-value pairs.
Panic.catch Any handler=error_handler <|
parsed = json_parse json
make_enso parsed
All values used as keys must define a `to_json_key : Text` method.
## Serialize an Object to JSON
stringify : (JS_Object | Boolean | Number | Nothing | Text | Vector) -> Text
stringify object =
json_stringify (make_javascript object.to_js_object)
> Example
The following code returns a JSON object, that after serialization becomes
{ "foo": 533, "bar": false }
import Standard.Base.Data.Json.Json
example_from_pairs = Json.from_pairs [["foo", 533], ["bar", False]]
from_pairs : Vector -> Json
from_pairs contents =
fs = contents.fold Map.empty map-> kv_pair->
key = kv_pair . at 0 . to_json_key
val = kv_pair . at 1 . to_json
map.insert key val
Json.Object fs
## Parses an RFC-8259 compliant JSON text into a `Json` structure.
Arguments:
- json_text: The RFC-8259-compliant JSON text.
> Example
Convert some text representing a JSON object into JSON.
import Standard.Base.Data.Json.Json
example_parse = Json.parse '{ "a": 1 }'
parse : Text -> Json ! Json_Parse_Error
parse json_text =
Panic.catch JException (Internal.parse_helper json_text) cause->
Error.throw (Json_Parse_Error.Error cause.payload.getMessage)
## A representation of a JSON object.
Arguments:
- fields: The fields of the JSON object.
Object fields
## A representation of a JSON array.
Arguments:
- items: The items in the JSON array.
Array items
## A representation of a JSON string.
Arguments:
- value: The text contained in the JSON string.
String value
## A representation of a JSON number.
Arguments:
- value: The number contained in the JSON number.
Number value
## A representation of a JSON boolean.
Arguments:
- value: The boolean contained in a JSON boolean.
Boolean value
## A representation of a JSON null.
Null
## Marshals this JSON into an arbitrary value described by
`type_descriptor`.
Arguments:
- type_descriptor: The type descriptor is a fully-applied type,
describing all required sub-types. It can either be an Atom or one of
the primitive types (`Number`, `Text`, `Boolean`, `Vector`).
> Example
The following shows an example of reading a nested JSON into a desired
type. It will return a vector of `Book` objects containing data from
`json_string`.
import Standard.Examples
type Book title author
type Author name year_of_birth
example_into =
json = Examples.json
format = (Vector.fill 1 (Book title=Text (Author name=Text year_of_birth=Number)))
json.into format
into : Any -> Any ! Marshalling_Error
into self type_descriptor =
Panic.recover Any (Internal.into_helper type_descriptor self)
## Returns an empty JSON object.
empty_object : Json
empty_object = Json.Object Map.empty
## Returns an empty JSON array.
empty_array : Json
empty_array = Json.Array []
## Returns this Json object.
This is a no-op on a JSON object, but is included to implement the
`to_json` interface.
to_json : Json
to_json self = self
## Renders this JSON into an RFC-8259 compliant text.
> Example
Convert some JSON to text.
import Standard.Examples
example_to_text = Examples.json.to_text
to_text : Text
to_text self = Internal.render_helper "" self
## Recursively unwraps the JSON value into primitive values.
> Example
Unwrap the JSON number 3 to the primitive number 3.
import Standard.Base.Data.Json.Json
example_unwrap = Json.Number 3 . unwrap
unwrap : Any
unwrap self = case self of
Json.Array its -> its.map .unwrap
Json.Boolean b -> b
Json.Number n -> n
Json.String t -> t
Json.Null -> Nothing
Json.Object f -> f.map .unwrap
## Gets the value associated with the given key in this object.
Throws `No_Such_Field` if the associated key is not defined.
Arguments:
- field: The name of the field from which to get the value.
> Example
Get the "title" field from this JSON representing a book.
import Standard.Examples
example_get = Examples.json_object.get "title"
get : Text -> Json ! No_Such_Field
get self field = self.get_or_else field (Error.throw (No_Such_Field.Error field))
## Gets the value associated with the given key in this object.
Arguments:
- field: The name of the field from which to get the value.
- other: The value to return if not present. Defaults to `Json.Null`.
get_or_else : Text -> Any -> Json
get_or_else self field ~other=Json.Null = case self of
Json.Object fields -> fields.get_or_else field other
Json.Null -> other
_ -> Error.throw (Illegal_Argument.Error "Json.get_or_else: self must be an Object.")
## Gets the value associated with the given index in this object.
Arguments:
- index: The index position from which to get the value.
at : Integer -> Json ! Illegal_Argument | Out_Of_Bounds
at self index = case self of
Json.Array array -> array.at index
Json.String text -> Json.String (text.at index)
Json.Null -> Json.Null
_ -> Error.throw (Illegal_Argument.Error "Json.at: self must be a String or an Array.")
## Gets the set of field names for an object.
field_names : Vector ! Illegal_Argument
field_names self = case self of
Json.Object map -> map.keys
_ -> Error.throw (Illegal_Argument.Error "Json.field_name: self must be an Object.")
## Returns the number of items in this array or object or length of string.
length : Integer ! Illegal_Argument
length self = case self of
Json.Array array -> array.length
Json.Object map -> map.size
Json.String text -> text.length
_ -> Error.throw (Illegal_Argument.Error "Json.length: self must be a String, an Object, or an Array.")
## Convert a Vector of Keys and Values to JSON.
Keys must be `Text` values.
from_pairs : Vector -> Text
from_pairs pairs =
JS_Object.from_pairs pairs . to_text
## UNSTABLE
A failure indicating malformed text input into the JSON parser.
Check the `message` field for detailed information on the specific failure.
type Json_Parse_Error
type Invalid_JSON
Error message
## PRIVATE
@ -227,19 +59,6 @@ type Json_Parse_Error
to_display_text self =
"Parse error in parsing JSON: " + self.message.to_text + "."
## UNSTABLE
An error indicating that there is no such field in the JSON object.
type No_Such_Field
Error field_name
## PRIVATE
Pretty prints the no such field error.
to_display_text : Text
to_display_text self =
"The field " + self.field_name.to_text + " is not present in this object."
## UNSTABLE
A failure indicating the inability to marshall a `Json` object into the
@ -283,3 +102,169 @@ type Marshalling_Error
"Type mismatch error: the json with type `" + json_text + "` did not match the format `" + format_text + "`."
Marshalling_Error.Missing_Field _ field _ ->
"Missing field in Json: the field `" + field.to_text "` was missing in the json."
type JS_Object
## Creates a JS_Object from a list of key-value pairs.
Keys must be `Text` values.
Values will be recursively converted to JSON serializable as needed.
from_pairs : Vector -> JS_Object
from_pairs pairs =
js_object = pairs.fold new_object current->pair->
case pair.at 0 of
text : Text ->
set_value current text (pair.at 1).to_js_object
current
_ -> Error.throw (Illegal_Argument.Error "JS_Object.from_pairs: key must be a Text value")
JS_Object.Value js_object
## PRIVATE
Value js_object
## Get a value for a key of the object, or a default value if that key is not present.
Arguments:
- key: The key to get.
- if_missing: The value to return if the key is not found.
get : Text -> Any -> JS_Object | Boolean | Number | Nothing | Text | Vector
get self key ~if_missing=Nothing =
if (has_property self.js_object key) . not then if_missing else
value = get_value self.js_object key
make_enso value
## Get a value for a key of the object.
If the key is not found, throws a `No_Such_Key` error.
Arguments:
- key: The key to get.
at : Text -> JS_Object | Boolean | Number | Nothing | Text | Vector ! No_Such_Key
at self key = self.get key (Error.throw (No_Such_Key.Error self key))
## Get the keys of the object.
field_names : Vector
field_names self =
Vector.from_polyglot_array (get_property_names self.js_object)
## Gets the number of keys in the object.
length : Number
length self =
get_property_names self.js_object . length
## Convert the object to a Vector of Pairs.
to_vector : Vector
to_vector self =
keys = get_property_names self.js_object
proxy = Array_Proxy.new keys.length (i-> Pair.new (keys.at i) (self.get (keys.at i)))
Vector.from_polyglot_array proxy
## Convert the object to a JS_Object.
to_js_object : JS_Object
to_js_object self = self
## Convert to a Text.
to_text : Text
to_text self = Json.stringify self
## Convert to a JSON representation.
to_json : Text
to_json self = self.to_text
## Checks if this JS_Object is equal to another JS_Object.
Arguments:
- that: The map to compare `self` to.
== : JS_Object -> Boolean
== self that = case that of
_ : JS_Object ->
self_keys = self.field_names
that_keys = that.field_names
self_keys.length == that_keys.length && self_keys.all key->
(self.get key == that.at key).catch No_Such_Key.Error _->False
_ -> False
## UNSTABLE
Transform the vector into text for displaying as part of its default
visualization.
to_default_visualization_data : Text
to_default_visualization_data self =
render self
## PRIVATE
Render the JS_Object to Text with truncated depth.
render object depth=0 max_depth=5 max_length=100 = case object of
_ : JS_Object ->
if depth == max_depth then '"{...}"' else
len = object.length
keys = object.field_names.take max_length
children = keys.map key-> key.to_json + ': ' + (render (object.get key) (depth+1) max_depth max_length)
'{' + (children.join ", ") + (if len > max_length then ', "...": "Another ' + (len - max_length).to_text + ' items truncated."' else "") + '}'
_ : Vector ->
if depth == max_depth then '"[...]"' else
len = object.length
children = 0.up_to (max_length.min len) . map i-> render (object.at i) (depth+1) max_depth max_length
"[" + (children.join ", ") + (if len > max_length then ', "... another ' + (len - max_length).to_text + ' items truncated."' else "") + "]"
_ : Array ->
proxy = Array_Proxy.new object.length (i-> make_enso (object.at i))
@Tail_Call render (Vector.from_polyglot_array proxy) depth max_depth max_length
Nothing -> object.to_json
_ : Text -> object.to_json
_ : Boolean -> object.to_json
_ : Number -> object.to_json
_ -> @Tail_Call render (JS_Object.Value object) depth max_depth max_length
## PRIVATE
Internal function ensuring that a JavaScript object is presented appropriately to Enso.
js_object must be a Native JS Object, Text, Boolean, Nothing, Number, Vector, Array, JS_Object.
Children of these types will be recursively passed through this function.
make_enso js_object =
case js_object of
Nothing -> Nothing
_ : Text -> js_object
_ : Boolean -> js_object
_ : Number -> js_object
_ : Vector ->
proxy = Array_Proxy.new js_object.length (i-> make_enso (js_object.at i))
Vector.from_polyglot_array proxy
_ : Array ->
proxy = Array_Proxy.new js_object.length (i-> make_enso (js_object.at i))
Vector.from_polyglot_array proxy
_ : JS_Object -> js_object
_ -> JS_Object.Value js_object
## PRIVATE
Internal function to convert any JS_Objects into their native JS objects before passing to JS.
enso_object must be Text, Boolean, Nothing, Number, Vector, Array, JS_Object (and children as well).
make_javascript enso_object =
## Have to remove warnings before passing into JavaScript otherwise will be an Object.
if Warning.get_all enso_object != [] then make_javascript (Warning.set enso_object []) else
case enso_object of
_ : JS_Object ->
enso_object.field_names.fold new_object current->key->
value = enso_object.get key
js_value = make_javascript value
set_value current key js_value
current
_ : Vector -> enso_object.map make_javascript
_ : Array -> Vector.from_polyglot_array enso_object . map make_javascript
_ -> enso_object
foreign js new_object = """
return {}
foreign js json_parse text = """
return JSON.parse(text)
foreign js json_stringify js_object = """
return JSON.stringify(js_object)
foreign js has_property js_object key = """
return js_object.hasOwnProperty(key)
foreign js get_value object key = """
return object[key]
foreign js set_value object key value = """
object[key] = value
foreign js get_property_names object = """
return Object.getOwnPropertyNames(object)

View File

@ -1,11 +1,17 @@
import project.Any.Any
import project.Data.Array.Array
import project.Data.Array_Proxy.Array_Proxy
import project.Data.Json.Json
import project.Data.Json.JS_Object
import project.Data.Locale.Locale
import project.Data.Map.Map
import project.Data.Numbers.Decimal
import project.Data.Numbers.Integer
import project.Data.Numbers.Number
import project.Data.Range.Extensions
import project.Data.Text.Extensions
import project.Data.Text.Text
import project.Data.Text.Text_Sub_Range.Text_Sub_Range
import project.Data.Vector.Vector
import project.Error.Error
import project.Meta
@ -13,124 +19,114 @@ import project.Nothing.Nothing
from project.Data.Boolean import Boolean, True, False
## ALIAS To JSON
## Converts the given value to a JSON serialized value.
Any.to_json : Text
Any.to_json self = Json.stringify self
Generically converts an atom into a JSON object.
## Converts the given value to a JSON serialized value.
Error.to_json : Text
Error.to_json self = self.to_js_object.to_text
The input atom is converted into a JSON object, with a `"type"` field set to
the atom's type name and all other fields serialized with their name as
object key and the value as the object value.
## Converts the given value to a JSON serializable object.
For Nothing, booleans, numbers and strings, this is the value itself.
For arrays or vectors, the elements are converted recursively.
For atoms and maps, the object is converted to a JS_Object.
Text.to_js_object : JS_Object | Text
Text.to_js_object self = case self of
Text -> JS_Object.from_pairs [["type", "Text"]]
_ -> self
> Example
Convert a vector to JSON.
[1, 2, 3, 4].to_json
Any.to_json : Json
Any.to_json self =
## Converts the given value to a JSON serializable object.
For Nothing, booleans, numbers and strings, this is the value itself.
For arrays or vectors, the elements are converted recursively.
For atoms and maps, the object is converted to a JS_Object.
Number.to_js_object : JS_Object | Number
Number.to_js_object self = case self of
Number -> JS_Object.from_pairs [["type", "Number"]]
Integer -> JS_Object.from_pairs [["type", "Integer"]]
Decimal -> JS_Object.from_pairs [["type", "Decimal"]]
_ -> self
## Converts the given value to a JSON serializable object.
For Nothing, booleans, numbers and strings, this is the value itself.
For arrays or vectors, the elements are converted recursively.
For atoms and maps, the object is converted to a JS_Object.
Boolean.to_js_object : JS_Object | Boolean
Boolean.to_js_object self = case self of
Boolean -> JS_Object.from_pairs [["type", "Boolean"]]
_ -> self
## Converts the given value to a JSON serializable object.
For Nothing, booleans, numbers and strings, this is the value itself.
For arrays or vectors, the elements are converted recursively.
For atoms and maps, the object is converted to a JS_Object.
Nothing.to_js_object : Nothing
Nothing.to_js_object self = self
## Converts the given value to a JSON serializable object.
For Nothing, booleans, numbers and strings, this is the value itself.
For arrays or vectors, the elements are converted recursively.
For atoms and maps, the object is converted to a JS_Object.
Array.to_js_object : Vector
Array.to_js_object self =
proxy = Array_Proxy.new self.length i-> self.at i . to_js_object
Vector.from_polyglot_array proxy
## Converts the given value to a JSON serializable object.
For Nothing, booleans, numbers and strings, this is the value itself.
For arrays or vectors, the elements are converted recursively.
For atoms, the object is converted to a JS_Object.
Vector.to_js_object : Vector
Vector.to_js_object self =
proxy = Array_Proxy.new self.length i-> self.at i . to_js_object
Vector.from_polyglot_array proxy
## Converts the given value to a JSON serializable object.
For Nothing, booleans, numbers and strings, this is the value itself.
For arrays or vectors, the elements are converted recursively.
For atoms, the object is converted to a JS_Object.
Any.to_js_object : JS_Object
Any.to_js_object self =
m = Meta.meta self
case m of
_ : Meta.Atom ->
type_pair = ["type", Meta.type_of self . to_text]
cons = m.constructor
fs = m.fields
fnames = cons.fields
json_fs = 0.up_to fnames.length . fold Map.empty m-> i->
m.insert (fnames.at i) (fs.at i . to_json)
with_tp = json_fs . insert "type" (Json.String cons.name)
Json.Object with_tp
java_cons = cons.value ...
if java_cons == self then JS_Object.from_pairs [type_pair] else
fs = m.fields
field_names = cons.fields
builder = Vector.new_builder field_names.length+2
builder.append type_pair
builder.append ["constructor", cons.name]
0.up_to field_names.length . map i-> builder.append [field_names.at i, fs.at i . to_js_object]
JS_Object.from_pairs builder.to_vector
_ : Meta.Constructor ->
Json.Object (Map.empty . insert "type" (Json.String m.name))
type_name = Meta.get_qualified_type_name self . split '.' . at -2
JS_Object.from_pairs [["type", type_name], ["constructor", m.name]]
_ -> Error.throw ("Cannot convert " + self.to_text + " to JSON")
## The following two cases cannot be handled generically and should
instead define their own `to_json` implementations.
_ : Meta.Polyglot -> Json.Null
_ : Meta.Primitive -> Json.Null
## UNSTABLE
Returns a JSON representation of the dataflow error.
> Example
Converting a dataflow error to JSON.
import Standard.Examples
example_to_json = Examples.throw_error.to_json
Error.to_json : Json
Error.to_json self =
error_type = ["type", "Error"]
## Converts the given value to a JSON serializable object.
For Nothing, booleans, numbers and strings, this is the value itself.
For arrays or vectors, the elements are converted recursively.
For atoms, the object is converted to a JS_Object.
Error.to_js_object : JS_Object
Error.to_js_object self =
caught = self.catch
error_content = ["content", caught.to_json]
error_type = ["type", "Error"]
error_content = ["content", caught]
error_message = ["message", caught.to_display_text]
Json.from_pairs [error_type, error_content, error_message]
JS_Object.from_pairs [error_type, error_content, error_message]
## Text to JSON conversion.
> Example
Convert the text "Hello World!" to JSON.
"Hello World!".to_json
> Example
Convert the text "cześć" to JSON.
"cześć".to_json
Text.to_json : Json
Text.to_json self = Json.String self
## Method used by object builders to convert a value into a valid JSON key.
> Example
Ensure that the text "foo" is a JSON key.
"foo".to_json_key
Text.to_json_key : Text
Text.to_json_key self = self
## Convert a boolean to JSON.
> Example
Convert `True` to JSON.
True.to_json
Boolean.to_json : Json
Boolean.to_json self = Json.Boolean self
## Convert `Nothing` to JSON.
> Example
Convert `Nothing` to JSON.
Nothing.to_json
Nothing.to_json : Json
Nothing.to_json self = Json.Null
## Number to JSON conversion.
> Example
Convert the number 8 to JSON.
8.to_json
Number.to_json : Json
Number.to_json self = Json.Number self
## Vector to JSON conversion.
> Example
Convert a vector of numbers to JSON.
[1, 2, 3].to_json
Vector.to_json : Json
Vector.to_json self = Json.Array (self.map .to_json)
## A Locale to Json conversion
> Example
Convert the default locale to JSON.
import Standard.Base.Data.Locale.Locale
example_to_json = Locale.default.to_json
Locale.to_json : Json
Locale.to_json self =
b = Vector.new_builder
## Converts the given value to a JSON serializable object.
Custom serialization for Locale, serializes the language, country and variant.
Locale.to_js_object : JS_Object
Locale.to_js_object self =
b = Vector.new_builder 5
b.append ["type", "Locale"]
if self.language.is_nothing.not then b.append ["language", self.language]
if self.country.is_nothing.not then b.append ["country", self.country]
if self.variant.is_nothing.not then b.append ["variant", self.variant]
Json.from_pairs b.to_vector
b.append ["constructor", "new"]
b.append ["language", self.language]
b.append ["country", self.country]
b.append ["variant", self.variant]
JS_Object.from_pairs b.to_vector

View File

@ -1,338 +0,0 @@
import project.Any.Any
import project.Data.Json.Json
import project.Data.Json.Extensions
import project.Data.Json.Marshalling_Error
import project.Data.List.List
import project.Data.Map.Map
import project.Data.Numbers.Decimal
import project.Data.Numbers.Integer
import project.Data.Numbers.Number
import project.Data.Text.Text
import project.Data.Vector.Vector
import project.Meta
import project.Nothing.Nothing
import project.Panic.Panic
import project.Runtime.Ref.Ref
from project.Data.Boolean import Boolean, True, False
polyglot java import org.enso.base.json.Parser
polyglot java import org.enso.base.json.Printer
## PRIVATE
A JSON parser event consumer, passed to the Java parser backend.
Conforms to the `org.enso.base.json.Parser.JsonConsumer` Java interface.
type Consumer
## PRIVATE
Creates a new top-level consumer.
new : Consumer
new =
child = Ref.new List.Nil
val = Ref.new Nothing
Consumer.Value child val
## PRIVATE
A JSON parser event consumer, passed to the Java parser backend.
Arguments:
- child_consumer: The child consumer of this.
- value: The value being consumed.
Conforms to the `org.enso.base.json.Parser.JsonConsumer` Java interface.
Value child_consumer value
## PRIVATE
A helper for handling "value emitted" events.
Arguments:
- v: The value to act upon.
on_value : Any -> Nothing
on_value self v = case self.child_consumer . get of
List.Nil -> self.value . put v
cons -> cons.on_value v
## PRIVATE
Closes the child consumer and either sets the current consumer to its
parent, or takes its returned value as the final result of parsing.
seal_child : Nothing
seal_child self =
child = self.child_consumer.get
val = child.seal
case child.parent of
List.Nil ->
self.value . put val
p ->
self.child_consumer . put p
p.on_value val
## PRIVATE
Consumes the `start_object` event.
on_start_object : Nothing
on_start_object self =
parent = self.child_consumer . get
self.child_consumer . put (Object_Consumer.new parent)
## PRIVATE
Consumes the `key` event.
Arguments:
- k: The key to act upon.
on_key : Text -> Nothing
on_key self k = self.child_consumer . get . on_key k
## PRIVATE
Consumes the `end_object` event.
on_end_object : Nothing
on_end_object self = self.seal_child
## PRIVATE
Consumes the `start_array` event.
on_start_array : Nothing
on_start_array self =
parent = self.child_consumer . get
self.child_consumer . put (Array_Consumer.new parent)
## PRIVATE
Consumes the `end_array` event.
on_end_array : Nothing
on_end_array self = self.seal_child
## PRIVATE
Consumes the `long` event.
Arguments:
- n: The long value to process.
on_long : Integer -> Nothing
on_long self n = self.on_value (Json.Number n)
## PRIVATE
Consumes the `double` event.
Arguments:
- n: The double value to process.
on_double : Decimal -> Nothing
on_double self n = self.on_value (Json.Number n)
## PRIVATE
Consumes the `string` event.
Arguments:
- s: The string value to process.
on_string : Text -> Nothing
on_string self s = self.on_value (Json.String s)
## PRIVATE
Consumes the `true` event.
on_true : Nothing
on_true self = self.on_value (Json.Boolean True)
## PRIVATE
Consumes the `false` event.
on_false : Nothing
on_false self = self.on_value (Json.Boolean False)
## PRIVATE
Consumes the `null` event.
on_null : Nothing
on_null self = self.on_value Json.Null
## PRIVATE
A child consumer, used to process events inside arrays.
type Array_Consumer
## PRIVATE
Creates a new array consumer with the given parent.
Arguments:
- parent: The parent of the new consumer.
new : Any -> Array_Consumer
new parent =
Array_Consumer.Value Vector.new_builder parent
## PRIVATE
A child consumer, used to process events inside arrays.
Arguments:
- builder: The builder for array values.
- parent: The parent consumer.
Value builder parent
## PRIVATE
Consumes a value.
Arguments:
- v: The value to process.
on_value : Any -> Nothing
on_value self v = self.builder.append v
## PRIVATE
Returns the final value built by this consumer.
seal : Json
seal self =
vec = self.builder.to_vector
Json.Array vec
## PRIVATE
   A child consumer that accumulates the key-value pairs of a JSON object
   as they are parsed.
type Object_Consumer
    ## PRIVATE
       Constructs a fresh object consumer that reports back to `parent` once
       the object is closed.

       Arguments:
       - parent: The parent of the new consumer.
    new : Any -> Object_Consumer
    new parent =
        key_ref = Ref.new ""
        map_ref = Ref.new Map.empty
        Object_Consumer.Value key_ref map_ref parent

    ## PRIVATE
       A child consumer, used to process events inside objects.

       Arguments:
       - last_key: The last object key that has been seen.
       - map: The map representing the object.
       - parent: The parent consumer.
    Value last_key map parent

    ## PRIVATE
       Remembers a key; the next value event will be stored under it.

       Arguments:
       - k: The key to process.
    on_key : Text -> Nothing
    on_key self k = self.last_key . put k

    ## PRIVATE
       Stores a parsed value under the most recently seen key.

       Arguments:
       - v: The value to process.
    on_value : Any -> Nothing
    on_value self v =
        updated_map = self.map.get . insert self.last_key.get v
        self.map . put updated_map

    ## PRIVATE
       Finishes the object, wrapping the accumulated key-value map into a
       `Json.Object`.
    seal : Json
    seal self = Json.Object self.map.get
## PRIVATE
A helper method for printing JSON values to Text.
Recursively renders `json` and appends the result to `builder`.
Arguments:
- builder: An accumulator for text.
- json: The json value being converted to text.
render_helper : Text -> Json -> Text
render_helper builder json = case json of
Json.Object fields ->
# The ref holds the pending separator: empty before the first entry,
# "," for every entry after it.
r = Ref.new ""
render_key_value acc key value =
separator = r . get
r.put ","
val = render_helper "" value
acc + separator + (Printer.json_escape key) + ":" + val
arr = fields.fold_with_key "" render_key_value
builder + "{" + arr + "}"
Json.Array items ->
# Same separator trick as the object branch above.
r = Ref.new ""
render_array_element acc element =
separator = r.get
r.put ","
val = render_helper "" element
acc + separator + val
arr = items.fold "" render_array_element
builder + "[" + arr + "]"
Json.String value ->
builder + (Printer.json_escape value)
Json.Number value ->
# JSON has no representation for NaN or +/-Infinity, so render as null.
builder + (if value.is_nan || value.is_infinite then "null" else value.to_text)
Json.Boolean value ->
val = if value then "true" else "false"
builder + val
Json.Null ->
builder + "null"
## PRIVATE
A helper method for converting JSON objects into arbitrary types.
Matches the requested format against the shape of `json`, panicking with a
`Marshalling_Error` on any mismatch.
Arguments:
- fmt: The format to convert the JSON into.
- json: The JSON being converted.
See `Json.into` for semantics documentation.
into_helper : Any -> Json -> Any
into_helper fmt json = case fmt of
# A vector format uses its first element as the format for every item —
# presumably `[Number]` means "array of numbers"; TODO confirm against
# the `Json.into` docs. An empty format vector would make `fmt.at 0` fail.
_ : Vector -> case json of
Json.Array items -> items.map (into_helper (fmt.at 0))
_ -> Panic.throw (Marshalling_Error.Type_Mismatch json fmt)
_ : Boolean -> case json of
Json.Boolean v -> v
_ -> Panic.throw (Marshalling_Error.Type_Mismatch json fmt)
_ : Number -> case json of
Json.Number v -> v
_ -> Panic.throw (Marshalling_Error.Type_Mismatch json fmt)
_ : Text -> case json of
Json.String v -> v
_ -> Panic.throw (Marshalling_Error.Type_Mismatch json fmt)
_ ->
# Fall back to reflection: an atom format is rebuilt by looking up each
# of its constructor's field names in the JSON object and converting the
# corresponding values with the matching per-field formats.
m = Meta.meta fmt
case m of
_ : Meta.Atom -> case json of
Json.Object json_fields ->
cons = m.constructor
field_names = cons.fields
field_formats = m.fields
field_values = field_names.zip field_formats n-> inner_format->
# `get` on a missing key yields an error value; surface it as a
# dedicated Missing_Field marshalling panic instead.
field_json = json_fields . get n . catch Any _->
Panic.throw (Marshalling_Error.Missing_Field json fmt n)
into_helper inner_format field_json
cons.new field_values
_ -> Panic.throw (Marshalling_Error.Type_Mismatch json fmt)
_ -> Panic.throw (Marshalling_Error.Type_Mismatch json fmt)
## PRIVATE
   A helper used to parse text into a JSON value.

   Feeds the text through the streaming `Parser`, collecting its events with
   a fresh `Consumer`, and returns the value the consumer built.

   Arguments:
   - json_text: The textual representation of the JSON.
parse_helper : Text -> Json
parse_helper json_text =
    event_consumer = Consumer.new
    Parser.parse json_text event_consumer
    event_consumer.value . get

View File

@ -6,6 +6,7 @@ import project.Data.Pair.Pair
import project.Data.Text.Text
import project.Data.Vector.Vector
import project.Error.Error
import project.Error.No_Such_Key.No_Such_Key
import project.Nothing.Nothing
from project.Data.Boolean import Boolean, True, False
@ -185,7 +186,7 @@ type Map
insert self key value = Internal.insert self key value
## Gets the value associated with `key` in this map, or throws a
`No_Value_For_Key_Error` if `key` is not present.
`No_Such_Key.Error` if `key` is not present.
Arguments:
- key: The key to look up in the map.
@ -196,17 +197,16 @@ type Map
import Standard.Base.Data.Map.Map
import Standard.Examples
example_get = Examples.map.get 1
get : Any -> Any ! No_Value_For_Key
get self key =
self.get_or_else key (Error.throw (No_Value_For_Key.Error key))
example_at = Examples.map.at 1
at : Any -> Any ! No_Such_Key
at self key = self.get key (Error.throw (No_Such_Key.Error self key))
## Gets the value associated with `key` in this map, or returns `other` if
it isn't present.
## Gets the value associated with `key` in this map, or returns
`if_missing` if it isn't present.
Arguments:
- key: The key to look up in the map.
- other: The value to use if the key isn't present.
- if_missing: The value to use if the key isn't present.
> Example
Get the value for the key 2 in a map or instead return "zero" if it
@ -215,11 +215,11 @@ type Map
import Standard.Base.Data.Map.Map
import Standard.Examples
example_get_or_else = Examples.map.get_or_else 2 "zero"
get_or_else : Any -> Any -> Any
get_or_else self key ~other =
example_get = Examples.map.get 2 "zero"
get : Any -> Any -> Any
get self key ~if_missing=Nothing =
go map = case map of
Map.Tip -> other
Map.Tip -> if_missing
Map.Bin _ k v l r -> case Internal.compare_allow_nothing key k of
Ordering.Equal -> v
Ordering.Less -> @Tail_Call go l
@ -465,19 +465,3 @@ type Map
Map.Bin _ k v _ r -> @Tail_Call last (Pair.new k v) r
Map.Tip -> p
last Nothing self
## UNSTABLE
An error for getting a missing value from a map.
Arguments:
- key: The key that was asked for.
type No_Value_For_Key
Error key
## PRIVATE
Converts the error into a human-readable representation.
to_display_text : Text
to_display_text self =
"The map contained no value for the key " + self.key.to_text + "."

View File

@ -23,6 +23,14 @@ type Encoding
all_encodings =
Encoding.all_character_sets.map Encoding.Value
## Create an Encoding from a Text.
from_name : Text -> Encoding ! Illegal_Argument
from_name name =
handler = _-> Error.throw (Illegal_Argument.Error ("Unknown Character Set: " + name))
Panic.catch UnsupportedCharsetException handler=handler <|
charset = Charset.forName name
Encoding.Value charset.name
## Create a new Encoding object.
Arguments:

View File

@ -546,7 +546,7 @@ Text.bytes self encoding on_problems=Problem_Behavior.Report_Warning =
Get the ASCII bytes of the text "Hello".
"Hello".bytes (Encoding.ascii)
Text.from_bytes : Vector Byte -> Encoding -> Text
Text.from_bytes : Vector Byte -> Encoding -> Problem_Behavior -> Text
Text.from_bytes bytes encoding on_problems=Problem_Behavior.Report_Warning =
result = Encoding_Utils.from_bytes bytes.to_array (encoding . to_java_charset)
if result.warnings.is_nothing then result.result else

View File

@ -1,5 +1,5 @@
import project.Any.Any
import project.Data.Json.Json
import project.Data.Json.JS_Object
import project.Data.Locale.Locale
import project.Data.Numbers.Integer
import project.Data.Ordering.Ordering
@ -539,14 +539,17 @@ type Date
_ ->
Error.throw (Illegal_Argument.Error "Illegal period argument")
## A Date to Json conversion.
## Convert to a JS_Object representing this Date.
> Example
Convert the current date to JSON.
Convert the current date to a JS_Object.
example_to_json = Date.now.to_json
to_json : Json
to_json self = Json.from_pairs [["type", "Date"], ["day", self.day], ["month", self.month], ["year", self.year]]
example_to_json = Date.now.to_js_object
to_js_object : JS_Object
to_js_object self =
type_pair = ["type", "Date"]
cons_pair = ["constructor", "new"]
JS_Object.from_pairs [type_pair, cons_pair, ["day", self.day], ["month", self.month], ["year", self.year]]
## Format this date using the provided format specifier.

View File

@ -1,5 +1,5 @@
import project.Any.Any
import project.Data.Json.Json
import project.Data.Json.JS_Object
import project.Data.Locale.Locale
import project.Data.Numbers.Integer
import project.Data.Ordering.Ordering
@ -595,16 +595,17 @@ type Date_Time
to_text : Text
to_text self = @Builtin_Method "Date_Time.to_text"
## Convert the time to JSON.
## Convert to a JavaScript Object representing a Date_Time.
> Example
Convert the current time to JSON.
Convert the current time to a JS_Object.
from Standard.Base import Date_Time
example_to_json = Date_Time.now.to_json
to_json : Json
to_json self = Json.from_pairs [["type", "Date_Time"], ["year", self.year], ["month", self.month], ["day", self.day], ["hour", self.hour], ["minute", self.minute], ["second", self.second], ["nanosecond", self.nanosecond], ["zone", self.zone]]
example_to_json = Date_Time.now.to_js_object
to_js_object : JS_Object
to_js_object self =
type_pair = ["type", "Date_Time"]
cons_pair = ["constructor", "new"]
JS_Object.from_pairs [type_pair, cons_pair, ["year", self.year], ["month", self.month], ["day", self.day], ["hour", self.hour], ["minute", self.minute], ["second", self.second], ["nanosecond", self.nanosecond], ["zone", self.zone]]
## Format this time as text using the specified format specifier.

View File

@ -1,5 +1,5 @@
import project.Any.Any
import project.Data.Json.Json
import project.Data.Json.JS_Object
import project.Data.Numbers.Integer
import project.Data.Ordering.Ordering
import project.Data.Pair.Pair
@ -300,24 +300,23 @@ type Duration
to_vector : Vector Integer
to_vector self = [self.hours, self.minutes, self.seconds, self.milliseconds, self.nanoseconds]
## A Duration to Json conversion.
## Convert to a JavaScript Object representing a Duration.
> Example
Convert a duration of 10 seconds to Json.
Convert a duration of 10 seconds to a JS_Object.
import Standard.Base.Data.Time.Duration
example_to_json = (Duration.new seconds=10).to_json
to_json : Json
to_json self =
b = Vector.new_builder
example_to_json = (Duration.new seconds=10).to_js_object
to_js_object : JS_Object
to_js_object self =
b = Vector.new_builder 7
b.append ["type", "Duration"]
b.append ["constructor", "new"]
if self.hours==0 . not then b.append ["hours", self.hours]
if self.minutes==0 . not then b.append ["minutes", self.minutes]
if self.seconds==0 . not then b.append ["seconds", self.seconds]
if self.milliseconds==0 . not then b.append ["milliseconds", self.milliseconds]
if self.nanoseconds==0 . not then b.append ["nanoseconds", self.nanoseconds]
Json.from_pairs b.to_vector
JS_Object.from_pairs b.to_vector
## Check if this duration represents an empty time-span.

View File

@ -1,5 +1,5 @@
import project.Any.Any
import project.Data.Json.Json
import project.Data.Json.JS_Object
import project.Data.Locale.Locale
import project.Data.Numbers.Integer
import project.Data.Ordering.Ordering
@ -296,16 +296,17 @@ type Time_Of_Day
to_text : Text
to_text self = @Builtin_Method "Time_Of_Day.to_text"
## A Time_Of_Day to Json conversion.
## Convert to a JavaScript Object representing this Time_Of_Day.
> Example
Convert the current time to JSON.
Convert the current time to a JS_Object.
from Standard.Base import Time_Of_Day
example_to_json = Time_Of_Day.now.to_text
to_json : Json
to_json self = Json.from_pairs [["type", "Time_Of_Day"], ["hour", self.hour], ["minute", self.minute], ["second", self.second], ["nanosecond", self.nanosecond]]
example_to_json = Time_Of_Day.now.to_js_object
to_js_object : JS_Object
to_js_object self =
type_pair = ["type", "Time_Of_Day"]
cons_pair = ["constructor", "new"]
JS_Object.from_pairs [type_pair, cons_pair, ["hour", self.hour], ["minute", self.minute], ["second", self.second], ["nanosecond", self.nanosecond]]
## Format this time of day using the provided formatter pattern.

View File

@ -1,5 +1,5 @@
import project.Any.Any
import project.Data.Json.Json
import project.Data.Json.JS_Object
import project.Data.Numbers.Integer
import project.Data.Text.Text
import project.Error.Error
@ -111,7 +111,7 @@ type Time_Zone
This method parses the ID producing a `Time_Zone`.
Arguments:
- text: The text representing a zone identifier.
- id: The text representing a zone identifier.
> Example
Get Central European Time.
@ -141,9 +141,9 @@ type Time_Zone
example_parse = Time_Zone.parse "+03:02:01"
parse : Text -> Time_Zone ! Time_Error
parse text =
parse id =
Panic.catch JException handler=(catch -> Error.throw (Time_Error.Error catch.payload.getMessage)) <|
parse_builtin text
parse_builtin id
## Get the unique timezone ID.
@ -156,16 +156,19 @@ type Time_Zone
zone_id : Text
zone_id self = @Builtin_Method "Time_Zone.zone_id"
## Convert the time zone to JSON.
## Convert to a JavaScript Object representing this Time_Zone.
> Example
Convert your system's current timezone to JSON.
Convert your system's current timezone to a JS_Object.
from Standard.Base import Time_Zone
example_to_json = Time_Zone.system.to_json
to_json : Json
to_json self = Json.from_pairs [["type", "Time_Zone"], ["id", self.zone_id]]
example_to_json = Time_Zone.system.to_js_object
to_js_object : JS_Object
to_js_object self =
type_pair = ["type", "Time_Zone"]
cons_pair = ["constructor", "new"]
JS_Object.from_pairs [type_pair, cons_pair, ["id", self.zone_id]]
## Compares two Zones for equality.
== : Time_Zone -> Boolean

View File

@ -166,6 +166,20 @@ type Vector a
at : Integer -> Any ! Index_Out_Of_Bounds
at self index = @Builtin_Method "Vector.at"
## Gets an element from the vector at a specified index (0-based).
If the index is invalid then `if_missing` is returned.
Arguments:
- index: The location in the vector to get the element from. The index is
also allowed be negative, then the elements are indexed from the back
of the vector, i.e. -1 will correspond to the last element.
- if_missing: The value to return if the index is out of bounds.
get : Integer -> Any -> Any
get self index ~if_missing=Nothing =
len = self.length
if index < -len || index >= len then if_missing else
self.at index
## Combines all the elements of the vector, by iteratively applying the
passed function with next elements of the vector.
@ -936,7 +950,7 @@ type Vector a
result = self.fold Map.empty existing->
item->
key = on item
if (existing.get_or_else key False) then existing else
if (existing.get key False) then existing else
builder.append item
existing.insert key True
if result.is_error then result else builder.to_vector

View File

@ -110,7 +110,7 @@ type Error
example_map_error =
map = Examples.map
map.get 10 . map_error (_ -> "The element 10 was not found.")
map.at 10 . map_error (_ -> "The element 10 was not found.")
map_error : (Error -> Error) -> Any
map_error self f = self.catch Any (x -> Error.throw (f x))

View File

@ -27,7 +27,7 @@ type Syntax_Error
Arguments:
- message: A description of the erroneous syntax.
Error message
Error message
@Builtin_Type
type Type_Error

View File

@ -0,0 +1,18 @@
import project.Data.Text.Text
type No_Such_Key
## UNSTABLE
An error for getting a missing value from a map.
Arguments:
- object: The object that was being accessed.
- key: The key that was asked for.
Error object key
## PRIVATE
Converts the error into a human-readable representation.
to_display_text : Text
to_display_text self =
"The object does not contain the key " + self.key.to_text + "."

View File

@ -61,15 +61,16 @@ export project.Warning.Warning
from project.Data.Boolean export Boolean, True, False
from project.Function export all
from project.Data.Numbers export Number, Integer, Decimal
from project.System.File_Format export File_Format, Plain_Text_Format, Plain_Text, Bytes, Infer, Auto_Detect
from project.System.File_Format export File_Format, Plain_Text_Format, Plain_Text, Bytes, Infer, Auto_Detect, JSON_File
import project.Data
import project.Data.Filter_Condition.Filter_Condition
import project.Data.Index_Sub_Range.Index_Sub_Range
import project.Data.Interval.Bound
import project.Data.Interval.Interval
import project.Data.Json.Json
import project.Data.Json.Extensions
import project.Data.Json.Json
import project.Data.Json.JS_Object
import project.Data.Locale.Locale
import project.Data.Maybe.Maybe
import project.Data.Noise
@ -119,8 +120,9 @@ export project.Data.Filter_Condition.Filter_Condition
export project.Data.Index_Sub_Range.Index_Sub_Range
export project.Data.Interval.Bound
export project.Data.Interval.Interval
export project.Data.Json.Json
export project.Data.Json.Extensions
export project.Data.Json.Json
export project.Data.Json.JS_Object
export project.Data.Locale.Locale
export project.Data.Maybe.Maybe
export project.Data.Ordering.Natural_Order

View File

@ -1,5 +1,4 @@
import project.Any.Any
import project.Data.Json.Json
import project.Data.Pair.Pair
import project.Data.Text.Text
import project.Data.Time.Duration.Duration
@ -261,15 +260,15 @@ type HTTP
> Example
Send a Post request with json data. NOTE: This example will make a network
request.
Send a Post request with json data.
NOTE: This example will make a network request.
import Standard.Examples
example_post_json =
json = Json.parse '{"key":"val"}'
json = '{"key":"val"}'
Examples.http_client.post_json "http://httpbin.org/post" json
post_json : (Text | URI) -> (Text | Json) -> Vector -> Response ! Request_Error
post_json : (Text | URI) -> Text -> Vector -> Response ! Request_Error
post_json self uri body_json (headers = []) =
new_headers = [Header.application_json]
req = Request.post uri (Request_Body.Json body_json) headers . with_headers new_headers
@ -305,15 +304,15 @@ type HTTP
- headers: Any headers for the options request.
> Example
Send a Put request with json data. NOTE: This example will make a
network request.
Send a Put request with json data.
NOTE: This example will make a network request.
import Standard.Examples
example_post_json =
json = Json.parse '{"key":"val"}'
json = '{"key":"val"}'
Examples.http_client.put_json "http://httpbin.org/post" json
put_json : (Text | URI) -> (Text | Json) -> Vector -> Response ! Request_Error
put_json : (Text | URI) -> Text -> Vector -> Response ! Request_Error
put_json self uri body_json (headers = []) =
new_headers = [Header.application_json]
req = Request.put uri (Request_Body.Json body_json) headers . with_headers new_headers
@ -434,10 +433,7 @@ type HTTP
Pair.new req (body_publishers.ofString text)
Request_Body.Json json ->
builder.header Header.application_json.name Header.application_json.value
json_body = case json of
_ : Text -> json
_ -> json.to_text
Pair.new req (body_publishers.ofString json_body)
Pair.new req (body_publishers.ofString json)
Request_Body.Form form ->
add_multipart form =
body_builder = Http_Utils.multipart_body_builder

View File

@ -1,3 +1,5 @@
import project.Data.Boolean.Boolean
type HTTP_Status_Code
## 100 Continue.
continue : HTTP_Status_Code
@ -164,3 +166,7 @@ type HTTP_Status_Code
Arguments:
- code: The numeric representation of the code.
Value code
## Does the status code represent a successful response?
is_success : Boolean
is_success self = self.code >= 200 && self.code < 300

View File

@ -1,6 +1,7 @@
import project.Data.Boolean.Boolean
import project.Data.Text.Text
from project.Data.Boolean import Boolean, False, True
polyglot java import org.enso.base.Http_Utils
type Header
@ -26,7 +27,9 @@ type Header
example_header_eq =
(Header.new "My_Header" "foo") == (Header.new "My_Header" "bar")
== : Header -> Boolean
== self that = (self.name.equals_ignore_case that.name) && self.value==that.value
== self that = case that of
_ : Header -> (self.name.equals_ignore_case that.name) && self.value==that.value
_ -> False
## ALIAS Build a Header

View File

@ -1,5 +1,4 @@
import project.Any.Any
import project.Data.Json.Json
import project.Data.Pair.Pair
import project.Data.Text.Text
import project.Data.Vector.Vector
@ -216,7 +215,7 @@ type Request
example_with_json =
Request.post (URI.parse "http://example.com") Request_Body.Empty |> _.with_json '{ "a": "b" }'
with_json : (Text | Json) -> Request
with_json : Text -> Request
with_json self json_body =
new_body = Request_Body.Json json_body
Request.Value self.method self.uri self.headers new_body . with_headers [Header.application_json]

View File

@ -1,4 +1,4 @@
import project.Data.Json.Json
import project.Data.Json.JS_Object
import project.Data.Vector.Vector
import project.Network.HTTP.Header.Header
import project.Network.HTTP.HTTP_Status_Code.HTTP_Status_Code
@ -54,15 +54,17 @@ type Response
code : HTTP_Status_Code
code self = HTTP_Status_Code.Value self.internal_http_response.statusCode
## Convert the response to JSON.
## Convert to a JavaScript Object representing this Response.
> Example
Convert a response to JSON. NOTE that this example will make a network
request.
Convert a response to JS_Object.
NOTE that this example will make a network request.
import Standard.Examples
example_to_json = Examples.get_response.to_json
to_json : Json
to_json self = Json.from_pairs [["type", "Response"], ["headers", self.headers], ["body", self.body], ["code", self.code]]
example_to_json = Examples.get_response.to_js_object
to_js_object : JS_Object
to_js_object self =
type_pair = ["type", "Response"]
cons_pair = ["constructor", "Value"]
JS_Object.from_pairs [type_pair, cons_pair, ["headers", self.headers], ["body", self.body], ["code", self.code]]

View File

@ -1,6 +1,11 @@
import project.Data.Boolean.Boolean
import project.Data.Json.Json
import project.Data.Json.JS_Object
import project.Data.Numbers.Number
import project.Data.Text.Extensions
import project.Data.Text.Text
import project.Data.Vector.Vector
import project.Nothing.Nothing
import project.System.File.File
import project.System.File.Write_Extensions
@ -23,17 +28,6 @@ type Response_Body
to_text : Text
to_text self = Text.from_utf_8 self.bytes
## Convert response body to Json.
> Example
Convert a response to JSON. NOTE: This example makes a network request.
import Standard.Examples
example_to_text = Examples.get_geo_data.to_json
to_json : Json
to_json self = Json.parse self.to_text
## Write response body to a File.
Arguments:
@ -52,3 +46,14 @@ type Response_Body
to_file self file =
self.bytes.write_bytes file
file
## Convert response body from JSON to the Enso data types.
> Example
Convert a response from JSON. NOTE: This example makes a network request.
import Standard.Examples
example_to_text = Examples.get_geo_data.parse_json
parse_json : JS_Object | Boolean | Number | Nothing | Text | Vector
parse_json self = Json.parse self.to_text

View File

@ -1,6 +1,6 @@
import project.Any.Any
import project.Data.Boolean.Boolean
import project.Data.Json.Json
import project.Data.Json.JS_Object
import project.Data.Text.Text
import project.Error.Common.Syntax_Error
import project.Error.Error
@ -28,7 +28,7 @@ type URI
Parse a URI from text.
Arguments:
- text: The text to parse as a URI.
- uri: The text to parse as a URI.
Throws a Syntax_Error when the text cannot be parsed as a URI.
@ -39,8 +39,8 @@ type URI
example_parse = URI.parse "http://example.com"
parse : Text -> URI ! Syntax_Error
parse text =
Panic.catch JException (URI.Value (Java_URI.create text)) caught_panic->
parse uri =
Panic.catch JException (URI.Value (Java_URI.create uri)) caught_panic->
Error.throw (Syntax_Error.Error ("URI syntax error: " + caught_panic.payload.getMessage))
## PRIVATE
@ -194,17 +194,20 @@ type URI
to_text : Text
to_text self = self.internal_uri.toString
## Convert a URI to JSON.
## Convert to a JavaScript Object representing this URI.
> Example
Convert a URI to JSON.
Convert a URI to a JS_Object.
import Standard.Base.Network.URI.URI
import Standard.Examples
example_to_json = Examples.uri.to_json
to_json : Json
to_json self = Json.String self.to_text
example_to_json = Examples.uri.to_js_object
to_js_object : JS_Object
to_js_object self =
type_pair = ["type", "URI"]
cons_pair = ["constructor", "parse"]
JS_Object.from_pairs [type_pair, cons_pair, ["uri", self.to_text]]
## Check if this URI is equal to another URI.

View File

@ -1,4 +1,6 @@
import project.Any.Any
import project.Data.Numbers.Integer
import project.Data.Text.Text
from project.Data.Boolean import Boolean, True
@ -27,3 +29,14 @@ type Nothing
"Hello".if_nothing ""
if_nothing : Any -> Any
if_nothing self ~function = function
## Get a value for the key of the object.
As `Nothing` has no keys, returns `if_missing`.
Arguments:
- key: The key to get.
- if_missing: The value to return if the key is not found.
get : Text | Integer -> Any -> Nothing
get self key ~if_missing=Nothing =
_ = key
if_missing

View File

@ -1,3 +1,4 @@
import project.Any.Any
import project.Data.Text.Text
import project.Nothing.Nothing
@ -11,6 +12,7 @@ polyglot java import org.enso.base.Environment_Utils
Arguments:
- key: The name of the environment variable to look up.
- if_missing: The default fallback value.
> Example
Look up the value of the `PATH` environment variable.
@ -18,25 +20,8 @@ polyglot java import org.enso.base.Environment_Utils
import Standard.Base.System.Environment
example_get = Environment.get "PATH"
get : Text -> Text | Nothing
get key = Environment_Utils.get_environment_variable key
## UNSTABLE
Returns a value of a specified environment variable or the provided default
value if such variable is not defined.
Arguments:
- key: The name of the environment variable to look up.
- default: The default fallback value.
> Example
Look up the value of the `FOOBAR` environment variable.
import Standard.Base.System.Environment
example_get_or_else = Environment.get_or_else "FOOBAR" "my default"
get_or_else : Text -> Text -> Text
get_or_else key ~default = case get key of
Nothing -> default
value -> value
get : Text -> Any -> Text | Nothing
get key ~if_missing=Nothing =
case Environment_Utils.get_environment_variable key of
Nothing -> if_missing
value -> value

View File

@ -1,7 +1,7 @@
import project.Any.Any
import project.Data.Array.Array
import project.Data.Index_Sub_Range.Index_Sub_Range
import project.Data.Json.Json
import project.Data.Json.JS_Object
import project.Data.Numbers.Integer
import project.Data.Text.Encoding.Encoding
import project.Data.Text.Extensions
@ -242,16 +242,19 @@ type File
resolve : (Text | File) -> File
resolve self = @Builtin_Method "File.resolve"
## A File to JSON conversion.
## Convert the file descriptor to a JS_Object.
> Example
Get a JSON representation of a file.
Convert a file to a JS_Object.
import Standard.Examples
example_to_json = Examples.csv.to_json
to_json : Json
to_json self = Json.from_pairs [["type", "File"], ["path", self.path]]
example_to_json = Examples.csv.to_js_object
to_js_object : JS_Object
to_js_object self =
type_pair = ["type", "File"]
cons_pair = ["constructor", "new"]
JS_Object.from_pairs [type_pair, cons_pair, ["path", self.path]]
## Checks whether the file exists.

View File

@ -1,10 +1,16 @@
import project.Any.Any
import project.Data.Json.Json
import project.Data.Text.Encoding.Encoding
import project.Data.Text.Extensions
import project.Data.Text.Text
import project.Data.Vector.Vector
import project.Error.Error
import project.Error.File_Error.File_Error
import project.Error.Problem_Behavior.Problem_Behavior
import project.Error.Unimplemented.Unimplemented
import project.Function.Function
import project.Network.HTTP.Response.Response
import project.Network.URI.URI
import project.Nothing.Nothing
import project.System.File.File
@ -16,6 +22,18 @@ polyglot java import org.enso.base.file_format.FileFormatSPI
format_types : Vector
format_types = Vector.from_polyglot_array (FileFormatSPI.get_types False)
## PRIVATE
Gets the first format not returning Nothing from the callback
get_format : Function -> Any | Nothing
get_format callback =
reader idx =
if idx >= format_types.length then Nothing else
format = callback (format_types.at idx)
if format.is_nothing.not then format else
@Tail_Call reader (idx + 1)
reader 0
type Auto_Detect
## PRIVATE
Implements the `File.read` for this `File_Format`
@ -28,12 +46,13 @@ type Auto_Detect
## PRIVATE
get_format : File -> Any | Nothing
get_format file =
reader idx =
if idx >= format_types.length then Nothing else
format = format_types.at idx . for_file file
if format.is_nothing.not then format else
@Tail_Call reader (idx + 1)
reader 0
get_format f-> f.for_file file
## PRIVATE
get_web_parser : Text -> URI -> Any | Nothing
get_web_parser content_type uri =
get_format f-> f.for_web content_type uri
type File_Format
## PRIVATE
@ -52,12 +71,31 @@ type Plain_Text_Format
".log" -> Plain_Text_Format.Plain_Text
_ -> Nothing
## If the File_Format supports reading from the web response, return a configured instance.
for_web : Text -> URI -> Plain_Text_Format | Nothing
for_web content_type _ =
parts = content_type.split ";" . map .trim
case parts.first of
"text/plain" ->
charset_part = parts.find (x->x.starts_with "charset=")
encoding = if charset_part.is_error then Encoding.utf_8 else
parsed = Encoding.from_name (charset_part.drop 8)
if parsed.is_error then Encoding.utf_8 else parsed
Plain_Text_Format.Plain_Text encoding
_ -> Nothing
## PRIVATE
Implements the `File.read` for this `File_Format`
read : File -> Problem_Behavior -> Any
read self file on_problems =
file.read_text self.encoding on_problems
## PRIVATE
Implements the `Data.parse` for this `File_Format`
read_web : Response -> Any
read_web self response =
Text.from_bytes response.body.bytes self.encoding
type Bytes
## If the File_Format supports reading from the file, return a configured instance.
for_file : File -> Bytes | Nothing
@ -66,11 +104,46 @@ type Bytes
".dat" -> Bytes
_ -> Nothing
## If the File_Format supports reading from the web response, return a configured instance.
As `Bytes`, does not support reading from the web returns `Nothing`.
for_web : Text -> URI -> Bytes | Nothing
for_web _ _ = Nothing
## PRIVATE
Implements the `File.read` for this `File_Format`
read : File -> Problem_Behavior -> Any
read self file _ =
file.read_bytes
type JSON_File
## If the File_Format supports reading from the file, return a configured instance.
for_file : File -> JSON_File | Nothing
for_file file =
case file.extension of
".json" -> JSON_File
".geojson" -> JSON_File
_ -> Nothing
## If the File_Format supports reading from the web response, return a configured instance.
for_web : Text -> URI -> JSON_File | Nothing
for_web content_type _ =
first = content_type.split ';' . first . trim
case first of
"application/json" -> JSON_File
_ -> Nothing
## PRIVATE
Implements the `File.read` for this `File_Format`
read : File -> Problem_Behavior -> Any
read self file _ =
text = file.read_text
Json.parse text
## PRIVATE
Implements the `Data.parse` for this `File_Format`
read_web : Response -> Any
read_web self response =
response.body.parse_json
## A setting to infer the default behaviour of some option.
type Infer

View File

@ -87,7 +87,7 @@ ssl_mode_to_jdbc_properties use_ssl = case use_ssl of
[Pair.new 'sslmode' 'verify-full', Pair.new 'sslrootcert' (File.new cert_file).absolute.path]
## PRIVATE
default_postgres_host = Environment.get_or_else "PGHOST" "localhost"
default_postgres_host = Environment.get "PGHOST" "localhost"
## PRIVATE
default_postgres_port =
@ -97,4 +97,4 @@ default_postgres_port =
port -> Integer.parse port . catch Number_Parse_Error.Error (_->hardcoded_port)
## PRIVATE
default_postgres_database = Environment.get_or_else "PGDATABASE" ""
default_postgres_database = Environment.get "PGDATABASE" ""

View File

@ -63,9 +63,9 @@ type Column
## UNSTABLE
Converts this column to JSON.
to_json : Json
to_json self = self.to_sql.to_json
Converts this column to JS_Object representation.
to_js_object : JS_Object
to_js_object self = self.to_sql.to_js_object
## UNSTABLE

View File

@ -70,13 +70,13 @@ type SQL_Statement
## UNSTABLE
Returns a JSON representation of the statement.
to_json : Json
to_json self =
Returns a JS_Object representation of the statement.
to_js_object : JS_Object
to_js_object self =
jsonify fragment = case fragment of
SQL_Fragment.Code_Part code -> Json.from_pairs [["sql_code", code]]
SQL_Fragment.Code_Part code -> JS_Object.from_pairs [["sql_code", code]]
SQL_Fragment.Interpolation typ obj ->
inner = Json.from_pairs [["value", obj], ["expected_sql_type", typ.name]]
Json.from_pairs [["sql_interpolation", inner]]
fragments = Json.Array (self.internal_fragments.map jsonify)
Json.from_pairs [["query", fragments]]
inner = JS_Object.from_pairs [["value", obj], ["expected_sql_type", typ.name]]
JS_Object.from_pairs [["sql_interpolation", inner]]
fragments = self.internal_fragments.map jsonify
JS_Object.from_pairs [["query", fragments]]

View File

@ -78,12 +78,11 @@ type Table
## UNSTABLE
Converts this table into a JSON structure.
to_json : Json
to_json self = case self.internal_columns.is_empty of
True ->
Json.from_pairs [["query", Nothing], ["message", "The table has no columns so a query cannot be generated."]]
False -> self.to_sql.to_json
Converts this column to JS_Object representation.
to_js_object : JS_Object
to_js_object self = case self.internal_columns.is_empty of
True -> JS_Object.from_pairs [["query", Nothing], ["message", "The table has no columns so a query cannot be generated."]]
False -> self.to_sql.to_js_object
## UNSTABLE
@ -1216,9 +1215,9 @@ display_dataframe df indices_count all_rows_count format_terminal =
combine_names left_names right_names left_suffix right_suffix =
make_count_map names =
map = names.fold Map.empty acc-> name->
count = acc.get_or_else name 0 + 1
count = acc.get name 0 + 1
acc.insert name count
name-> map.get_or_else name 0
name-> map.get name 0
original_names_count = make_count_map left_names+right_names
add_suffix_if_necessary suffix name = case original_names_count name > 1 of
True -> [name, name+suffix]

View File

@ -250,7 +250,7 @@ generate_expression dialect expr = case expr of
dialect.wrap_identifier origin ++ '.' ++ dialect.wrap_identifier name
SQL_Expression.Constant sql_type value -> SQL.interpolation sql_type value
SQL_Expression.Operation kind arguments ->
op = dialect.operation_map.get_or_else kind (Error.throw <| Unsupported_Database_Operation.Error kind)
op = dialect.operation_map.get kind (Error.throw <| Unsupported_Database_Operation.Error kind)
parsed_args = arguments.map (generate_expression dialect)
op parsed_args
query : Query -> generate_query dialect query

View File

@ -99,12 +99,12 @@ json_text = """
]
## Example JSON for working with.
json : Json
json : Vector
json = Json.parse json_text
## An example JSON object.
json_object : Json
json_object = json.items.head
json_object : JS_Object
json_object = json.first
## An example cons-list.
list : List
@ -200,11 +200,11 @@ get_boolean : Boolean
get_boolean = False
## A simple small piece of JSON that can easily be converted into a table.
simple_table_json : Json
simple_table_json : Text
simple_table_json =
row_1 = Json.from_pairs [['foo', 20], ['bar', 'baz'], ['baz', False]]
row_2 = Json.from_pairs [['bar', 'xyz'], ['baz', True]]
row_3 = Json.from_pairs [['baz', False], ['foo', 13]]
row_1 = JS_Object.from_pairs [['foo', 20], ['bar', 'baz'], ['baz', False]]
row_2 = JS_Object.from_pairs [['bar', 'xyz'], ['baz', True]]
row_3 = JS_Object.from_pairs [['baz', False], ['foo', 13]]
[row_1, row_2, row_3].to_json
## The headers for the columns in the JSON table `simple_table_json`.
@ -212,7 +212,7 @@ simple_table_json_headers : Vector Text
simple_table_json_headers = ["foo", "bar", "baz"]
## Some simple GeoJSON.
geo_json : Json
geo_json : JS_Object
geo_json = Json.parse <| '''
{
"type": "FeatureCollection",

View File

@ -1,106 +1,27 @@
from Standard.Base import all
from Standard.Table import Table
from Standard.Table import all
import Standard.Table.Errors.Invalid_JSON_Format
## PRIVATE
import project.Helpers
import project.Helpers.Object_Type
A type of GeoJSON object.
type Object_Type
## PRIVATE
A Geo JSON feature.
Feature
## PRIVATE
A Geo JSON feature collection.
Feature_Collection
## PRIVATE
Convert GeoJSON object type to Text.
to_text : Text
to_text self = case self of
Object_Type.Feature -> "Feature"
Object_Type.Feature_Collection -> "FeatureCollection"
## PRIVATE
Get the type field of a GeoJSON object.
Json.get_type : Any
Json.get_type self = case self of
Json.Object object ->
object.get_or_else "type" Nothing.to_json . unwrap
## PRIVATE
Get key-value pairs of a Feature GeoJSON object.
Json.get_feature_row : Map
Json.get_feature_row self =
properties_row = self.get "properties" . get_properties_row
geometry_row = self.get "geometry" . get_geometry_row
geometry_row.fold_with_key properties_row acc-> k-> v->
acc.insert k v
## PRIVATE
Get column key-value pairs of a feature's "properties" object.
Json.get_properties_row : Map
Json.get_properties_row self = case self of
Json.Object properties -> properties.map p-> case p of
Json.Object _ -> Nothing.to_json
_ -> p
## PRIVATE
Get column key-value pairs of a feature's "geometry" object.
Json.get_geometry_row : Map
Json.get_geometry_row self = case self of
Json.Object fields ->
geometry_type = fields.get_or_else "type" Nothing
if geometry_type == "Point".to_json then self.get_point_row else Map.empty
## PRIVATE
Get column key-value pairs of a "Point" geometry object.
Json.get_point_row : Map
Json.get_point_row self =
fields = ["longitude", "latitude", "elevation"]
case self.get "coordinates" of
Json.Array coordinates ->
Map.from_vector (fields.zip coordinates)
_ -> Map.empty
Json.geo_json_to_table : Vector -> Table
Json.geo_json_to_table self fields=Nothing = case self of
Json.Array items ->
rows = items.map item->
case item of
Json.Object _ ->
if item.get_type != Object_Type.Feature.to_text then Vector.fill fields.length Nothing else
row = item.get_feature_row
fields.map n-> row.get n . unwrap . catch Any (_ -> Nothing)
_ -> Vector.fill fields.length Nothing
cols = fields.map_with_index i-> n->
[n, rows.map (_.at i)]
geo_json_to_table : (Vector | JS_Object) -> Table
geo_json_to_table geo_json fields=Nothing = case geo_json of
_ : Vector ->
rows = geo_json.map item-> case item of
_ : JS_Object ->
if item.get "type" != Object_Type.Feature.to_text then Vector.fill fields.length Nothing else
row = Helpers.get_feature_row item
fields.map row.get
_ -> Vector.fill fields.length Nothing
cols = fields.map_with_index i->n-> [n, rows.map (_.at i)]
Table.new cols
Json.Object _ ->
if self.get_type != Object_Type.Feature_Collection.to_text then Error.throw (Invalid_JSON_Format.Error self "not being a feature collection") else
case self.get "features" of
Json.Array items ->
feature_rows = items.map .get_feature_row
column_names = case fields of
Nothing ->
column_names_row = feature_rows.fold Map.empty acc-> row->
row.fold_with_key acc a-> k-> _->
a.insert k 0
column_names_row.keys
_ -> fields
rows = feature_rows.map row->
column_names.map n-> row.get n . unwrap . catch Any (_ -> Nothing)
cols = column_names.map_with_index i-> n->
[n, rows.map (_.at i)]
Table.new cols
_ -> Error.throw (Invalid_JSON_Format.Error self "not having the 'features' key.")
_ : JS_Object ->
if geo_json.get "type" != Object_Type.Feature_Collection.to_text then Error.throw (Invalid_JSON_Format.Error geo_json "not being a feature collection") else
features = geo_json.get "features"
case features of
_ : Vector ->
feature_rows = features.map Helpers.get_feature_row
Table.from_objects feature_rows fields
_ -> Error.throw (Invalid_JSON_Format.Error geo_json "not having the 'features' key.")

View File

@ -0,0 +1,43 @@
from Standard.Base import all
## PRIVATE
A type of GeoJSON object, mirroring the "type" member of RFC 7946 objects.
type Object_Type
## PRIVATE
A GeoJSON Feature object (a single spatially bounded thing).
Feature
## PRIVATE
A GeoJSON FeatureCollection object (a list of Feature objects).
Feature_Collection
## PRIVATE
Convert a GeoJSON object type to the exact Text value used in the JSON
"type" field, suitable for direct comparison against parsed input.
to_text : Text
to_text self = case self of
Object_Type.Feature -> "Feature"
Object_Type.Feature_Collection -> "FeatureCollection"
## PRIVATE
Build the row for a GeoJSON Feature: the key-value pairs of its
"properties" object merged with the flattened coordinate pairs of its
"geometry" object (see `get_geometry_row`).
Arguments:
- feature: a JS_Object expected to carry "properties" and "geometry" keys.
Returns a JS_Object combining both sets of pairs.
NOTE(review): if a geometry-derived key (e.g. "longitude") also occurs in
"properties", which duplicate wins depends on `JS_Object.from_pairs` —
confirm before relying on it.
get_feature_row : JS_Object -> JS_Object
get_feature_row feature =
properties_row = feature.get "properties" . to_vector
geometry_row = get_geometry_row feature
JS_Object.from_pairs (geometry_row + properties_row)
## PRIVATE
Get column key-value pairs of a feature's "geometry" object.
Only "Point" geometries are flattened (via `get_point_row`); any other
geometry type yields an empty Vector of pairs.
NOTE(review): when the "geometry" key is absent, `feature.get` returns
Nothing and `Nothing.get "type"` is presumably Nothing (per this commit's
new `get` convention), so the `else` branch returns [] — confirm this is
the intended fallback rather than an error.
get_geometry_row : JS_Object -> Any
get_geometry_row feature =
geometry = feature.get "geometry"
if geometry.get "type" == "Point" then get_point_row geometry else []
## PRIVATE
Get column key-value pairs of a "Point" geometry object.
Pairs the coordinate values with the names "longitude", "latitude" and
"elevation" — GeoJSON positions are ordered [longitude, latitude,
(elevation)] per RFC 7946. Returns [] when "coordinates" is missing or is
not a Vector.
NOTE(review): assumes `Vector.zip` truncates to the shorter argument, so a
2-element position simply omits the "elevation" pair — confirm.
get_point_row : JS_Object -> Vector
get_point_row value =
fields = ["longitude", "latitude", "elevation"]
case value.get "coordinates" of
vector : Vector -> fields.zip vector
_ -> []

View File

@ -2,7 +2,7 @@ from Standard.Base import all
from Standard.Table import Table
import project.Geo_Json
export project.Geo_Json
from project.Geo_Json export geo_json_to_table
## UNSTABLE

View File

@ -14,17 +14,17 @@ type Histogram
## UNSTABLE
Convert histogram data to Json.
Convert histogram data to a JS_Object.
> Example
Convert a histogram to JSON.
Convert a histogram to a JavaScript object.
import Standard.Examples
example_to_json =
example_to_js_object =
histo = Examples.image.histogram 0
histo.to_json
to_json : Json
to_json self =
bins = Json.from_pairs [["bins", self.data]]
Json.from_pairs [["data", bins]]
histo.to_js_object
to_js_object : JS_Object
to_js_object self =
bins = JS_Object.from_pairs [["bins", self.data]]
JS_Object.from_pairs [["data", bins]]

View File

@ -421,18 +421,18 @@ type Image
## UNSTABLE
Convert the image to Json.
Convert the image to a JS_Object.
> Example
Convert an image to JSON.
Convert an image to a JavaScript object.
import Standard.Examples
example_to_json = Examples.image.to_json
to_json : Json
to_json self =
example_to_json = Examples.image.to_js_object
to_js_object : JS_Object
to_js_object self =
base64 = Java_Image.to_base64 self.opencv_mat
Json.from_pairs [["mediaType", "image/png"], ["base64", base64]]
JS_Object.from_pairs [["mediaType", "image/png"], ["base64", base64]]
## UNSTABLE

View File

@ -419,16 +419,16 @@ type Matrix
## UNSTABLE
Convert this matrix to Json.
Convert this matrix to a JS_Object.
> Example
Convert a matrix to JSON.
Convert a matrix to a JavaScript object.
import Standard.Examples
example_to_json = Examples.matrix.to_json
to_json : Json
to_json self = Json.String self.opencv_mat.to_text
example_to_json = Examples.matrix.to_js_object
to_js_object : Text
to_js_object self = self.opencv_mat.to_text
## PRIVATE

View File

@ -1,4 +1,5 @@
from Standard.Base import all
import Standard.Base.Data.Array_Proxy.Array_Proxy
import Standard.Base.Data.Ordering.Comparator
import Standard.Base.Error.Common.Index_Out_Of_Bounds
import Standard.Base.Error.Illegal_Argument.Illegal_Argument
@ -1045,28 +1046,21 @@ type Column
## UNSTABLE
Converts this column to JSON.
Converts this column to JS_Object representation.
> Example
Get a JSON representation of the column.
Get a JavaScript representation of the column.
import Standard.Examples
example_to_json = Examples.integer_column.to_json
to_json : Json
to_json self =
col = self.java_column
name = col.getName
storage = col.getStorage
json_factory = case self.storage_type of
Storage.Text -> Json.String
Storage.Integer -> Json.Number
Storage.Decimal -> Json.Number
Storage.Boolean -> Json.Boolean
_ -> .to_json
storage_json = Json.Array (storage_to_json storage json_factory)
fields = Map.singleton "name" (Json.String name) . insert "data" storage_json
Json.Object fields
example_to_json = Examples.integer_column.to_js_object
to_js_object : JS_Object
to_js_object self =
name = self.java_column.getName
storage = self.java_column.getStorage
storage_proxy = Array_Proxy.new storage.size i-> storage.getItem i
storage_json = Vector.from_polyglot_array storage_proxy
JS_Object.from_pairs [["name", name], ["data", storage_json]]
## Efficiently joins two tables based on either the index or the specified
key column.
@ -1108,19 +1102,6 @@ type Column
to_table : Table
to_table self = Table.Value self.java_column.toTable
## UNSTABLE
ADVANCED
Shows a JSON serialization of a truncated version of this column, for the
benefit of visualization in the IDE.
to_default_visualization_data : Text
to_default_visualization_data self =
size = ['length', self.length]
name = ['name', self.name]
max_data = 100
data = ['data', self.to_vector.take (First max_data)]
Json.from_pairs [size, name, data] . to_text
## UNSTABLE
Sorts the column according to the specified rules.
@ -1348,19 +1329,6 @@ run_vectorized_unary_op column name fallback_fn on_missing=Nothing =
rs = s.map name fallback_fn on_missing
Column.Value (Java_Column.new "Result" ix rs)
## PRIVATE
A helper function for converting a column storage to JSON.
Arguments:
- storage: The storage to convert to JSON.
- factory: The factory function for converting the storage.
storage_to_json : Storage -> (Any -> Json) -> Json
storage_to_json storage factory =
Vector.new storage.size ix->
if storage.isNa ix then Json.Null else
factory (storage.getItem ix)
## PRIVATE
Gets a textual representation of the item at position `ix` in `column`.

View File

@ -1,7 +1,7 @@
from Standard.Base import all
import Standard.Base.Data.Array_Proxy.Array_Proxy
from project.Data.Table import Table
import project.Data.Table.Table
## Represents a single row of some in-memory Table.
type Row
@ -20,17 +20,6 @@ type Row
to_vector : Vector
to_vector self = Vector.from_polyglot_array (Array_Proxy.from_proxy_object self)
## Converts this row into a JSON structure.
to_json : Json
to_json self = self.to_vector.to_json
## UNSTABLE
ADVANCED
Returns a Text used to display this table in the IDE by default.
Returns a JSON object containing useful metadata and previews of column
values.
to_default_visualization_data : Text
to_default_visualization_data self =
self.to_vector.to_default_visualization_data
## Converts this row into a JS_Object.
to_js_object : Vector
to_js_object self = self.to_vector.to_js_object

View File

@ -157,41 +157,24 @@ type Table
IO.println (self.display show_rows format_terminal=True)
IO.println ''
## Converts this table to a JSON structure.
## Converts this table into a JS_Object.
> Example
Convert a table to a corresponding JSON representation.
Convert a table to its corresponding JS_Object representation.
import Standard.Examples
example_to_json = Examples.inventory_table.to_json
to_json : Json
to_json self =
example_to_json = Examples.inventory_table.to_js_object
to_js_object : JS_Object
to_js_object self =
index_prep = case self.index.catch No_Index_Set_Error (_->Nothing) of
Nothing -> []
index -> [index]
cols = index_prep + self.columns
rows = 0.up_to self.row_count . map row->
vals_kv = cols.map col-> [col.name, col.at row]
Json.from_pairs vals_kv
rows.to_json
## UNSTABLE
ADVANCED
Returns a Text used to display this table in the IDE by default.
Returns a JSON object containing useful metadata and previews of column
values.
to_default_visualization_data : Text
to_default_visualization_data self =
max_size = 10
row_count = ['number_of_rows', self.row_count]
cols = self.columns.map c->
name = c.name
items = c.to_vector.take (First max_size)
Json.from_pairs [['name', name], ['data', items]]
Json.from_pairs [row_count, ['columns', cols]] . to_text
JS_Object.from_pairs vals_kv
rows
## Returns the column with the given name.
@ -815,7 +798,7 @@ type Table
column
new_columns = self.columns.map column->
is_selected = selected_names.get_or_else column.name False
is_selected = selected_names.get column.name False
if is_selected then transform column else column
result = Table.new new_columns
@ -1237,7 +1220,7 @@ type Table
selected_names = Map.from_vector (id_columns.map column-> [column.name, True])
data = columns_helper.internal_columns.filter column->(selected_names.get_or_else column.name False . not)
data = columns_helper.internal_columns.filter column->(selected_names.get column.name False . not)
java_data = data.map .java_column
java_id = id_columns.map .java_column
@ -1362,7 +1345,7 @@ type Table
example_to_json = Examples.inventory_table.write_json (enso_project.data / 'example.json')
write_json : File -> Nothing
write_json self file = self.to_json.to_text.write file
write_json self file = self.to_json.write file
## This function writes a table from memory into a file.

View File

@ -1,4 +1,5 @@
from Standard.Base import all
import Standard.Base.Error.Common.Type_Error
import Standard.Base.Error.Unimplemented.Unimplemented
import project.Data.Table.Table
@ -17,21 +18,17 @@ Text.from (that : Table) (format:Delimited_Format = Delimited_Format.Delimited '
_ : Delimited_Format -> Delimited_Writer.write_text that format
_ -> Unimplemented.throw "Text.from is currently only implemented for Delimited_Format."
## ALIAS To Table
Converts a JSON array into a dataframe, by looking up the requested keys
from each item.
## Converts an object or a Vector of objects into a Table, by looking up the
requested fields from each item.
Arguments:
- fields: a vector of texts representing the names of fields to look up.
- value: The object or Vector of objects to convert to a table.
If a Vector or Array, then each item is added as a row.
Otherwise, treated as a single row.
- fields: a Vector of Text representing the names of fields to look up.
If `Nothing` then all fields found are added.
The function assumes the elements have one of the following structures:
- a JSON object containing the requested keys. In case an item is not an
object, or the request key does not exist, the relevant values of the table
will be set to `Nothing`.
> Example Converts a JSON array containing key-value pairs into a table for the
provided headers.
> Example Converts a set of objects into a table using the provided headers.
from Standard.Table import Table
import Standard.Examples
@ -39,13 +36,55 @@ Text.from (that : Table) (format:Delimited_Format = Delimited_Format.Delimited '
example_to_table =
json = Examples.simple_table_json
headers = Examples.simple_table_json_headers
json.to_table headers
Json.to_table : Vector -> Table
Json.to_table self fields=Nothing = case self of
Json.Array items ->
rows = items.map item-> case item of
Json.Object row -> fields.map n-> row.get n . unwrap . catch Any (_ -> Nothing)
_ -> Vector.fill fields.length Nothing
cols = fields.map_with_index i-> n-> [n, rows.map (_.at i)]
Table.new cols
_ -> Invalid_JSON_Format.throw "Json.to_table expects a JSON array."
Table.from_objects json headers
Table.from_objects : Any -> Vector | Nothing -> Table ! Type_Error
Table.from_objects value fields=Nothing =
# Single-row table for a scalar: the value lands in the "Value" column
# (when requested); every other requested column is filled with Nothing.
table_for_value v =
column_names = fields.if_nothing ["Value"]
column_values = column_names.map f-> if f == "Value" then v else Nothing
Table.from_rows column_names [column_values]
# The field names contributed by one Vector element; scalars and Nothing
# contribute the synthetic "Value" column.
get_fields v = case v of
_ : JS_Object -> v.field_names
Nothing -> ["Value"]
_ : Number -> ["Value"]
_ : Boolean -> ["Value"]
_ : Text -> ["Value"]
_ -> Error.throw (Type_Error.Error Any "Invalid item within Vector for Table.from_objects. Currently only JS_Object, Number, Boolean, Text and Nothing are supported.")
# The cell value of `v` for the column named `field`.
get_value v field = case v of
_ : JS_Object -> v.get field
_ -> if field == "Value" then v else Nothing
case value of
Nothing -> table_for_value value
_ : Number -> table_for_value value
_ : Boolean -> table_for_value value
_ : Text -> table_for_value value
_ : JS_Object ->
# A single object becomes a one-row table with one column per field.
field_names = fields.if_nothing value.field_names
values = field_names.map value.get
Table.from_rows field_names [values]
_ : Vector ->
len = value.length
## For every record, accumulate values into a Map of column name to
Builder. Rows seen before a column first appears are back-filled
with Nothing so all columns stay aligned by row index.
column_map = value.fold_with_index Map.empty current-> i-> v->
field_names = fields.if_nothing (get_fields v)
field_names.fold current c->f->
data = c.get f (Vector.new_builder len)
data.length.up_to i . each _-> data.append Nothing
data.append (get_value v f)
c.insert f data
used_fields = fields.if_nothing column_map.keys
## Convert Builders to Vectors, padding short columns with Nothing up
to the full row count.
used_values = used_fields.map f->
data = column_map.get f (Vector.new_builder len)
data.length.up_to len . each _-> data.append Nothing
data.to_vector
Table.new (used_fields.zip used_values)
_ : Array -> Table.from_objects (Vector.from_polyglot_array value) fields
_ -> Error.throw (Type_Error.Error Any "Invalid value for Table.from_objects. Currently must be one of JS_Object, Vector, Array, Number, Boolean, Text and Nothing are supported.")

View File

@ -1,4 +1,5 @@
from Standard.Base import all
import Standard.Base.Network.HTTP.Response.Response
import project.Data.Table.Table
import project.Data.Data_Formatter.Data_Formatter
@ -52,14 +53,37 @@ type Delimited_Format
for_file file =
case file.extension of
".csv" -> Delimited_Format.Delimited ','
".tab" -> Delimited_Format.Delimited '\t'
".tsv" -> Delimited_Format.Delimited '\t'
_ -> Nothing
## If the File_Format supports reading from the web response, return a
configured instance.
Inspects the HTTP Content-Type header: "text/csv" and
"text/tab-separated-values" map to comma- and tab-delimited formats
respectively; anything else returns Nothing. An optional
"charset=<name>" parameter selects the text encoding, falling back to
UTF-8 when absent or unrecognised.
for_web : Text -> URI -> Delimited_Format | Nothing
for_web content_type _ =
# Content-Type is "media/type; param=value; ..." — split on ';'.
parts = content_type.split ";" . map .trim
charset_part = parts.find (x->x.starts_with "charset=")
encoding = if charset_part.is_error then Encoding.utf_8 else
# "charset=" is 8 characters; the remainder is the encoding name.
parsed = Encoding.from_name (charset_part.drop 8)
if parsed.is_error then Encoding.utf_8 else parsed
case parts.first of
"text/csv" -> Delimited_Format.Delimited ',' encoding
"text/tab-separated-values" -> Delimited_Format.Delimited '\t' encoding
_ -> Nothing
## Implements the `File.read` for this `File_Format`
read : File -> Problem_Behavior -> Any
read self file on_problems =
Delimited_Reader.read_file self file on_problems
## PRIVATE
Implements the `Data.parse` for this `File_Format`: decodes the raw
response body bytes using this format's configured encoding, then parses
the resulting text as a delimited table, reporting problems as warnings.
read_web : Response -> Any
read_web self response =
text = Text.from_bytes response.body.bytes self.encoding
Delimited_Reader.read_text text self Report_Warning
## Implements the `Table.write` for this `File_Format`.
write_table : File -> Table -> Existing_File_Behavior -> Match_Columns -> Problem_Behavior -> Nothing
write_table self file table on_existing_file match_columns on_problems =

View File

@ -49,6 +49,12 @@ type Excel_Format
if is_xls.is_error then Nothing else
Excel_Format.Excel xls_format=is_xls
## If the File_Format supports reading from the web response, return a
configured instance. Always Nothing for Excel.
for_web : Text -> URI -> Excel_Format | Nothing
for_web _ _ =
## Currently not loading Excel files automatically as these need to be loaded as a connection.
Nothing
## Implements the `File.read` for this `File_Format`
read : File -> Problem_Behavior -> Any
read self file on_problems =

View File

@ -84,7 +84,7 @@ type Table_Column_Helper
selection = self.select_columns_helper selector reorder=False problem_builder
selected_names = Map.from_vector (selection.map column-> [column.name, True])
result = self.internal_columns.filter column->
should_be_removed = selected_names.get_or_else column.name False
should_be_removed = selected_names.get column.name False
should_be_removed.not
if result.is_empty then
problem_builder.report_other_warning No_Output_Columns
@ -115,7 +115,7 @@ type Table_Column_Helper
problem_builder.attach_problems_before on_problems <|
selected_names = Map.from_vector (selection.map column-> [column.name, True])
other_columns = self.internal_columns.filter column->
is_selected = selected_names.get_or_else column.name False
is_selected = selected_names.get column.name False
is_selected.not
result = case position of
Position.Before_Other_Columns -> selection + other_columns
@ -266,7 +266,7 @@ rename_columns internal_columns mapping on_problems =
index_map = Map.from_vector <| good_indices.map p->[p.at 0, map.get (p.at 1)]
new_names = 0.up_to col_count . map i->
target = index_map.get_or_else i Nothing
target = index_map.get i Nothing
if target.is_nothing then Nothing else unique.make_unique target
new_names
@ -338,7 +338,7 @@ select_indices_reordering vector indices =
select_indices_preserving_order vector indices =
indices_to_keep = Map.from_vector (indices.map i-> [i, True])
vector.filter_with_index ix-> _->
indices_to_keep.get_or_else ix False
indices_to_keep.get ix False
## PRIVATE
Returns the actual position in the array that the index points to.
@ -393,7 +393,7 @@ validate_unique vector problem_callback on=(x->x) =
acc = vector.fold [Map.empty, Vector.new_builder, Vector.new_builder] acc-> item->
existing = acc.at 0
key = on item
already_present = existing.get_or_else key False
already_present = existing.get key False
case already_present of
True -> [existing, acc.at 1, acc.at 2 . append item]
False -> [existing.insert key True, acc.at 1 . append item, acc.at 2]
@ -499,7 +499,7 @@ transform_columns_by_index internal_columns index_selectors problem_builder inde
good_indices = validate_indices internal_columns.length index_selectors problem_builder index_extractor
selectors_map = Map.from_vector good_indices
internal_columns.map_with_index i-> column->
associated_selector = selectors_map.get_or_else i Nothing
associated_selector = selectors_map.get i Nothing
Column_Transform_Element.Value column associated_selector
## PRIVATE

View File

@ -10,14 +10,14 @@ import project.Helpers
Arguments:
- table: the Table to be visualized.
json_from_table : Table -> Json
json_from_table : Table -> JS_Object
json_from_table table =
names = ['label', 'latitude', 'longitude', 'radius', 'color']
pairs = names.map <| name->
column = table.lookup_ignore_case name
column.when_valid ["df_" + name, column.to_vector] . catch Nothing
Json.from_pairs <| pairs.filter (x -> x.is_nothing.not)
JS_Object.from_pairs <| pairs.filter (x -> x.is_nothing.not)
## PRIVATE
@ -29,7 +29,9 @@ json_from_table table =
process_to_json_text : Any -> Text
process_to_json_text value =
json = case value of
Table.Value _ -> json_from_table value
Table.Value _ -> json_from_table value . to_text
_ -> value.to_json
json.to_text
## Workaround so that the JS String is converted to a Text
https://www.pivotaltracker.com/story/show/184061302
"" + json

View File

@ -1,6 +1,9 @@
from Standard.Base import all
import Standard.Base.Data.Array_Proxy.Array_Proxy
from Standard.Base.Data.Json import render
from Standard.Table import Table, Column
import Standard.Table.Data.Row.Row
import Standard.Table.Data.Storage.Storage
import project.Id.Id
@ -29,7 +32,7 @@ Any.default_visualization self = Id.json
2.to_default_visualization_data
Any.to_default_visualization_data : Text
Any.to_default_visualization_data self = self.to_json.to_text
Any.to_default_visualization_data self = self.to_json
## PRIVATE
@ -78,7 +81,7 @@ Any.catch_ self ~val = self.catch Any (_-> val)
example_display = Examples.throw_error.to_default_visualization_data
Error.to_default_visualization_data : Text
Error.to_default_visualization_data self = self.catch Any .to_default_visualization_data
Error.to_default_visualization_data self = self.catch . to_default_visualization_data
## PRIVATE
@ -121,7 +124,7 @@ recover_errors : Any -> Any
recover_errors ~body =
result = Panic.recover Any body
result.catch Any err->
Json.from_pairs [["error", err.to_display_text]] . to_text
JS_Object.from_pairs [["error", err.to_display_text]] . to_text
## UNSTABLE
ADVANCED
@ -136,9 +139,18 @@ Vector.default_visualization self = Id.table
Transform the vector into text for displaying as part of its default
visualization.
Vector.to_default_visualization_data : Text
Vector.to_default_visualization_data self =
json = self.take 100 . to_json
json.to_text
Vector.to_default_visualization_data self = render_vector self
## PRIVATE
Renders a Vector (or Array / JS_Object) to a JSON-like Text, truncating
both nesting depth and element count so deeply nested data cannot blow up
the default visualization.
Arguments:
- depth: current nesting level; once it reaches `max_depth` a nested
Vector is rendered as the literal "[...]" instead of recursing.
- max_length: at most this many leading elements of each Vector are shown.
render_vector object depth=0 max_depth=5 max_length=100 =
case object of
_ : Vector -> if depth == max_depth then "[...]" else
# Errors cannot be traversed recursively, so serialise them directly.
result = object.take max_length . map (elem-> if elem.is_error then elem.to_json else render_vector elem (depth+1) max_depth max_length)
"[" + result.join ", " + "]"
_ : Array ->
# Wrap the polyglot array and retry as a Vector (same depth).
proxy = Array_Proxy.new object.length object.at
@Tail_Call render_vector (Vector.from_polyglot_array proxy) depth max_depth max_length
_ : JS_Object -> render object depth max_depth max_length
_ -> object.to_default_visualization_data
## UNSTABLE
ADVANCED
@ -203,9 +215,66 @@ Table.default_visualization self =
if cols.contains "x" && cols.contains "y" then Id.scatter_plot else
Id.table
## UNSTABLE
ADVANCED
Returns a Text used to display this table in the IDE by default: a JSON
object carrying the total row count and, for each column, its name and a
preview of at most its first 10 values.
Table.to_default_visualization_data : Text
Table.to_default_visualization_data self =
max_size = 10
row_count = ['number_of_rows', self.row_count]
cols = self.columns.map c->
name = c.name
items = c.to_vector.take (First max_size)
JS_Object.from_pairs [['name', name], ['data', items]]
JS_Object.from_pairs [row_count, ['columns', cols]] . to_text
## UNSTABLE
ADVANCED
Guides the visualization system to display the most suitable graphical
representation for this Column: always the table view.
Column.default_visualization : Id
Column.default_visualization self = Id.table
## PRIVATE
Checks if the column stores numbers, i.e. its storage type is either
integer or decimal.
Column.is_numeric : Boolean
Column.is_numeric self =
[Storage.Integer,Storage.Decimal].contains self.storage_type
## UNSTABLE
ADVANCED
Shows a JSON serialization of a truncated version of this column, for the
benefit of visualization in the IDE: its full length, its name, and a
preview of at most its first 100 values.
Column.to_default_visualization_data : Text
Column.to_default_visualization_data self =
size = ['length', self.length]
name = ['name', self.name]
max_data = 100
data = ['data', self.to_vector.take (First max_data)]
JS_Object.from_pairs [size, name, data] . to_text
## UNSTABLE
ADVANCED
Guides the visualization system to display the most suitable graphical
representation for this Row: always the table view.
Row.default_visualization : Id
Row.default_visualization self = Id.table
## UNSTABLE
ADVANCED
Returns a Text used to display this Row in the IDE by default, by
delegating to the default visualization of the row's values as a Vector.
Row.to_default_visualization_data : Text
Row.to_default_visualization_data self =
self.to_vector.to_default_visualization_data

View File

@ -28,14 +28,13 @@ type Update
## PRIVATE
Generate JSON that can be consumed by the visualization.
to_json : Json
to_json self =
data = ['data', Json.from_pairs [['values', self.values]]]
axis = ['axis', Json.from_pairs [['x', Json.from_pairs [['label', self.label]]]]]
to_js_object : JS_Object
to_js_object self =
data = ['data', JS_Object.from_pairs [['values', self.values]]]
ret_pairs = case self.label of
Nothing -> [data]
_ -> [axis,data]
Json.from_pairs ret_pairs
_ -> [['axis', JS_Object.from_pairs [['x', JS_Object.from_pairs [['label', self.label]]]]], data]
JS_Object.from_pairs ret_pairs
## PRIVATE
from_table : Table -> Update
@ -69,5 +68,6 @@ from_value value =
- value: the value to be visualized.
process_to_json_text : Any -> Text
process_to_json_text value =
update = from_value value
update.to_json.to_text
## Workaround so that the JS String is converted to a Text
https://www.pivotaltracker.com/story/show/184061302
"" + (from_value value . to_json)

View File

@ -12,13 +12,13 @@ type Id
## Serializes this ID to a JSON format understandable by the graphical
interface.
to_json self =
to_js_object self =
project = case self of
Id.Builtin _ -> Nothing
Id.Library project _ ->
full_name = project.namespace + "." + project.name
Json.from_pairs [["name", full_name]]
Json.from_pairs [["library", project], ["name", self.name]]
JS_Object.from_pairs [["name", full_name]]
JS_Object.from_pairs [["library", project], ["name", self.name]]
## UNSTABLE
ADVANCED

View File

@ -2,18 +2,19 @@ from Standard.Base import all
import project.Helpers
## PRIVATE
Default visualization preprocessor.
default_preprocessor x = x.to_default_visualization_data
default_preprocessor x =
## Workaround so that the JS String is converted to a Text
https://www.pivotaltracker.com/story/show/184061302
"" + x.to_default_visualization_data
## PRIVATE
Error visualization preprocessor.
error_preprocessor x =
ok = '{ message: ""}'
ok = JS_Object.from_pairs [['message', '']] . to_json
result = x.map_error err->
message = err.to_display_text
stack_trace = x.get_stack_trace_text.if_nothing "" . split '\n'
full_message = message + if stack_trace.length > 1 then " (" + stack_trace.at 1 . trim +")" else ""
'{ "kind": "Dataflow", "message": ' + full_message.to_json.to_text + '}'
JS_Object.from_pairs [['kind', 'Dataflow'], ['message', full_message]] . to_json
if result.is_error then result.catch else ok

View File

@ -17,7 +17,7 @@ import project.Helpers
Expected Enso types are inferred based on known SQL types and their mapping
to Enso types.
prepare_visualization : Table.IR.Query -> Json
prepare_visualization : Table.IR.Query -> Text
prepare_visualization x = Helpers.recover_errors <|
prepared = x.to_sql.prepare
code = prepared.first
@ -27,9 +27,13 @@ prepare_visualization x = Helpers.recover_errors <|
actual_type = Meta.get_qualified_type_name value
expected_sql_type = e.second.name
expected_enso_type = find_expected_enso_type_for_sql e.second
Json.from_pairs [["value", value], ["actual_type", actual_type], ["expected_sql_type", expected_sql_type], ["expected_enso_type", expected_enso_type]]
JS_Object.from_pairs [["value", value], ["actual_type", actual_type], ["expected_sql_type", expected_sql_type], ["expected_enso_type", expected_enso_type]]
dialect = x.connection.dialect.name
Json.from_pairs [["dialect", dialect], ["code", code], ["interpolations", mapped]] . to_text
json = JS_Object.from_pairs [["dialect", dialect], ["code", code], ["interpolations", mapped]] . to_text
## Workaround so that the JS String is converted to a Text
https://www.pivotaltracker.com/story/show/184061302
"" + json
## PRIVATE

View File

@ -100,7 +100,7 @@ type No_Fallback_Column
## PRIVATE
Generates JSON that describes points data.
Table.point_data : Table -> Json
Table.point_data : Table -> Vector
Table.point_data self =
get_point_data field = field.lookup_in self . rename field.name . catch Any (_->Nothing)
columns = Point_Data.all_fields.map get_point_data . filter (x -> x.is_nothing.not)
@ -108,30 +108,29 @@ Table.point_data self =
pairs = columns.map column->
value = column.at row_n . catch_ Nothing
[column.name, value]
Json.from_pairs pairs
JS_Object.from_pairs pairs
## PRIVATE
Generates JSON that describes plot axes.
Table.axes : Table -> Json
Table.axes : Table -> JS_Object
Table.axes self =
describe_axis field =
col_name = field.lookup_in self . name
label = Json.from_pairs [[label_field, col_name]]
label = JS_Object.from_pairs [[label_field, col_name]]
[field.name, label]
x_axis = describe_axis Point_Data.X
y_axis = describe_axis Point_Data.Y
is_valid axis_pair =
label = axis_pair.at 1
label.is_valid && (self.all_columns.length > 0)
axes_obj = Json.from_pairs <| [x_axis, y_axis].filter is_valid
if axes_obj.fields.size > 0 then axes_obj else Nothing
axes_obj = JS_Object.from_pairs <| [x_axis, y_axis].filter is_valid
if axes_obj.length > 0 then axes_obj else Nothing
## PRIVATE
Vector.point_data : Vector -> Json
Vector.point_data : Vector -> Vector
Vector.point_data self =
self.map_with_index <| i-> elem->
Json.from_pairs [[Point_Data.X.name, i], [Point_Data.Y.name, elem]]
self.map_with_index i-> elem-> JS_Object.from_pairs [[Point_Data.X.name, i], [Point_Data.Y.name, elem]]
## PRIVATE
bound_data bounds data = case bounds of
@ -143,8 +142,8 @@ bound_data bounds data = case bounds of
max_y = bounds.at 3
data.filter <| datum->
x = datum.get "x" . unwrap
y = datum.get "y" . unwrap
x = datum.get "x"
y = datum.get "y"
min_x<=x && x<=max_x && min_y<=y && y<=max_y
@ -155,8 +154,8 @@ type Extreme
limit_data limit data = case limit of
Nothing -> data
_ -> if ((data.length <= limit) || (data.length == 0)) then data else
x datum = (datum.get "x").unwrap
y datum = (datum.get "y").unwrap
x datum = datum.get "x"
y datum = datum.get "y"
update_extreme current idx point =
new_min_x = if x current.min_x.second > x point then [idx, point] else current.min_x
@ -175,18 +174,17 @@ limit_data limit data = case limit of
extreme + data.take (Index_Sub_Range.Sample (limit - extreme.length))
## PRIVATE
json_from_table : Table -> [Int]|Nothing -> Int|Nothing -> Json
json_from_table : Table -> [Int]|Nothing -> Int|Nothing -> Text
json_from_table table bounds limit =
data = table.point_data |> bound_data bounds |> limit_data limit
axes = table.axes
Json.from_pairs <| [[data_field,data], [axis_field, axes]]
JS_Object.from_pairs [[data_field, data], [axis_field, axes]] . to_json
## PRIVATE
json_from_vector : Vector Any -> [Int]|Nothing -> Int|Nothing -> Json
json_from_vector : Vector Any -> [Int]|Nothing -> Int|Nothing -> Text
json_from_vector vec bounds limit =
data = vec.point_data |> bound_data bounds |> limit_data limit
axes = [axis_field, Nothing]
Json.from_pairs <| [[data_field, data], axes]
JS_Object.from_pairs [[data_field, data], [axis_field, Nothing]] . to_json
## PRIVATE
@ -204,4 +202,6 @@ process_to_json_text value bounds=Nothing limit=Nothing =
_ : Vector -> json_from_vector value bounds limit
_ -> json_from_vector value.to_vector bounds limit
json.to_text
## Workaround so that the JS String is converted to a Text
https://www.pivotaltracker.com/story/show/184061302
"" + json

View File

@ -20,7 +20,7 @@ import project.Helpers
- max_rows: The maximum number of rows to display.
In case of Database backed data, it materializes a fragment of the data.
prepare_visualization : Any -> Integer -> Json
prepare_visualization : Any -> Integer -> Text
prepare_visualization x max_rows=1000 = Helpers.recover_errors <| case x of
_ : Dataframe_Table ->
dataframe = x.take (First max_rows)
@ -28,16 +28,22 @@ prepare_visualization x max_rows=1000 = Helpers.recover_errors <| case x of
included_rows = dataframe.row_count
index = dataframe.index.catch Any _->
Dataframe_Column.from_vector "" (Vector.new included_rows i->i)
make_json dataframe [index] all_rows_count
## Workaround so that the JS String is converted to a Text
https://www.pivotaltracker.com/story/show/184061302
"" + make_json dataframe [index] all_rows_count
_ : Database_Table ->
# Materialize a table with indices as normal columns (because dataframe does not support multi-indexing).
df = x.reset_index.read max_rows
# Then split into actual columns and indices.
vis_df = df.select_columns (Column_Selector.By_Name (x.columns.map .name))
indices = df.select_columns (Column_Selector.By_Name (x.indices.map .name)) . columns
indices = if x.indices.length == 0 then [] else df.select_columns (Column_Selector.By_Name (x.indices.map .name)) . columns
all_rows_count = x.row_count
make_json vis_df indices all_rows_count
## Workaround so that the JS String is converted to a Text
https://www.pivotaltracker.com/story/show/184061302
"" + make_json vis_df indices all_rows_count
# We display columns as 1-column tables.
_ : Dataframe_Column ->
@ -49,13 +55,13 @@ prepare_visualization x max_rows=1000 = Helpers.recover_errors <| case x of
# We also visualize Vectors and arrays
_ : Vector ->
truncated = x.take (First max_rows)
Json.from_pairs [["json", truncated], ["all_rows_count", x.length]] . to_text
JS_Object.from_pairs [["json", truncated], ["all_rows_count", x.length]] . to_text
_ : Array ->
prepare_visualization (Vector.from_polyglot_array x) max_rows
# Anything else will be visualized with the JSON or matrix visualization
_ ->
Json.from_pairs [["json", x]] . to_text
JS_Object.from_pairs [["json", x]] . to_text
## PRIVATE
Creates a JSON representation for the visualizations.
@ -68,7 +74,7 @@ prepare_visualization x max_rows=1000 = Helpers.recover_errors <| case x of
`dataframe`.
- all_rows_count: the number of all rows in the underlying data, useful if
only a fragment is displayed.
make_json : (Dataframe_Table | Database_Table) -> Vector Column -> Integer -> Json
make_json : (Dataframe_Table | Database_Table) -> Vector Column -> Integer -> Text
make_json dataframe indices all_rows_count =
columns = dataframe.columns
header = ["header", columns.map .name]
@ -77,4 +83,4 @@ make_json dataframe indices all_rows_count =
ixes = ["indices", indices.map .to_vector]
ixes_header = ["indices_header", indices.map .name]
pairs = [header, data, all_rows, ixes, ixes_header]
Json.from_pairs pairs . to_text
JS_Object.from_pairs pairs . to_text

View File

@ -1951,6 +1951,10 @@ class RuntimeVisualizationsTest
}
it should "run internal IDE visualisation preprocessor catching error" in {
pending
// TODO [JD]: Disabled due to issue with context not allowing JS functions.
// https://www.pivotaltracker.com/story/show/184064564
val contextId = UUID.randomUUID()
val requestId = UUID.randomUUID()
val visualisationId = UUID.randomUUID()

View File

@ -159,11 +159,13 @@ public final class ParseStdLibTest extends TestCase {
Arrays.asList(
// Files containing type expressions not supported by old parser.
"Data/Index_Sub_Range.enso",
"Data/Json.enso",
"Data/Pair.enso",
"Data/Sort_Column_Selector.enso",
"Data/Text/Regex/Regex_Mode.enso",
"Data/Value_Type.enso",
"Error/Common.enso",
"Data/Vector.enso",
"Network/HTTP/HTTP_Status_Code.enso",
"Internal/Base_Generator.enso"));
}

View File

@ -0,0 +1,14 @@
package org.enso.base.file_format;

/**
 * Service-provider registration of the JSON file format.
 *
 * <p>Registered via the Lookup {@code ServiceProvider} annotation so that {@code FileFormatSPI}
 * can discover it at runtime and map it to the Enso {@code JSON_File} type (presumably consumed by
 * {@code Data.read}'s format resolution — confirm against {@code FileFormatSPI}).
 */
@org.openide.util.lookup.ServiceProvider(service = FileFormatSPI.class)
public class JSONFormatSPI extends FileFormatSPI {
  // Fully-qualified name of the Enso module that declares the format type.
  @Override
  protected String getModuleName() {
    return "Standard.Base.System.File_Format";
  }

  // Name of the Enso type (within the module above) implementing this format.
  @Override
  protected String getTypeName() {
    return "JSON_File";
  }
}

View File

@ -1,449 +0,0 @@
package org.enso.base.json;
import java.util.ArrayDeque;
import java.util.Deque;
/**
 * A single-pass, event-based (SAX-style) JSON parser.
 *
 * <p>{@link #parse(String, JsonConsumer)} walks the input exactly once, keeping an explicit stack
 * of {@link State}s instead of recursing, and reports every syntactic event (value, object/array
 * start and end) to a caller-supplied {@link JsonConsumer}.
 */
public class Parser {

  /** An exception thrown when an unexpected token is encountered in JSON. */
  public static class UnexpectedTokenException extends RuntimeException {

    /**
     * Creates a new instance of this error.
     *
     * @param position the position in input where the exception occurred.
     * @param expected a description of expected tokens.
     */
    public UnexpectedTokenException(int position, String expected) {
      super("Unexpected token at position " + position + ". Expected " + expected + ".");
    }
  }

  /** An exception thrown when the input ends unexpectedly. */
  public static class UnexpectedEndOfInputException extends RuntimeException {

    /** Creates a new instance of this error. */
    public UnexpectedEndOfInputException() {
      super("Unexpected end of input.");
    }

    /**
     * Creates a new instance of this error.
     *
     * @param expected a description of expected tokens.
     */
    public UnexpectedEndOfInputException(String expected) {
      super("Unexpected end of input. Expected " + expected + ".");
    }
  }

  /**
   * A consumer of parsing events. Called iteratively, whenever one of the events occurs in
   * parsing. An event may either denote a parsed value or a start or end of a new nesting level.
   *
   * <p>The parser validates the input as it goes, so a consumer is guaranteed never to receive an
   * invalid sequence of events.
   */
  public interface JsonConsumer {
    void on_start_object();

    void on_key(String name);

    void on_end_object();

    void on_start_array();

    void on_end_array();

    void on_double(double n);

    void on_long(long n);

    void on_string(String str);

    void on_true();

    void on_false();

    void on_null();
  }

  /** Parser states kept on the explicit stack; each names the set of tokens that are legal next. */
  private enum State {
    ANY,
    ARRAY_END_OR_VALUE,
    ARRAY_END_OR_COMMA,
    ARRAY_VALUE,
    OBJECT_KEY_OR_END,
    OBJECT_VALUE,
    OBJECT_END_OR_COMMA,
    OBJECT_KEY
  }

  /**
   * Parses a JSON string, iteratively calling the provided consumer on each JSON event.
   *
   * <p>Note that this parser internally checks the integrity of the parsed JSON, therefore it is
   * guaranteed that no invalid sequences of events can be reported in the consumer. In case an
   * invalid sequence of characters is encountered, an {@link UnexpectedEndOfInputException} or
   * {@link UnexpectedTokenException} is thrown instead.
   *
   * @param jsonString the string to parse.
   * @param consumer the consumer for reported events.
   */
  public static void parse(String jsonString, JsonConsumer consumer) {
    char[] chars = jsonString.toCharArray();
    Deque<State> state = new ArrayDeque<>();
    state.push(State.ANY);
    int position = 0;
    while (!state.isEmpty()) {
      State current = state.pop();
      position = consumeWhiteSpace(chars, position);
      assertInput(chars, position);
      switch (current) {
        case ANY:
          position = consumeAny(chars, position, consumer, state);
          break;
        case ARRAY_END_OR_VALUE:
          position = consumeArrayEndOrValue(chars, position, consumer, state);
          break;
        case ARRAY_END_OR_COMMA:
          position = consumeArrayEndOrComma(chars, position, consumer, state);
          break;
        case ARRAY_VALUE:
          // After a value inside an array, only `]` or `,` may follow.
          state.push(State.ARRAY_END_OR_COMMA);
          position = consumeAny(chars, position, consumer, state);
          break;
        case OBJECT_KEY_OR_END:
          position = consumeObjectKeyOrEnd(chars, position, consumer, state);
          break;
        case OBJECT_VALUE:
          // After a value inside an object, only `}` or `,` may follow.
          state.push(State.OBJECT_END_OR_COMMA);
          position = consumeAny(chars, position, consumer, state);
          break;
        case OBJECT_END_OR_COMMA:
          position = consumeObjectEndOrComma(chars, position, consumer, state);
          break;
        case OBJECT_KEY:
          position = consumeObjectKey(chars, position, consumer, state);
          break;
      }
    }
    // Only trailing whitespace may follow the top-level value.
    position = consumeWhiteSpace(chars, position);
    if (position < chars.length) {
      throw new UnexpectedTokenException(position, "end of input");
    }
  }

  /** Consumes either a closing brace, ending the object, or a comma introducing the next key. */
  private static int consumeObjectEndOrComma(
      char[] chars, int position, JsonConsumer consumer, Deque<State> state) {
    if (chars[position] == '}') {
      consumer.on_end_object();
      position++;
      return position;
    } else if (chars[position] == ',') {
      state.push(State.OBJECT_KEY);
      position++;
      return position;
    }
    throw new UnexpectedTokenException(position, "a comma or a closing brace");
  }

  /** Consumes a quoted object key together with the colon that follows it. */
  private static int consumeObjectKey(
      char[] chars, int position, JsonConsumer consumer, Deque<State> state) {
    position = consumeString(chars, position, consumer, true);
    state.push(State.OBJECT_VALUE);
    position = consumeWhiteSpace(chars, position);
    assertInput(chars, position);
    if (chars[position] == ':') {
      position++;
      return position;
    } else {
      throw new UnexpectedTokenException(position, "a colon");
    }
  }

  /** Consumes either a closing brace (empty object) or the first key of the object. */
  private static int consumeObjectKeyOrEnd(
      char[] chars, int position, JsonConsumer consumer, Deque<State> state) {
    if (chars[position] == '}') {
      consumer.on_end_object();
      position++;
      return position;
    }
    return consumeObjectKey(chars, position, consumer, state);
  }

  /** Consumes either a closing bracket (empty array) or the first value of the array. */
  private static int consumeArrayEndOrValue(
      char[] chars, int position, JsonConsumer consumer, Deque<State> state) {
    if (chars[position] == ']') {
      consumer.on_end_array();
      position++;
      return position;
    }
    state.push(State.ARRAY_END_OR_COMMA);
    return consumeAny(chars, position, consumer, state);
  }

  /** Consumes either a closing bracket, ending the array, or a comma introducing the next value. */
  private static int consumeArrayEndOrComma(
      char[] chars, int position, JsonConsumer consumer, Deque<State> state) {
    switch (chars[position]) {
      case ']':
        consumer.on_end_array();
        position++;
        return position;
      case ',':
        state.push(State.ARRAY_VALUE);
        position++;
        return position;
      default:
        throw new UnexpectedTokenException(position, "a comma or a closing bracket");
    }
  }

  /** Consumes any JSON value, dispatching on its first character. */
  private static int consumeAny(
      char[] chars, int position, JsonConsumer consumer, Deque<State> state) {
    switch (chars[position]) {
      case '[':
        consumer.on_start_array();
        position++;
        state.push(State.ARRAY_END_OR_VALUE);
        return position;
      case '{':
        consumer.on_start_object();
        position++;
        state.push(State.OBJECT_KEY_OR_END);
        return position;
      case '"':
        return consumeString(chars, position, consumer, false);
      case '-':
      case '0':
      case '1':
      case '2':
      case '3':
      case '4':
      case '5':
      case '6':
      case '7':
      case '8':
      case '9':
        return consumeNumber(chars, position, consumer);
      case 'n':
        return consumeNull(chars, position, consumer);
      case 't':
        return consumeTrue(chars, position, consumer);
      case 'f':
        return consumeFalse(chars, position, consumer);
    }
    throw new UnexpectedTokenException(position, "a start of a JSON value");
  }

  /** Consumes the literal {@code null}, reporting {@link JsonConsumer#on_null()}. */
  private static int consumeNull(char[] chars, int position, JsonConsumer consumer) {
    if (position + 3 < chars.length) {
      boolean match =
          chars[position] == 'n'
              && chars[position + 1] == 'u'
              && chars[position + 2] == 'l'
              && chars[position + 3] == 'l';
      if (match) {
        consumer.on_null();
        return position + 4;
      }
      throw new UnexpectedTokenException(position, "a null");
    }
    throw new UnexpectedEndOfInputException("a null");
  }

  /** Consumes the literal {@code true}, reporting {@link JsonConsumer#on_true()}. */
  private static int consumeTrue(char[] chars, int position, JsonConsumer consumer) {
    if (position + 3 < chars.length) {
      boolean match =
          chars[position] == 't'
              && chars[position + 1] == 'r'
              && chars[position + 2] == 'u'
              && chars[position + 3] == 'e';
      if (match) {
        consumer.on_true();
        return position + 4;
      }
      throw new UnexpectedTokenException(position, "a true");
    }
    throw new UnexpectedEndOfInputException("a true");
  }

  /** Consumes the literal {@code false}, reporting {@link JsonConsumer#on_false()}. */
  private static int consumeFalse(char[] chars, int position, JsonConsumer consumer) {
    if (position + 4 < chars.length) {
      boolean match =
          chars[position] == 'f'
              && chars[position + 1] == 'a'
              && chars[position + 2] == 'l'
              && chars[position + 3] == 's'
              && chars[position + 4] == 'e';
      if (match) {
        consumer.on_false();
        return position + 5;
      }
      throw new UnexpectedTokenException(position, "a false");
    }
    throw new UnexpectedEndOfInputException("a false");
  }

  /**
   * Consumes a quoted string, resolving escape sequences.
   *
   * @param isKey when true, the string is reported via {@link JsonConsumer#on_key(String)},
   *     otherwise via {@link JsonConsumer#on_string(String)}.
   */
  private static int consumeString(
      char[] chars, int position, JsonConsumer consumer, boolean isKey) {
    if (chars[position] != '"') {
      throw new UnexpectedTokenException(position, "a string");
    }
    position++;
    StringBuilder bldr = new StringBuilder();
    while (position < chars.length) {
      switch (chars[position]) {
        case '"':
          position++;
          if (isKey) {
            consumer.on_key(bldr.toString());
          } else {
            consumer.on_string(bldr.toString());
          }
          return position;
        case '\\':
          position++;
          position = consumeEscape(chars, position, bldr);
          break;
        default:
          bldr.append(chars[position]);
          position++;
      }
    }
    throw new UnexpectedEndOfInputException("a closing quote");
  }

  /** Consumes one escape sequence (the character(s) after a backslash) inside a string. */
  private static int consumeEscape(char[] chars, int position, StringBuilder builder) {
    if (position >= chars.length) {
      throw new UnexpectedEndOfInputException("an escape sequence");
    }
    switch (chars[position]) {
      case '"':
        builder.append('"');
        position++;
        return position;
      case '\\':
        builder.append('\\');
        position++;
        return position;
      case '/':
        builder.append('/');
        position++;
        return position;
      case 'b':
        builder.append('\u0008');
        position++;
        return position;
      case 'f':
        builder.append('\u000C');
        position++;
        return position;
      case 'n':
        builder.append('\n');
        position++;
        return position;
      case 'r':
        builder.append('\r');
        position++;
        return position;
      case 't':
        builder.append('\t');
        position++;
        return position;
      case 'u':
        position++;
        return consumeHexEscape(chars, position, builder);
      default:
        throw new UnexpectedTokenException(position, "a valid escape character");
    }
  }

  /** Consumes the four hexadecimal digits of a {@code \\uXXXX} escape. */
  private static int consumeHexEscape(char[] chars, int position, StringBuilder builder) {
    if (position + 3 >= chars.length) {
      throw new UnexpectedEndOfInputException("four hexadecimal digits");
    }
    char c = 0;
    for (int i = 0; i < 4; i++) {
      // Accumulate the code unit one hex digit at a time.
      c *= 16;
      char current = Character.toLowerCase(chars[position]);
      if (current >= '0' && current <= '9') {
        c += current - '0';
      } else if (current >= 'a' && current <= 'f') {
        c += 10 + current - 'a';
      } else {
        throw new UnexpectedTokenException(position, "a hexadecimal digit");
      }
      position++;
    }
    builder.append(c);
    return position;
  }

  /** Consumes a non-empty run of decimal digits, appending them to {@code bldr}. */
  private static int consumeDigits(char[] chars, int position, StringBuilder bldr) {
    // Report running out of input distinctly from hitting a non-digit token, consistently with
    // the other consume* helpers which throw UnexpectedEndOfInputException at end of input.
    if (position >= chars.length) {
      throw new UnexpectedEndOfInputException("a digit");
    }
    if (!Character.isDigit(chars[position])) {
      throw new UnexpectedTokenException(position, "a digit");
    }
    while (position < chars.length) {
      if (Character.isDigit(chars[position])) {
        bldr.append(chars[position]);
        position++;
      } else {
        return position;
      }
    }
    return position;
  }

  /**
   * Consumes a number literal (sign, integer part, optional fraction, optional exponent),
   * reporting it as a long when it fits, or as a double otherwise.
   */
  private static int consumeNumber(char[] chars, int position, JsonConsumer consumer) {
    StringBuilder bldr = new StringBuilder();
    if (chars[position] == '-') {
      bldr.append('-');
      position++;
    }
    position = consumeDigits(chars, position, bldr);
    if (position < chars.length && chars[position] == '.') {
      bldr.append('.');
      position++;
      position = consumeDigits(chars, position, bldr);
    }
    if (position < chars.length && Character.toLowerCase(chars[position]) == 'e') {
      bldr.append('E');
      position++;
      if (position >= chars.length) {
        throw new UnexpectedEndOfInputException("an exponent");
      }
      if (chars[position] == '+' || chars[position] == '-') {
        bldr.append(chars[position]);
        position++;
      }
      position = consumeDigits(chars, position, bldr);
    }
    String strNum = bldr.toString();
    try {
      // Prefer the exact integral representation; fall back to double on overflow or fractions.
      consumer.on_long(Long.parseLong(strNum, 10));
    } catch (NumberFormatException e) {
      consumer.on_double(Double.parseDouble(strNum));
    }
    return position;
  }

  /** Throws {@link UnexpectedEndOfInputException} if {@code position} is past the input. */
  private static void assertInput(char[] chars, int position) {
    if (position >= chars.length) {
      throw new UnexpectedEndOfInputException();
    }
  }

  /** Skips the JSON whitespace characters (tab, newline, carriage return, space). */
  private static int consumeWhiteSpace(char[] chars, int position) {
    while (position < chars.length) {
      switch (chars[position]) {
        case '\t':
        case '\n':
        case '\r':
        case ' ':
          position++;
          break;
        default:
          return position;
      }
    }
    return position;
  }
}

View File

@ -1,50 +0,0 @@
package org.enso.base.json;
public class Printer {
/**
* Escapes a string into an RFC-8259 compliant format.
*
* @param string the string to escape
* @return the original string with special characters escaped.
*/
public static String json_escape(String string) {
StringBuilder builder = new StringBuilder();
builder.append("\"");
string
.chars()
.forEach(
ch -> {
switch (ch) {
case '\\':
builder.append("\\\\");
break;
case '\"':
builder.append("\\\"");
break;
case '\b':
builder.append("\\b");
break;
case '\f':
builder.append("\\f");
break;
case '\n':
builder.append("\\n");
break;
case '\r':
builder.append("\\r");
break;
case '\t':
builder.append("\\t");
break;
default:
if (ch <= 0x1F) {
builder.append(String.format("\\u%08X", ch));
} else {
builder.append((char) ch);
}
}
});
builder.append("\"");
return builder.toString();
}
}

View File

@ -31,7 +31,7 @@ spec =
Test.specify "should allow converting a GeoJSON array of features into a table" <|
fields = ['foo', 'bar', 'baz', 'longitude', 'elevation']
t = geo_json.get "features" . geo_json_to_table fields
t = Geo.geo_json_to_table (geo_json.get "features") fields
t.columns.map .name . should_equal fields
t.at 'foo' . to_vector . should_equal [1, 2]
t.at 'bar' . to_vector . should_equal ['value2', Nothing]
@ -41,7 +41,7 @@ spec =
Test.specify "should allow converting a GeoJSON object into a table with provided fields" <|
fields = ['foo', 'bar', 'longitude']
t = geo_json.geo_json_to_table fields
t = Geo.geo_json_to_table geo_json fields
t.columns.map .name . should_equal fields
t.at 'foo' . to_vector . should_equal [1, 2]
t.at 'bar' . to_vector . should_equal ['value2', Nothing]
@ -49,7 +49,7 @@ spec =
Test.specify "should allow converting a GeoJSON object into a table containing all available fields" <|
fields = ['bar', 'baz', 'elevation', 'foo', 'latitude', 'longitude']
t = geo_json.geo_json_to_table
t = Geo.geo_json_to_table geo_json
t.columns.map .name . should_equal fields
t.at 'foo' . to_vector . should_equal [1, 2]
t.at 'bar' . to_vector . should_equal ['value2', Nothing]

View File

@ -30,15 +30,17 @@ spec =
Test.group "[Codegen] JSON serialization" <|
Test.specify "should serialize Tables and Columns to their SQL representation" <|
q1 = t1.filter (t1.at "A" == 42) . to_json
part1 = Json.from_pairs [["sql_code", 'SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" WHERE ("T1"."A" = ']]
interp = Json.from_pairs [["value", 42], ["expected_sql_type", "INTEGER"]]
part2 = Json.from_pairs [["sql_interpolation", interp]]
part3 = Json.from_pairs [["sql_code", ")"]]
expected = Json.from_pairs [["query", Json.Array [part1, part2, part3]]]
part1 = JS_Object.from_pairs [["sql_code", 'SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" WHERE ("T1"."A" = ']]
part2_sub = JS_Object.from_pairs [["value", 42], ["expected_sql_type", "INTEGER"]]
part2 = JS_Object.from_pairs [["sql_interpolation", part2_sub]]
part3 = JS_Object.from_pairs [["sql_code", ")"]]
expected = JS_Object.from_pairs [["query", [part1, part2, part3]]] . to_text
q1.should_equal expected
q2 = t1.at "A" . to_json
q2.should_equal (Json.from_pairs [["query", Json.Array [Json.from_pairs [["sql_code", 'SELECT "T1"."A" AS "A" FROM "T1" AS "T1"']]]]])
expected_2 = JS_Object.from_pairs [["query", [JS_Object.from_pairs [["sql_code", 'SELECT "T1"."A" AS "A" FROM "T1" AS "T1"']]]]] . to_text
q2.should_equal expected_2
Test.group "[Codegen] Basic Select" <|
Test.specify "should select columns from a table" <|
@ -61,7 +63,7 @@ spec =
Test.specify "should work correctly when there are no columns" <|
empty = t1.select_columns (Column_Selector.By_Name [])
json = Json.from_pairs [["query", Nothing], ["message", "The table has no columns so a query cannot be generated."]]
json = JS_Object.from_pairs [["query", Nothing], ["message", "The table has no columns so a query cannot be generated."]] . to_text
empty.to_json . should_equal json
empty.column_count . should_equal 0
empty.to_sql . should_fail_with Unsupported_Database_Operation.Error

View File

@ -73,7 +73,7 @@ connect_via_json_config =
msg = "Redshift connection is not set up. Please create a JSON file containing the credentials in `data/redshift_credentials.json`"
if credentials.exists.not then msg else
creds = Json.parse credentials.read_text . unwrap
creds = Json.parse credentials.read_text
access_key = creds.get 'access_key_id'
secret_key = creds.get 'secret_access_key'
uri = uri_parse (creds.get 'db_uri')

View File

@ -20,7 +20,7 @@ spec =
c_2 = ["b", ['2', Nothing, '8', '11']]
c_3 = ["c", [Nothing, '6', '9', '12']]
expected_table = Table.new [c_1, c_2, c_3]
simple_empty = Data.read_file (enso_project.data / "simple_empty.csv") (Delimited "," headers=True value_formatter=Nothing)
simple_empty = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=True value_formatter=Nothing)
simple_empty.should_equal expected_table
Test.specify "should load a simple table without headers" <|
@ -28,11 +28,11 @@ spec =
c_2 = ["Column_2", ['b', '2', Nothing, '8', '11']]
c_3 = ["Column_3", ['c', Nothing, '6', '9', '12']]
expected_table = Table.new [c_1, c_2, c_3]
simple_empty = Data.read_file (enso_project.data / "simple_empty.csv") (Delimited "," headers=False value_formatter=Nothing)
simple_empty = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=False value_formatter=Nothing)
simple_empty.should_equal expected_table
Test.specify "should work in presence of missing headers" <|
action on_problems = Data.read_file (enso_project.data / "missing_header.csv") (Delimited "," headers=True value_formatter=Nothing) on_problems
action on_problems = Data.read (enso_project.data / "missing_header.csv") (Delimited "," headers=True value_formatter=Nothing) on_problems
tester table =
table.columns.map .name . should_equal ["a", "Column_1", "c", "Column_2", "d"]
table.at "a" . to_vector . should_equal ["1"]
@ -44,61 +44,61 @@ spec =
Problems.test_problem_handling action problems tester
Test.specify "should infer headers based on the first two rows" <|
t1 = Data.read_file (enso_project.data / "data_small.csv") (Delimited ",")
t1 = Data.read (enso_project.data / "data_small.csv") (Delimited ",")
t1.columns.map .name . should_equal ["Code", "Index", "Flag", "Value", "ValueWithNothing", "TextWithNothing", "Hexadecimal", "Leading0s", "QuotedNumbers", "Mixed Types"]
t2 = Data.read_file (enso_project.data / "all_text.csv") (Delimited ",")
t2 = Data.read (enso_project.data / "all_text.csv") (Delimited ",")
t2.columns.map .name . should_equal ["Column_1", "Column_2"]
t2.at "Column_1" . to_vector . should_equal ["a", "c", "e", "g"]
t2.at "Column_2" . to_vector . should_equal ["b", "d", "f", "h"]
t3 = Data.read_file (enso_project.data / "two_rows1.csv") (Delimited ",")
t3 = Data.read (enso_project.data / "two_rows1.csv") (Delimited ",")
t3.columns.map .name . should_equal ["a", "b", "c"]
t3.at "a" . to_vector . should_equal ["x"]
t3.at "b" . to_vector . should_equal [Nothing]
t3.at "c" . to_vector . should_equal [Nothing]
t4 = Data.read_file (enso_project.data / "two_rows2.csv") (Delimited ",")
t4 = Data.read (enso_project.data / "two_rows2.csv") (Delimited ",")
t4.columns.map .name . should_equal ["Column_1", "Column_2", "Column_3"]
t4.at "Column_1" . to_vector . should_equal ["a", "d"]
t4.at "Column_2" . to_vector . should_equal ["b", "e"]
t4.at "Column_3" . to_vector . should_equal ["c", "f"]
t5 = Data.read_file (enso_project.data / "numbers_in_header.csv") (Delimited ",")
t5 = Data.read (enso_project.data / "numbers_in_header.csv") (Delimited ",")
t5.columns.map .name . should_equal ["Column_1", "Column_2", "Column_3"]
t5.at "Column_1" . to_vector . should_equal ["a", "1"]
t5.at "Column_2" . to_vector . should_equal ["b", "2"]
t5.at "Column_3" . to_vector . should_equal [0, 3]
t6 = Data.read_file (enso_project.data / "quoted_numbers_in_header.csv") (Delimited ",")
t6 = Data.read (enso_project.data / "quoted_numbers_in_header.csv") (Delimited ",")
t6.columns.map .name . should_equal ["1", "x"]
t6.at "1" . to_vector . should_equal ["y"]
t6.at "x" . to_vector . should_equal [2]
Test.specify "should not use the first row as headers if it is the only row, unless specifically asked to" <|
t1 = Data.read_file (enso_project.data / "one_row.csv") (Delimited ",")
t1 = Data.read (enso_project.data / "one_row.csv") (Delimited ",")
t1.columns.map .name . should_equal ["Column_1", "Column_2", "Column_3"]
t1.at "Column_1" . to_vector . should_equal ["x"]
t1.at "Column_2" . to_vector . should_equal ["y"]
t1.at "Column_3" . to_vector . should_equal ["z"]
t2 = Data.read_file (enso_project.data / "one_row.csv") (Delimited "," headers=True)
t2 = Data.read (enso_project.data / "one_row.csv") (Delimited "," headers=True)
t2.columns.map .name . should_equal ["x", "y", "z"]
t2.row_count . should_equal 0
t2.at "x" . to_vector . should_equal []
Test.specify "should be able to load even an empty file" <|
table = Data.read_file (enso_project.data / "empty.txt") (Delimited "," headers=True value_formatter=Nothing)
table = Data.read (enso_project.data / "empty.txt") (Delimited "," headers=True value_formatter=Nothing)
table.columns.map .name . should_equal []
table.row_count . should_equal 0
Test.specify "should correctly handle file opening issues" <|
nonexistent_file = enso_project.data / "a_filename_that_does_not_exist.foobar"
r1 = Data.read_file nonexistent_file (Delimited "," headers=True value_formatter=Nothing)
r1 = Data.read nonexistent_file (Delimited "," headers=True value_formatter=Nothing)
r1.should_fail_with File_Error.Not_Found
directory = enso_project.data
r2 = Data.read_file directory (Delimited "," headers=True value_formatter=Nothing) Problem_Behavior.Report_Error
r2 = Data.read directory (Delimited "," headers=True value_formatter=Nothing) Problem_Behavior.Report_Error
r2.should_fail_with File_Error.IO_Error
Test.specify "should work with all kinds of line endings" <|
@ -109,7 +109,7 @@ spec =
text.write (path name)
test_file name =
table = Data.read_file (path name) (Delimited "," headers=True value_formatter=Nothing) Problem_Behavior.Report_Error
table = Data.read (path name) (Delimited "," headers=True value_formatter=Nothing) Problem_Behavior.Report_Error
table.columns.map .name . should_equal ['a', 'b', 'c']
table.at 'a' . to_vector . should_equal ['d', '1']
table.at 'b' . to_vector . should_equal ['e', '2']
@ -124,7 +124,7 @@ spec =
# Currently mixed line endings are not supported.
'a,b,c\nd,e,f\r1,2,3'.write (path 'mixed.csv')
Data.read_file (path 'mixed.csv') (Delimited "," headers=True value_formatter=Nothing) Problem_Behavior.Report_Error . should_fail_with Invalid_Row.Error
Data.read (path 'mixed.csv') (Delimited "," headers=True value_formatter=Nothing) Problem_Behavior.Report_Error . should_fail_with Invalid_Row.Error
['crlf.csv', 'lf.csv', 'cr.csv', 'mixed.csv'].each (path >> .delete)
@ -140,30 +140,30 @@ spec =
collapsed_table = Table.new <|
['a', 'b', 'c\nd', 'e', 'f\n1', 2, 3].map_with_index i-> v->
["Column_" + (i+1).to_text, [v]]
Data.read_file file format . should_equal reference_table
Data.read_file file (format.with_line_endings Line_Ending_Style.Unix) . should_equal reference_table
Data.read_file file (format.with_line_endings Line_Ending_Style.Mac_Legacy) . should_equal collapsed_table
Data.read_file file (format.with_line_endings Line_Ending_Style.Windows) . should_equal collapsed_table
Data.read file format . should_equal reference_table
Data.read file (format.with_line_endings Line_Ending_Style.Unix) . should_equal reference_table
Data.read file (format.with_line_endings Line_Ending_Style.Mac_Legacy) . should_equal collapsed_table
Data.read file (format.with_line_endings Line_Ending_Style.Windows) . should_equal collapsed_table
file.delete
file_2 = enso_project.data / "transient" / "crlf.csv"
lines.join '\r\n' . write file_2
Data.read_file file_2 (format.with_line_endings Line_Ending_Style.Windows) . should_equal reference_table
Data.read file_2 (format.with_line_endings Line_Ending_Style.Windows) . should_equal reference_table
# For some reason loading the CRLF file in Unix mode trims the CR characters. We may want to revisit this at some point.
table = Data.read_file file_2 (format.with_line_endings Line_Ending_Style.Unix)
table = Data.read file_2 (format.with_line_endings Line_Ending_Style.Unix)
table . should_equal reference_table
file_2.delete
Test.specify "should work with Windows-1252 encoding" <|
table = Data.read_file (enso_project.data / "windows.csv") (Delimited "," headers=True encoding=Encoding.windows_1252) Problem_Behavior.Report_Error
table = Data.read (enso_project.data / "windows.csv") (Delimited "," headers=True encoding=Encoding.windows_1252) Problem_Behavior.Report_Error
table.columns.map .name . should_equal ['a', 'b', 'c']
table.at 'a' . to_vector . should_equal ['$¢']
table.at 'b' . to_vector . should_equal ['¤']
table.at 'c' . to_vector . should_equal ['¥']
Test.specify "should work with UTF-16 encoding" <|
table = Data.read_file (enso_project.data / "utf16.csv") (Delimited "," headers=True encoding=Encoding.utf_16_be) Problem_Behavior.Report_Error
table = Data.read (enso_project.data / "utf16.csv") (Delimited "," headers=True encoding=Encoding.utf_16_be) Problem_Behavior.Report_Error
table.columns.map .name . should_equal ['ą', '🚀b', 'ć😎']
table.at 'ą' . to_vector . should_equal ['ą']
table.at '🚀b' . to_vector . should_equal ['✨🚀🚧😍😃😍😎😙😉☺']
@ -197,7 +197,7 @@ spec =
Problems.test_problem_handling action_2 problems_2 tester_2
Test.specify "should handle duplicated columns" <|
action on_problems = Data.read_file (enso_project.data / "duplicated_columns.csv") (Delimited "," headers=True value_formatter=Nothing) on_problems
action on_problems = Data.read (enso_project.data / "duplicated_columns.csv") (Delimited "," headers=True value_formatter=Nothing) on_problems
tester table =
table.columns.map .name . should_equal ['a', 'b', 'c', 'a_1']
table.at 'a' . to_vector . should_equal ['1']
@ -206,27 +206,27 @@ spec =
Problems.test_problem_handling action problems tester
Test.specify "should handle quotes" <|
t1 = Data.read_file (enso_project.data / "double_quoted.csv") (Delimited "," headers=True value_formatter=Nothing)
t1 = Data.read (enso_project.data / "double_quoted.csv") (Delimited "," headers=True value_formatter=Nothing)
t1.at 'a' . to_vector . should_equal ['a, x', '"a']
t1.at 'c' . to_vector . should_equal ['3', '"']
t2 = Data.read_file (enso_project.data / "escape_quoted.csv") (Delimited "," headers=True value_formatter=Nothing . with_quotes quote_escape="\")
t2 = Data.read (enso_project.data / "escape_quoted.csv") (Delimited "," headers=True value_formatter=Nothing . with_quotes quote_escape="\")
t2.at 'a' . to_vector . should_equal ['a"b', 'a\\\"z']
t3 = Data.read_file (enso_project.data / "no_quoting.csv") (Delimited "," headers=True value_formatter=Nothing . without_quotes)
t3 = Data.read (enso_project.data / "no_quoting.csv") (Delimited "," headers=True value_formatter=Nothing . without_quotes)
t3.at 'a' . to_vector . should_equal ['"y']
t3.at 'b' . to_vector . should_equal ['z"']
t3.at 'c' . to_vector . should_equal ['a']
Test.specify "should support rows spanning multiple lines if quoted" <|
t1 = Data.read_file (enso_project.data / "multiline_quoted.csv") (Delimited "," headers=True value_formatter=Nothing)
t1 = Data.read (enso_project.data / "multiline_quoted.csv") (Delimited "," headers=True value_formatter=Nothing)
t1.at 'a' . to_vector . should_equal ['1', '4']
t1.at 'b' . to_vector . should_equal ['start\n\ncontinue', '5']
t1.at 'c' . to_vector . should_equal ['3', '6']
Test.specify "should behave correctly in presence of a mismatched quote" <|
action_1 on_problems =
Data.read_file (enso_project.data / "mismatched_quote.csv") (Delimited "," headers=True value_formatter=Nothing) on_problems
Data.read (enso_project.data / "mismatched_quote.csv") (Delimited "," headers=True value_formatter=Nothing) on_problems
tester_1 table =
table.columns.map .name . should_equal ['a', 'b', 'c']
@ -237,7 +237,7 @@ spec =
Problems.test_problem_handling action_1 problems_1 tester_1
action_2 on_problems =
Data.read_file (enso_project.data / "mismatched_quote2.csv") (Delimited "," headers=True value_formatter=Nothing) on_problems
Data.read (enso_project.data / "mismatched_quote2.csv") (Delimited "," headers=True value_formatter=Nothing) on_problems
tester_2 table =
table.columns.map .name . should_equal ['a', 'b', 'c']
@ -249,7 +249,7 @@ spec =
Test.specify "should handle too long and too short rows" <|
action keep_invalid_rows on_problems =
Data.read_file (enso_project.data / "varying_rows.csv") (Delimited "," headers=True keep_invalid_rows=keep_invalid_rows value_formatter=Nothing) on_problems
Data.read (enso_project.data / "varying_rows.csv") (Delimited "," headers=True keep_invalid_rows=keep_invalid_rows value_formatter=Nothing) on_problems
tester_kept table =
table.columns.map .name . should_equal ['a', 'b', 'c']
@ -269,7 +269,7 @@ spec =
Test.specify "should aggregate invalid rows over some limit" <|
action on_problems =
Data.read_file (enso_project.data / "many_invalid_rows.csv") (Delimited "," headers=True keep_invalid_rows=False value_formatter=Nothing) on_problems
Data.read (enso_project.data / "many_invalid_rows.csv") (Delimited "," headers=True keep_invalid_rows=False value_formatter=Nothing) on_problems
tester table =
table.columns.map .name . should_equal ['a', 'b', 'c']
@ -280,33 +280,33 @@ spec =
Problems.test_problem_handling action problems tester
Test.specify "should allow to skip rows" <|
t1 = Data.read_file (enso_project.data / "simple_empty.csv") (Delimited "," headers=False skip_rows=3 value_formatter=Nothing)
t1 = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=False skip_rows=3 value_formatter=Nothing)
t1.at "Column_1" . to_vector . should_equal ['7', '10']
t2 = Data.read_file (enso_project.data / "simple_empty.csv") (Delimited "," headers=True skip_rows=3 value_formatter=Nothing)
t2 = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=True skip_rows=3 value_formatter=Nothing)
t2.columns.map .name . should_equal ['7', '8', '9']
t2.at "7" . to_vector . should_equal ['10']
Test.specify "should allow to set a limit of rows to read" <|
t1 = Data.read_file (enso_project.data / "simple_empty.csv") (Delimited "," headers=False row_limit=2 value_formatter=Nothing)
t1 = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=False row_limit=2 value_formatter=Nothing)
t1.at "Column_1" . to_vector . should_equal ['a', '1']
t2 = Data.read_file (enso_project.data / "simple_empty.csv") (Delimited "," headers=True row_limit=2 value_formatter=Nothing)
t2 = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=True row_limit=2 value_formatter=Nothing)
t2.at "a" . to_vector . should_equal ['1', '4']
t3 = Data.read_file (enso_project.data / "simple_empty.csv") (Delimited "," headers=False skip_rows=3 row_limit=1 value_formatter=Nothing)
t3 = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=False skip_rows=3 row_limit=1 value_formatter=Nothing)
t3.at "Column_1" . to_vector . should_equal ['7']
t4 = Data.read_file (enso_project.data / "simple_empty.csv") (Delimited "," headers=False row_limit=0 value_formatter=Nothing)
t4 = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=False row_limit=0 value_formatter=Nothing)
t4.columns.map .name . should_equal ['Column_1', 'Column_2', 'Column_3']
t4.row_count . should_equal 0
t5 = Data.read_file (enso_project.data / "simple_empty.csv") (Delimited "," headers=True row_limit=0 value_formatter=Nothing)
t5 = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=True row_limit=0 value_formatter=Nothing)
t5.columns.map .name . should_equal ['a', 'b', 'c']
t5.at 'a' . to_vector . should_equal []
t5.row_count . should_equal 0
t6 = Data.read_file (enso_project.data / "simple_empty.csv") (Delimited "," headers=False skip_rows=3 row_limit=1000 value_formatter=Nothing)
t6 = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=False skip_rows=3 row_limit=1000 value_formatter=Nothing)
t6.at "Column_1" . to_vector . should_equal ['7', '10']
Test.specify "should check arguments" <|
@ -363,15 +363,15 @@ spec =
Test.specify "should be able to read column names starting with #" <|
reference_table = Table.new [["#", ["a", ";1", "5"]], ["x", [42, 2, 6]], ["y", ["c # comment??", "3", "7;comment?"]]]
table = Data.read_file (enso_project.data / "comments.csv")
table = Data.read (enso_project.data / "comments.csv")
table.should_equal reference_table
Test.specify "should be able to handle comments if enabled" <|
table_hash = Table.new [["a", [";1", "5"]], ["42", [2, 6]], ["c # comment??", ["3", "7;comment?"]]]
table_semicolon = Table.new [["#", ["a", "5"]], ["x", [42, 6]], ["y", ["c # comment??", "7;comment?"]]]
Data.read_file (enso_project.data / "comments.csv") (Delimited ',' . with_comments . with_headers) . should_equal table_hash
Data.read_file (enso_project.data / "comments.csv") (Delimited ',' . with_comments ';' . with_headers) . should_equal table_semicolon
Data.read (enso_project.data / "comments.csv") (Delimited ',' . with_comments . with_headers) . should_equal table_hash
Data.read (enso_project.data / "comments.csv") (Delimited ',' . with_comments ';' . with_headers) . should_equal table_semicolon
Test.specify "should allow to build the Delimited configuration using builders" <|
Delimited "," . clone . should_equal (Delimited ",")

View File

@ -34,7 +34,7 @@ spec =
1,1.0,x,a
2,1.5,y,2
3,2.2,z,[[[My Type :: 10]]]
text = Data.read_text_file file
text = Data.read_text file
text.should_equal expected_text
file.delete
@ -46,7 +46,7 @@ spec =
separator=setting.second
file = (enso_project.data / "transient" / "endings.csv")
table.write file (Delimited ',' line_endings=style) on_problems=Report_Error . should_succeed
text = Data.read_text_file file
text = Data.read_text file
text.should_equal (lines.join separator suffix=separator)
file.delete
@ -55,7 +55,7 @@ spec =
file = (enso_project.data / "transient" / "empty.csv")
file.delete_if_exists
table.write file on_problems=Report_Error . should_succeed
text = Data.read_text_file file
text = Data.read_text file
text.should_equal ''
file.delete
@ -71,7 +71,7 @@ spec =
'bar',"1000000,5"
"""baz""","2,2"
"one, two, three","-1,5"
text = Data.read_text_file file
text = Data.read_text file
text.should_equal expected_text
file.delete
@ -88,7 +88,7 @@ spec =
one, two, three;0.0
"a;b; c ";-1.2
"a\\b";
text = Data.read_text_file file
text = Data.read_text file
text.should_equal expected_text
file.delete
@ -104,7 +104,7 @@ spec =
'The ''thing''.'\t'1''000.0'
one, "two", three\t
'a\tb'\t-1.2
text = Data.read_text_file file
text = Data.read_text file
text.should_equal expected_text
file.delete
@ -118,7 +118,7 @@ spec =
1,
,""
3,abc
text = Data.read_text_file file
text = Data.read_text file
text.should_equal expected_text
file.delete
@ -128,7 +128,7 @@ spec =
file.delete_if_exists
table.write file on_problems=Report_Error . should_succeed
expected_text = join_lines ['#,B','b,', 'x,#', '#,abc']
text = Data.read_text_file file
text = Data.read_text file
text.should_equal expected_text
file.delete
@ -139,7 +139,7 @@ spec =
b,
x,"#"
"#",abc
text_2 = Data.read_text_file file
text_2 = Data.read_text file
text_2.should_equal expected_text_2
file.delete
@ -155,7 +155,7 @@ spec =
'bar',1000000,5
"baz",2,2
one, two, three,-1,5
text = Data.read_text_file file
text = Data.read_text file
text.should_equal expected_text
file.delete
@ -171,7 +171,7 @@ spec =
"'bar'","1\"000\"000.5","[[[My Type :: 44]]]",2,13:55:00
"\"baz\"",2.2,"Tue, 21 Jun 2022",3,
expected_text = expected_text1 + '"one, two, three",-1.5,42,"4\\"000",\n'
text = Data.read_text_file file
text = Data.read_text file
text.should_equal expected_text
file.delete
@ -183,7 +183,7 @@ spec =
expected_text = normalize_lines <| """
ąęćś,ß
0,żółw 🐢
text = Data.read_text_file file encoding=Encoding.utf_16_be
text = Data.read_text file encoding=Encoding.utf_16_be
text.should_equal expected_text
file.delete
@ -196,7 +196,7 @@ spec =
A,B
0,s??wka
1,?
text = Data.read_text_file file encoding=Encoding.ascii
text = Data.read_text file encoding=Encoding.ascii
text.should_equal expected_text
result . should_equal Nothing
positions = [7, 8, 15]
@ -215,7 +215,7 @@ spec =
A,B
x,z
y,w
text_1 = Data.read_text_file file_1
text_1 = Data.read_text file_1
text_1.should_equal expected_text
result_1 . should_equal Nothing
@ -224,7 +224,7 @@ spec =
file_2.delete_if_exists
result_2 = table_2.write file_2 format
result_2 . should_fail_with Illegal_Argument.Error
text_2 = Data.read_text_file file_2
text_2 = Data.read_text file_2
text_2.should_equal ""
file_1.delete
@ -255,7 +255,7 @@ spec =
file.delete_if_exists
'A,B,C\r0,0,0'.write file
table.write file on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
text = Data.read_text_file file
text = Data.read_text file
expected_lines = ["A,B,C", "0,0,0", "1,1.0,x", "2,1.5,y", "3,2.2,z"]
text.should_equal (expected_lines.join '\r' suffix='\r')
file.delete
@ -376,7 +376,7 @@ spec =
file = (enso_project.data / "transient" / "endings.csv")
initial_table.write file (Delimited ',' line_endings=style) on_problems=Report_Error . should_succeed
table_to_append.write file on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
text = Data.read_text_file file
text = Data.read_text file
text.should_equal (expected_lines.join separator suffix=separator)
file.delete
@ -392,8 +392,8 @@ spec =
expected_lines = ["a,d", "x,z", "y,w"]
expected_text = join_lines expected_lines
Data.read_text_file empty_file . should_equal expected_text
Data.read_text_file nonexistent_file . should_equal expected_text
Data.read_text empty_file . should_equal expected_text
Data.read_text nonexistent_file . should_equal expected_text
Test.specify "should use the existing line ending style when appending to a file consisting of only comments" <|
initial_lines = ["# comment 1", "# comment 2"]
@ -406,7 +406,7 @@ spec =
(initial_lines.join separator suffix=separator).write file
format = Delimited ',' . with_comments
table_to_append.write file format on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
text = Data.read_text_file file
text = Data.read_text file
expected_text = expected_lines.join separator suffix=separator
text.should_equal expected_text
file.delete
@ -422,7 +422,7 @@ spec =
(initial_lines.join separator).write file
format = Delimited ',' . with_comments
table_to_append.write file format on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
text = Data.read_text_file file
text = Data.read_text file
expected_text = expected_lines.join separator suffix=separator
text.should_equal expected_text
file.delete
@ -442,25 +442,25 @@ spec =
separator=setting.second
(base_line+separator).write file
table.write file format on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed
text = Data.read_text_file file
text = Data.read_text file
expected_text = expected_lines_1.join separator suffix=separator
text.should_equal expected_text
file.delete
base_line.write file
table.write file format on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed
Data.read_text_file file . should_equal <| normalize_lines base_line+'\n1\n2\n'
Data.read_text file . should_equal <| normalize_lines base_line+'\n1\n2\n'
file.delete
# 1 character without trailing newline
"#".write file
table.write file format on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed
Data.read_text_file file . should_equal <| normalize_lines '#\n1\n2\n'
Data.read_text file . should_equal <| normalize_lines '#\n1\n2\n'
file.delete
"#".write file
table.write file format.with_comments on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed
Data.read_text_file file . should_equal <| normalize_lines '#\n1\n2\n'
Data.read_text file . should_equal <| normalize_lines '#\n1\n2\n'
file.delete
expected_lines_2 = ["#", "1", "2"]
@ -470,7 +470,7 @@ spec =
separator=setting.second
("#"+separator).write file
table.write file format on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed
text = Data.read_text_file file
text = Data.read_text file
expected_text = expected_lines_2.join separator suffix=separator
text.should_equal expected_text
file.delete
@ -483,7 +483,7 @@ spec =
separator=setting.second
("A"+separator+middle_line).write file
table.write file format on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed
text = Data.read_text_file file
text = Data.read_text file
expected_text = expected_lines_3.join separator suffix=separator
text.should_equal expected_text
file.delete
@ -493,7 +493,7 @@ spec =
separator=setting.second
("A"+separator+middle_line+separator).write file
table.write file format on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed
text = Data.read_text_file file
text = Data.read_text file
expected_text = expected_lines_3.join separator suffix=separator
text.should_equal expected_text
file.delete
@ -509,7 +509,7 @@ spec =
(initial_line+separator).write file
format = Delimited ',' . with_comments
table_to_append.write file format on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
text = Data.read_text_file file
text = Data.read_text file
expected_text = expected_lines.join separator suffix=separator
text.should_equal expected_text
file.delete
@ -523,7 +523,7 @@ spec =
(join_lines initial_lines trailing_newline=False).write file
format = Delimited ',' . with_comments
table_to_append.write file format on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
text = Data.read_text_file file
text = Data.read_text file
expected_text = join_lines expected_lines
text.should_equal expected_text
file.delete

View File

@ -483,19 +483,19 @@ spec =
Test.group "Read XLSX / XLS Files" <|
Test.specify "should let you read the first sheet with Auto_Detect" <|
check_table <| xlsx_sheet.read
check_table <| Data.read_file xlsx_sheet
check_table <| Data.read_file xlsx_path
check_table <| Data.read xlsx_sheet
check_table <| Data.read xlsx_path
check_table <| xls_sheet.read
check_table <| Data.read_file xls_sheet
check_table <| Data.read_file xls_path
check_table <| Data.read xls_sheet
check_table <| Data.read xls_path
Test.specify "should let you read the first sheet with Excel" <|
check_table <| xlsx_sheet.read Excel
check_table <| Data.read_file xlsx_sheet Excel
check_table <| Data.read_file xlsx_path Excel
check_table <| Data.read xlsx_sheet Excel
check_table <| Data.read xlsx_path Excel
check_table <| xls_sheet.read Excel
check_table <| Data.read_file xls_sheet Excel
check_table <| Data.read_file xls_path Excel
check_table <| Data.read xls_sheet Excel
check_table <| Data.read xls_path Excel
Test.specify "should let you read the sheet names" <|
sheet_names = ["Sheet1", "Another", "NoHeaders", "Random"]

View File

@ -11,22 +11,22 @@ spec = Test.group 'JSON conversion' <|
clothes = enso_project.data/'clothes.csv' . read
simple_empty = enso_project.data/'simple_empty.csv' . read
Test.specify 'should convert tables to a format compatible with Json.Array.to_table' <|
Test.specify 'should convert tables to a format compatible with Table.from_objects' <|
clothes_json = clothes.to_json
clothes_json.to_table ['Id', 'Name', 'Quantity', 'Rating', 'Price'] . should_equal clothes
Table.from_objects (Json.parse clothes_json) ['Id', 'Name', 'Quantity', 'Rating', 'Price'] . should_equal clothes
Test.specify 'should write JSON tables to disk' <|
out = enso_project.data / 'out.json'
out.delete_if_exists
simple_empty.write_json out
(Json.parse out.read_text).to_table ['a', 'b', 'c'] . should_equal simple_empty
Table.from_objects (Json.parse out.read_text) ['a', 'b', 'c'] . should_equal simple_empty
out.delete_if_exists
Test.specify "should allow converting a JSON array into a table" <|
r_1 = Json.from_pairs [['foo', 20], ['bar', 'baz'], ['baz', False]]
r_2 = Json.from_pairs [['bar', 'xyz'], ['baz', True]]
r_3 = Json.from_pairs [['baz', False], ['foo', 13]]
t = [r_1, r_2, r_3].to_json.to_table ['foo', 'bar', 'baz']
r_1 = JS_Object.from_pairs [['foo', 20], ['bar', 'baz'], ['baz', False]]
r_2 = JS_Object.from_pairs [['bar', 'xyz'], ['baz', True]]
r_3 = JS_Object.from_pairs [['baz', False], ['foo', 13]]
t = Table.from_objects [r_1, r_2, r_3] ['foo', 'bar', 'baz']
t.columns.map .name . should_equal ['foo', 'bar', 'baz']
t.at 'foo' . to_vector . should_equal [20, Nothing, 13]
t.at 'bar' . to_vector . should_equal ['baz', 'xyz', Nothing]

View File

@ -1,7 +1,8 @@
from Standard.Base import all
import Standard.Base.Error.Common.Index_Out_Of_Bounds
import Standard.Base.Error.Illegal_Argument.Illegal_Argument
from Standard.Base.Data.Json import Json_Parse_Error, No_Such_Field
import Standard.Base.Error.No_Such_Key.No_Such_Key
from Standard.Base.Data.Json import Invalid_JSON
from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
@ -17,7 +18,7 @@ Text.should_fail_parsing_with self expected =
as_fail = case Json.parse self of
_ -> Test_Result.Failure "Expected a parse error, but no error reported."
result = as_fail.catch Any e-> case e of
Json_Parse_Error.Error msg ->
Invalid_JSON.Error msg ->
if msg.contains expected then Test_Result.Success else
fail_msg = "The reported message " + msg.to_text + " did not contain " + expected.to_text + "."
Test_Result.Failure fail_msg
@ -29,10 +30,10 @@ Text.should_fail_parsing_with self expected =
_ -> Panic.throw result
Text.should_parse_as self expected =
Json.parse self . should_equal expected.to_json
Json.parse self . should_equal expected
Text.should_render_itself self =
Json.parse self . to_text . should_equal self
Json.parse self . to_json . should_equal self
spec =
Test.group "JSON Deserialization" <|
@ -46,47 +47,24 @@ spec =
"false".should_parse_as False
"true".should_parse_as True
"null".should_parse_as Nothing
"[null, null, true, false]".should_parse_as <|
[Nothing, Nothing, True, False]
"[null, null, true, false]".should_parse_as [Nothing, Nothing, True, False]
"[]".should_parse_as []
'[[1, 3], [5 , "foo"], 7, 8]'.should_parse_as <|
[[1, 3], [5, 'foo'], 7, 8]
'[[1, 3], [5 , "foo"], 7, 8]'.should_parse_as [[1, 3], [5, 'foo'], 7, 8]
'"foo"'.should_parse_as 'foo'
'"foo\\n\\t\\u20AC\\u20AC"'.should_parse_as 'foo\n\t€€'
'["foo", "foo\\n\\u00aB", null, true]'.should_parse_as <|
["foo", 'foo\n\u{AB}', Nothing, True]
object_json = '''
{ "foo": "bar",
"baz": ["foo", "x", false],
"y": {"z": null, "w": null} }
'["foo", "foo\\n\\u00aB", null, true]'.should_parse_as ["foo", 'foo\n\u{AB}', Nothing, True]
object_json = '{"foo": "bar", "baz": ["foo", "x", false], "y": {"z": null, "w": null} }'
object_json.should_parse_as <|
y_v = Json.from_pairs [["z", Nothing], ["w", Nothing]]
Json.from_pairs [["foo", "bar"], ["baz", ["foo", "x", False]], ["y", y_v]]
y_v = JS_Object.from_pairs [["z", Nothing], ["w", Nothing]]
JS_Object.from_pairs [["foo", "bar"], ["baz", ["foo", "x", False]], ["y", y_v]]
Test.specify "should report meaningful parsing errors" <|
"foo".should_fail_parsing_with "Expected a false"
"[,]".should_fail_parsing_with "Expected a start of a JSON value"
"{,}".should_fail_parsing_with "Expected a string"
deep_err = '''
{ "foo": "bar",
"baz": ["foo", "x"", false],
"y": {"z": null, "w": null} }
deep_err.should_fail_parsing_with "Expected a comma"
"123 4".should_fail_parsing_with "Expected end of input"
Test.specify "should parse and convert JSON into domain model" <|
book_1 = Book.Value "Lord of the Rings" <|
Author.Value "J. R. R. Tolkien" 1892
book_2 = Book.Value "The Little Prince" <|
Author.Value "Antoine de Saint-Exupéry" 1900
book_3 = Book.Value "And Then There Were None" <|
Author.Value "Agatha Christie" 1890
books = [book_1, book_2, book_3]
json_string = (enso_project.data / "books.json").read_text
parsed = Json.parse json_string
domain = parsed.into (Vector.fill 1 (Book.Value title=Text (Author.Value name=Text year_of_birth=Number)))
domain.should_equal books
"foo".should_fail_parsing_with "Unexpected token f in JSON at position 0"
"[,]".should_fail_parsing_with "Unexpected token , in JSON at position 1"
"{,}".should_fail_parsing_with "Unexpected token , in JSON at position 1"
deep_err = '{ "foo": "bar", "baz": ["foo", "x"", false], "y": {"z": null, "w": null} }'
deep_err.should_fail_parsing_with "closing quote ] expected at position 34"
"123 4".should_fail_parsing_with "JSON cannot be fully parsed at position 4"
Test.group "JSON Serialization" <|
Test.specify "should print JSON structures to valid json" <|
@ -106,66 +84,66 @@ spec =
object_json.should_render_itself
Test.specify "should convert arbitrary types to JSON" <|
1.to_json.should_equal (Json.Number 1)
1.54.to_json.should_equal (Json.Number 1.54)
["foo", "bar", "baz"].to_json.should_equal <|
(Json.Array [Json.String "foo", Json.String "bar", Json.String "baz"])
1.to_json.should_equal "1"
1.54.to_json.should_equal "1.54"
["foo", "bar", "baz"].to_json.should_equal '["foo","bar","baz"]'
Author.Value "Tolkien" 1892 . to_json . should_equal <|
n = Json.String "Tolkien"
y = Json.Number 1892
t = Json.String "Value"
fields = Map.empty . insert "type" t . insert "name" n . insert "year_of_birth" y
Json.Object fields
'{"type":"Author","constructor":"Value","name":"Tolkien","year_of_birth":1892}'
Test.specify "should render NaN and Infinity to null" <|
Number.nan.to_json.to_text . should_equal "null"
Number.positive_infinity.to_json.to_text . should_equal "null"
Number.negative_infinity.to_json.to_text . should_equal "null"
Number.nan.to_json . should_equal "null"
Number.positive_infinity.to_json . should_equal "null"
Number.negative_infinity.to_json . should_equal "null"
Test.group "JS_Object" <|
Test.specify "should be buildable from pairs" <|
JS_Object.from_pairs [["foo", "bar"]] . to_json . should_equal '{"foo":"bar"}'
JS_Object.from_pairs [["foo", "bar"], ["baz", Nothing]] . to_json . should_equal '{"foo":"bar","baz":null}'
Test.specify "should be handle equality on a key level" <|
JS_Object.from_pairs [["a", 42]] . should_equal <| Json.parse '{"a": 42}'
JS_Object.from_pairs [["a", 42]] . should_not_equal <| JS_Object.from_pairs [["a", 43]]
JS_Object.from_pairs [["a", 42]] . should_not_equal <| JS_Object.from_pairs [["b", 42]]
JS_Object.from_pairs [["a", 42], ["b", Nothing]] . should_equal <| JS_Object.from_pairs [["b", Nothing], ["a", 42]]
JS_Object.from_pairs [["a", 42], ["b", Nothing]] . should_not_equal <| JS_Object.from_pairs [["c", Nothing], ["a", 42]]
obj1 = JS_Object.from_pairs [["a", 42], ["b", JS_Object.from_pairs [["c",1], ["d",2]]]]
obj1 . should_equal <| JS_Object.from_pairs [["a", 42], ["b", JS_Object.from_pairs [["c",1], ["d",2]]]]
obj1 . should_equal <| JS_Object.from_pairs [["b", JS_Object.from_pairs [["d",2], ["c",1]]], ["a", 42]]
obj1 . should_not_equal <| JS_Object.from_pairs [["a", 43], ["b", 123]]
obj1 . should_not_equal <| JS_Object.from_pairs [["a", 42], ["b", JS_Object.from_pairs [["c",1], ["d",3]]]]
Test.group "JSON" <|
Test.specify "should allow getting object fields" <|
object = Json.parse '''
{ "foo": "bar",
"baz": ["foo", "x", false],
"y": {"z": null, "w": null} }
object.get "foo" . should_equal (Json.String "bar")
object.get_or_else "foo" . should_equal (Json.String "bar")
object.get "bar" . should_fail_with No_Such_Field.Error
object.get_or_else "bar" . should_equal Json.Null
object.get_or_else "bar" 1 . should_equal 1
Json.Null.get "foo" . should_fail_with No_Such_Field.Error
Json.Null.get_or_else "foo" . should_equal Json.Null
Json.Null.get_or_else "foo" 1 . should_equal 1
object = Json.parse '{ "foo": "bar", "baz": ["foo", "x", false],"y": {"z": null, "w": null} }'
object.get "foo" . should_equal "bar"
object.get "bar" . should_equal Nothing
object.get "bar" 1 . should_equal 1
object.at "foo" . should_equal "bar"
object.at "bar" . should_fail_with No_Such_Key.Error
Json.parse "null" . get "foo" . should_equal Nothing
Json.parse "null" . get "foo" 1 . should_equal 1
Test.specify "should be able to get field_names" <|
Json.parse '{ "foo": "bar", "baz": ["foo", "x", false] }' . field_names . should_equal ["baz", "foo"]
Json.parse '{ "foo": "bar", "baz": ["foo", "x", false] }' . field_names . should_equal ["foo", "baz"]
Json.parse '{}' . field_names . should_equal []
Json.parse 'null' . field_names . should_fail_with Illegal_Argument.Error
Json.parse '[1,2,3]' . field_names . should_fail_with Illegal_Argument.Error
Test.specify "should be able to get a value by index" <|
Json.parse "[1, 2, 3, 4, 5]" . at 2 . should_equal (Json.Number 3)
Json.parse "[1, 2, 3, 4, 5]" . at -2 . should_equal (Json.Number 4)
Json.parse "[1, 2, 3, 4, 5]" . at 2 . should_equal 3
Json.parse "[1, 2, 3, 4, 5]" . at -2 . should_equal 4
Json.parse "[1, 2, 3, 4, 5]" . at 5 . should_fail_with Index_Out_Of_Bounds.Error
Json.parse '"foo"' . at 0 . should_equal (Json.String "f")
Json.parse '"foo"' . at -1 . should_equal (Json.String "o")
Json.parse '"foo"' . at 3 . should_fail_with Index_Out_Of_Bounds.Error
Json.parse '{}' . at 1 . should_fail_with Illegal_Argument.Error
Test.specify "should be able to make empty objects and array" <|
Json.empty_object.should_equal (Json.Object Map.empty)
Json.empty_object.to_text.should_equal "{}"
Json.empty_array.should_equal (Json.Array [])
Json.empty_array.to_text.should_equal "[]"
Json.parse "[1, 2, 3, 4, 5]" . get 2 . should_equal 3
Json.parse "[1, 2, 3, 4, 5]" . get -2 . should_equal 4
Json.parse "[1, 2, 3, 4, 5]" . get 5 . should_equal Nothing
Json.parse "[1, 2, 3, 4, 5]" . get 5 "?" . should_equal "?"
Test.specify "should be able to get length" <|
Json.empty_object.length.should_equal 0
Json.empty_array.length.should_equal 0
Json.parse "{}" . length . should_equal 0
Json.parse "[]" . length . should_equal 0
Json.parse '{ "foo": "bar", "baz": ["foo", "x", false] }' . length . should_equal 2
Json.parse '[1, 2, 3, 4, 5]' . length . should_equal 5
Json.parse '"foo"' . length . should_equal 3
Json.parse '""' . length . should_equal 0
Json.Null.length.should_fail_with Illegal_Argument.Error
main = Test_Suite.run_main spec

View File

@ -69,8 +69,7 @@ spec = Test.group "Locale" <|
Locale.uk.to_text . should_equal "en-GB"
Locale.us.to_text . should_equal "en-US"
Test.specify "should convert to Json" <|
en_gb.to_json.should_equal <|
Json.from_pairs [["type", "Locale"], ["language", "en"], ["country", "GB"]]
en_gb.to_json.should_equal '{"type":"Locale","constructor":"new","language":"en","country":"GB","variant":null}'
Test.specify "should allow equality comparisons" <|
Locale.uk . should_equal Locale.uk
Locale.uk . should_not_equal Locale.us

View File

@ -1,6 +1,5 @@
from Standard.Base import all
from Standard.Base.Data.Map import No_Value_For_Key
import Standard.Base.Error.No_Such_Key.No_Such_Key
from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
@ -43,15 +42,15 @@ spec = Test.group "Maps" <|
map_2==map_3 . should_be_false
Test.specify "should allow inserting and looking up values" <|
m = Map.empty . insert "foo" 134 . insert "bar" 654 . insert "baz" "spam"
m.get "foo" . should_equal 134
m.get "bar" . should_equal 654
m.get "baz" . should_equal "spam"
(m.get "nope").should_fail_with No_Value_For_Key.Error
Test.specify "should support get_or_else" <|
m.at "foo" . should_equal 134
m.at "bar" . should_equal 654
m.at "baz" . should_equal "spam"
(m.at "nope").should_fail_with No_Such_Key.Error
Test.specify "should support get" <|
m = Map.empty . insert 2 3
m.get_or_else 2 0 . should_equal 3
m.get_or_else 1 10 . should_equal 10
m.get_or_else 2 (Panic.throw "missing") . should_equal 3
m.get 2 0 . should_equal 3
m.get 1 10 . should_equal 10
m.get 2 (Panic.throw "missing") . should_equal 3
Test.specify "should allow transforming the map" <|
m = Map.empty . insert 1 2 . insert 2 4
expected = Map.empty . insert "1" 4 . insert "2" 8
@ -113,13 +112,13 @@ spec = Test.group "Maps" <|
m.last . should_equal (Pair.new Nothing 1)
Test.specify "should be able to add a Nothing key to the map of Text" <|
m = Map.empty . insert "A" 2 . insert Nothing 1 . insert "B" 3
m.get "A" . should_equal 2
m.get "B" . should_equal 3
m.get Nothing . should_equal 1
m.at "A" . should_equal 2
m.at "B" . should_equal 3
m.at Nothing . should_equal 1
Test.specify "should be able to add a Nothing key to the map of Integer" <|
m = Map.empty . insert 100 2 . insert Nothing 1 . insert 200 3
m.get 100 . should_equal 2
m.get 200 . should_equal 3
m.get Nothing . should_equal 1
m.at 100 . should_equal 2
m.at 200 . should_equal 3
m.at Nothing . should_equal 1
main = Test_Suite.run_main spec

View File

@ -501,14 +501,14 @@ spec =
Test.specify "should return the results of all named groups" <|
groups = match.named_groups
groups.size . should_equal 2
groups.get "letters" . should_equal "abc a bc bcd"
groups.get "empty" . should_equal Nothing
groups.at "letters" . should_equal "abc a bc bcd"
groups.at "empty" . should_equal Nothing
Test.specify "should replace unmatched groups by a user-specified value" <|
groups = match.named_groups "UNMATCHED"
groups.size . should_equal 2
groups.get "letters" . should_equal "abc a bc bcd"
groups.get "empty" . should_equal "UNMATCHED"
groups.at "letters" . should_equal "abc a bc bcd"
groups.at "empty" . should_equal "UNMATCHED"
Test.group "Match.start" <|
engine = Default_Engine.new

View File

@ -88,7 +88,7 @@ spec_with name create_new_date parse_date =
Test.specify "should convert to Json" <|
date = create_new_date 2001 12 21
date.to_json.should_equal <|
Json.from_pairs [["type", "Date"], ["day", date.day], ["month", date.month], ["year", date.year]]
JS_Object.from_pairs [["type", "Date"], ["constructor", "new"], ["day", date.day], ["month", date.month], ["year", date.year]] . to_text
Test.specify "should add date-based interval" <|
date = create_new_date 1970 + (Period.new days=1)

View File

@ -67,7 +67,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
time.to_json.should_equal <|
zone_pairs = [["zone", Time_Zone.utc]]
time_pairs = [["year", time.year], ["month", time.month], ["day", time.day], ["hour", time.hour], ["minute", time.minute], ["second", time.second], ["nanosecond", time.nanosecond]]
Json.from_pairs ([["type", "Date_Time"]] + time_pairs + zone_pairs)
JS_Object.from_pairs ([["type", "Date_Time"], ["constructor", "new"]] + time_pairs + zone_pairs) . to_text
Test.specify "should parse default time format" <|
text = create_new_datetime 1970 (zone = Time_Zone.utc) . to_text

View File

@ -9,19 +9,6 @@ polyglot java import java.time.Duration as Java_Duration
polyglot java import java.time.LocalDate
polyglot java import java.time.LocalDateTime as Java_DateTime
java_datetime year month=1 day=1 hour=0 minute=0 second=0 nanosecond=0 =
Panic.catch Any (Java_DateTime.of year month day hour minute second nanosecond) (err -> Error.throw (Time_Error.Error <| err.payload.getMessage))
js_datetime year month=1 day=1 hour=0 minute=0 second=0 nanosecond=0 =
Panic.catch Any (js_datetime_impl year month day hour minute second nanosecond) (err -> Error.throw (Time_Error.Error err.payload))
foreign js js_datetime_impl year month day hour minute second nanosecond = """
if (month > 12 || month < 1) {
throw `Invalid value for MonthOfYear (valid values 1 - 12): ${month}`;
}
return new Date(year, month - 1, day, hour, minute, second, nanosecond / 1000000);
spec =
Test.group "Duration" <|
@ -54,8 +41,8 @@ spec =
Test.specify "should convert to Json" <|
interval = (Duration.new nanoseconds=120) + (Duration.new seconds=30) + (Duration.new hours=14)
interval.to_json.should_equal <|
duration_pairs = [["nanoseconds", interval.nanoseconds], ["seconds", interval.seconds], ["hours", interval.hours]]
Json.from_pairs ([["type", "Duration"]] + duration_pairs)
duration_pairs = [["hours", interval.hours], ["seconds", interval.seconds], ["nanoseconds", interval.nanoseconds]]
JS_Object.from_pairs ([["type", "Duration"], ["constructor", "new"]] + duration_pairs) . to_text
Test.specify "should be comparable" <|
duration_1 = (Duration.new hours=5)
@ -105,5 +92,16 @@ spec =
(Duration.between (js_datetime 2022 09 08) (java_datetime 2022 09 09) timezone_aware=False).total_hours . should_equal 24
(Duration.between (java_datetime 2022 09 09) (js_datetime 2022 09 08) timezone_aware=False).total_hours . should_equal (-24)
java_datetime year month=1 day=1 hour=0 minute=0 second=0 nanosecond=0 =
Panic.catch Any (Java_DateTime.of year month day hour minute second nanosecond) (err -> Error.throw (Time_Error.Error <| err.payload.getMessage))
js_datetime year month=1 day=1 hour=0 minute=0 second=0 nanosecond=0 =
Panic.catch Any (js_datetime_impl year month day hour minute second nanosecond) (err -> Error.throw (Time_Error.Error err.payload))
foreign js js_datetime_impl year month day hour minute second nanosecond = """
if (month > 12 || month < 1) {
throw `Invalid value for MonthOfYear (valid values 1 - 12): ${month}`;
}
return new Date(year, month - 1, day, hour, minute, second, nanosecond / 1000000);
main = Test_Suite.run_main spec

View File

@ -41,7 +41,7 @@ specWith name create_new_time parse_time =
time = create_new_time 1 2 3
time.to_json.should_equal <|
time_pairs = [["hour", time.hour], ["minute", time.minute], ["second", time.second], ["nanosecond", time.nanosecond]]
Json.from_pairs ([["type", "Time_Of_Day"]] + time_pairs)
JS_Object.from_pairs ([["type", "Time_Of_Day"], ["constructor", "new"]] + time_pairs) . to_text
Test.specify "should parse default time format" <|
text = create_new_time 12 20 44 . to_text

View File

@ -30,9 +30,9 @@ spec =
Test.specify "should convert to Json" <|
zone = Time_Zone.new 1 2 3
zone.to_json.should_equal <|
Json.from_pairs [["type", "Time_Zone"], ["id", "+01:02:03"]]
JS_Object.from_pairs [["type", "Time_Zone"], ["constructor", "new"], ["id", "+01:02:03"]] . to_text
Time_Zone.utc.to_json.should_equal <|
Json.from_pairs [["type", "Time_Zone"], ["id", "UTC"]]
JS_Object.from_pairs [["type", "Time_Zone"], ["constructor", "new"], ["id", "UTC"]] . to_text
Test.specify "should throw error when parsing invalid zone id" <|
case Time_Zone.parse "foo" . catch of
Time_Error.Error msg ->
@ -64,9 +64,9 @@ spec =
Test.specify "should convert to Json" <|
zone = ZoneOffset.ofHoursMinutesSeconds 1 2 3
zone.to_json.should_equal <|
Json.from_pairs [["type", "Time_Zone"], ["id", "+01:02:03"]]
JS_Object.from_pairs [["type", "Time_Zone"], ["constructor", "new"], ["id", "+01:02:03"]] . to_text
(ZoneId.of "UTC").to_json.should_equal <|
Json.from_pairs [["type", "Time_Zone"], ["id", "UTC"]]
JS_Object.from_pairs [["type", "Time_Zone"], ["constructor", "new"], ["id", "UTC"]] . to_text
Test.specify "should correctly determine the type of zone" <|
zone = ZoneId.systemDefault
Meta.type_of zone . should_equal_type Time_Zone

View File

@ -98,15 +98,24 @@ spec = Test.group "Vectors" <|
vec = [Error.throw (My_Error.Value "foo"), "bar"]
vec.at 1 . should_equal "bar"
vec.at 0 . should_fail_with My_Error.Value
vec.get 1 . should_equal "bar"
vec.get 0 . should_fail_with My_Error.Value
Test.specify "should allow accessing elements with negative indices" <|
[1,2,3].at -1 . should_equal 3
[1,2,3].at -2 . should_equal 2
[1,2,3].at -3 . should_equal 1
[1,2,3].get -1 . should_equal 3
[1,2,3].get -2 . should_equal 2
[1,2,3].get -3 . should_equal 1
Test.specify "should return a dataflow error when accessing elements out of bounds" <|
Test.specify "should correctly handle out of bounds access" <|
[1,2,3].at -4 . should_fail_with Index_Out_Of_Bounds.Error
[1,2,3].at 3 . should_fail_with Index_Out_Of_Bounds.Error
[1,2,3].get -4 . should_equal Nothing
[1,2,3].get 3 . should_equal Nothing
[1,2,3].get -4 "???" . should_equal "???"
[1,2,3].get 3 "???" . should_equal "???"
Test.specify "should have a well-defined length" <|
[1,2,3].length . should_equal 3

View File

@ -55,7 +55,7 @@ spec =
}
res = HTTP.new.get url_get
res.code.should_equal HTTP_Status_Code.ok
res.body.to_json.should_equal expected_response
res.body.parse_json.should_equal expected_response
Test.specify "should send Get request using module method" <|
expected_response = Json.parse <| '''
{
@ -69,7 +69,7 @@ spec =
}
res = HTTP.new.get url_get
res.code.should_equal HTTP_Status_Code.ok
res.body.to_json.should_equal expected_response
res.body.parse_json.should_equal expected_response
Test.specify "should fetch the body of a Get request" <|
expected_response = Json.parse <| '''
@ -83,7 +83,7 @@ spec =
"args": {}
}
res = HTTP.fetch url_get
res.to_json.should_equal expected_response
res.parse_json.should_equal expected_response
Test.specify "should return error if the fetch method fails" <|
HTTP.fetch "http://undefined_host" . should_fail_with Request_Error.Error
@ -110,7 +110,7 @@ spec =
body_empty = Request_Body.Empty
res = HTTP.new.post url_post body_empty
res.code.should_equal HTTP_Status_Code.ok
res.body.to_json.should_equal expected_response
res.body.parse_json.should_equal expected_response
Test.specify "should Post empty body using module method" <|
expected_response = Json.parse <| '''
{
@ -129,7 +129,7 @@ spec =
body_empty = Request_Body.Empty
res = HTTP.new.post url_post body_empty
res.code.should_equal HTTP_Status_Code.ok
res.body.to_json.should_equal expected_response
res.body.parse_json.should_equal expected_response
Test.specify "should Post text body" <|
expected_response = Json.parse <| '''
{
@ -149,7 +149,7 @@ spec =
body_text = Request_Body.Text "Hello World!"
res = HTTP.new.post url_post body_text
res.code.should_equal HTTP_Status_Code.ok
res.body.to_json.should_equal expected_response
res.body.parse_json.should_equal expected_response
Test.specify "should Post form text" <|
expected_response = Json.parse <| '''
{
@ -169,7 +169,7 @@ spec =
form_parts = [Form.text_field "key" "val"]
res = HTTP.new.post_form url_post form_parts
res.code.should_equal HTTP_Status_Code.ok
res.body.to_json.should_equal expected_response
res.body.parse_json.should_equal expected_response
Test.specify "should Post form text using module method" <|
expected_response = Json.parse <| '''
{
@ -189,7 +189,7 @@ spec =
form_parts = [Form.text_field "key" "val"]
res = HTTP.new.post_form url_post form_parts
res.code.should_equal HTTP_Status_Code.ok
res.body.to_json.should_equal expected_response
res.body.parse_json.should_equal expected_response
Test.specify "should Post form file" <|
test_file = enso_project.data / "sample.txt"
form_parts = [Form.text_field "key" "val", Form.file_field "sample" test_file]
@ -218,11 +218,10 @@ spec =
"key": "val"
}
}
json = Json.parse <| '''
{"key":"val"}
json = '{"key":"val"}'
res = HTTP.new.post_json url_post json
res.code.should_equal HTTP_Status_Code.ok
res.body.to_json.should_equal expected_response
res.body.parse_json.should_equal expected_response
Test.specify "should Post Json using module method" <|
expected_response = Json.parse <| '''
{
@ -241,11 +240,10 @@ spec =
"key": "val"
}
}
json = Json.parse <| '''
{"key":"val"}
json = '{"key":"val"}'
res = HTTP.new.post_json url_post json
res.code.should_equal HTTP_Status_Code.ok
res.body.to_json.should_equal expected_response
res.body.parse_json.should_equal expected_response
Test.specify "should Post binary" <|
expected_response = Json.parse <| '''
{
@ -265,7 +263,7 @@ spec =
body_bytes = Request_Body.Bytes "Hello World!".utf_8
res = HTTP.new.post url_post body_bytes
res.code.should_equal HTTP_Status_Code.ok
res.body.to_json.should_equal expected_response
res.body.parse_json.should_equal expected_response
Test.specify "should create and send Get request" <|
expected_response = Json.parse <| '''
@ -281,7 +279,7 @@ spec =
req = Request.new HTTP_Method.Get url_get
res = HTTP.new.request req
res.code.should_equal HTTP_Status_Code.ok
res.body.to_json.should_equal expected_response
res.body.parse_json.should_equal expected_response
Test.specify "should create and send Post request with json body" <|
expected_response = Json.parse <| '''
{
@ -300,13 +298,12 @@ spec =
"key": "val"
}
}
json_body = Json.parse <| '''
{ "key": "val" }
json_body = '{"key":"val"}'
req = Request.new HTTP_Method.Post url_post
req_with_body = req.with_json json_body
res = HTTP.new.request req_with_body
res.code.should_equal HTTP_Status_Code.ok
res.body.to_json.should_equal expected_response
res.body.parse_json.should_equal expected_response
Test.specify "should create and send Post request with json text" <|
expected_response = Json.parse <| '''
{
@ -331,6 +328,6 @@ spec =
req_with_body = req.with_json json_text
res = HTTP.new.request req_with_body
res.code.should_equal HTTP_Status_Code.ok
res.body.to_json.should_equal expected_response
res.body.parse_json.should_equal expected_response
main = Test_Suite.run_main spec

View File

@ -25,12 +25,12 @@ spec =
bytes.should_equal [72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33]
Test.specify "should be able to read a file as Bytes by method" <|
bytes = Data.read_file (sample_xxx) Bytes
bytes = Data.read (sample_xxx) Bytes
bytes.should_equal [72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33]
Test.specify "should be able to read a file as Bytes by path" <|
path = sample_xxx.path
bytes = Data.read_file path Bytes
bytes = Data.read path Bytes
bytes.should_equal [72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33]
Test.group "Plain_Text" <|

View File

@ -119,7 +119,7 @@ spec =
Test.specify "should allow reading a UTF-8 file via path" <|
full_path = sample_file . path
contents = Data.read_text_file full_path
contents = Data.read_text full_path
contents.should_start_with "Cupcake ipsum dolor sit amet."
Test.specify "should allow reading a Windows file via path" <|
@ -134,16 +134,16 @@ spec =
Test.specify "should handle exceptions when reading a non-existent file" <|
file = File.new "does_not_exist.txt"
Data.read_text_file "does_not_exist.txt" . should_fail_with File_Error.Not_Found
Data.read_text "does_not_exist.txt" . should_fail_with File_Error.Not_Found
file.read_text . should_fail_with File_Error.Not_Found
non_existent_file.read_text . should_fail_with File_Error.Not_Found
Test.specify "should open and read the file in one shot" <|
path_name = sample_file.path
contents = Data.read_text_file path_name
contents = Data.read_text path_name
contents.should_start_with "Cupcake ipsum dolor sit amet."
file = sample_file
contents_2 = Data.read_text_file file
contents_2 = Data.read_text file
contents_2.should_start_with "Cupcake ipsum dolor sit amet."
Test.group "write operations" <|

View File

@ -11,7 +11,6 @@ import project.Helpers
spec =
expect value expected_json_text =
result = Geo_Map.process_to_json_text value
IO.println result
Json.parse result . should_equal <| Json.parse expected_json_text
Test.group "Geo_Map" <|

View File

@ -4,7 +4,7 @@ from Standard.Table import Table
import Standard.Visualization.Helpers
from Standard.Test import Test
from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
import project.Helpers
@ -57,8 +57,8 @@ spec =
Test.group "Table.rows" <|
table = Table.new [["X", [1, 2, 3, 4]], ["Y", [5, 6, 7, 8]], ["Z", ["A", "B", "C", "D"]]]
Test.specify "should visualize nicely" <|
table.rows . to_default_visualization_data . should_equal <|
'[[1,5,"A"],[2,6,"B"],[3,7,"C"],[4,8,"D"]]'
Json.parse (table.rows . to_default_visualization_data) . should_equal <|
Json.parse '[[1,5,"A"],[2,6,"B"],[3,7,"C"],[4,8,"D"]]'
# We limit to at most 100 rows, in the future we should add some kind of 'and N more rows' to the visualization, like is done for Table - or just integrate the lazy vis.
t2 = Table.new [["X", 0.up_to 200 . to_vector]]
@ -77,39 +77,44 @@ spec =
vec = Vector.fill 1000 0
text = vec.to_default_visualization_data
json = Json.parse text
as_vec = json.into (Vector.fill 1 Number)
as_vec.should_equal <| Vector.fill 100 0
json.should_equal <| Vector.fill 100 0
Test.specify "should be able to visualize an Enso Array" <|
arr = Vector.fill 1000 0 . to_array
text = arr.to_default_visualization_data
json = Json.parse text
as_vec = json.into (Vector.fill 1 Number)
as_vec.should_equal <| Vector.fill 100 0
json.should_equal <| Vector.fill 100 0
Test.specify "should be able to visualize a Polyglot Array" pending="`to_default_visualization_data` does not work for polyglot arrays" <|
arr = make_java_array (Vector.fill 1000 0)
text = arr.to_default_visualization_data
json = Json.parse text
as_vec = json.into (Vector.fill 1 Number)
as_vec.should_equal <| Vector.fill 100 0
json.should_equal <| Vector.fill 100 0
Test.group "Dataflow Error Visualization" <|
Test.specify "should be able to be shown in the default visualization" <|
json = (Error.throw <| My_Type.Value "aaa").to_default_visualization_data
json . should_equal <| (Json.from_pairs [["foo", "aaa"], ["type", "Value"]]).to_text
Json.parse json . should_equal <|
JS_Object.from_pairs [["type", "My_Type"], ["constructor", "Value"], ["foo", "aaa"]]
Test.specify "should be able to be shown in the default vector visualization" <|
vec = [My_Type.Value "bar", Error.throw (My_Type.Value 42)]
visualization_text = vec.to_default_visualization_data
expected_json = Json.parse '''
[
{ "foo":"bar",
"type":"Value"
},
{ "content":{ "foo":42, "type":"Value" },
"message":"My_Type.Value",
"type":"Error"
}
]
visualization_text.should_equal expected_json.to_text
Json.parse visualization_text . should_equal <|
expected_json = '''
[
{
"type":"My_Type",
"constructor":"Value",
"foo":"bar"
},
{
"type":"Error",
"content":{ "type":"My_Type", "constructor":"Value", "foo":42 },
"message":"My_Type.Value"
}
]
Json.parse expected_json
main = Test_Suite.run_main spec

View File

@ -13,13 +13,13 @@ spec =
expect value expected_label expected_values =
text = Histogram.process_to_json_text value
json = Json.parse text
expected_data = Json.from_pairs [['values', expected_values]]
expected_data = JS_Object.from_pairs [['values', expected_values]]
expected_json = case expected_label of
Nothing -> Json.from_pairs [['data', expected_data]]
Nothing -> JS_Object.from_pairs [['data', expected_data]]
_ ->
expected_x = Json.from_pairs [['label', expected_label]]
expected_axis = ['axis', Json.from_pairs [['x', expected_x]]]
Json.from_pairs [['data', expected_data], expected_axis]
expected_x = JS_Object.from_pairs [['label', expected_label]]
expected_axis = ['axis', JS_Object.from_pairs [['x', expected_x]]]
JS_Object.from_pairs [['data', expected_data], expected_axis]
json.should_equal expected_json
Test.group "Histogram Visualization" <|

View File

@ -14,7 +14,7 @@ type My_Type
spec = Test.group "Serializable Visualization Identifiers" <|
Test.specify "serializes builtin visualizations in the correct format" <|
json_for_name n = Json.from_pairs [["library", Nothing], ["name", n]]
json_for_name n = JS_Object.from_pairs [["library", Nothing], ["name", n]] . to_text
Visualization.Id.json.to_json . should_equal (json_for_name "JSON")
Visualization.Id.scatter_plot.to_json . should_equal (json_for_name "Scatter Plot")
Visualization.Id.histogram.to_json . should_equal (json_for_name "Histogram")
@ -26,8 +26,8 @@ spec = Test.group "Serializable Visualization Identifiers" <|
Test.specify "serializes library visualizations in the correct format" <|
expected p_name v_name =
lib = Json.from_pairs [["name", p_name]]
Json.from_pairs [["library", lib], ["name", v_name]]
lib = JS_Object.from_pairs [["name", p_name]]
JS_Object.from_pairs [["library", lib], ["name", v_name]] . to_text
v_1 = Visualization.Id.from_module Id_Spec "My Vis"
v_2 = Visualization.Id.from_module Base "Other Vis"
v_1.to_json.should_equal (expected "enso_dev.Visualization_Tests" "My Vis")

View File

@ -14,10 +14,10 @@ visualization_spec connection =
Test.specify "should provide type metadata for interpolations" <|
q = t.filter ((t.at "B" == 2) && (t.at "A" == True)) . at "C"
vis = Visualization.prepare_visualization q
int_param = Json.from_pairs [["value", 2], ["actual_type", "Standard.Base.Data.Numbers.Integer"], ["expected_sql_type", "INTEGER"], ["expected_enso_type", "Standard.Base.Data.Numbers.Integer"]]
str_param = Json.from_pairs [["value", True], ["actual_type", "Standard.Base.Data.Boolean.Boolean"], ["expected_sql_type", "VARCHAR"], ["expected_enso_type", "Standard.Base.Data.Text.Text"]]
int_param = JS_Object.from_pairs [["value", 2], ["actual_type", "Standard.Base.Data.Numbers.Integer"], ["expected_sql_type", "INTEGER"], ["expected_enso_type", "Standard.Base.Data.Numbers.Integer"]]
str_param = JS_Object.from_pairs [["value", True], ["actual_type", "Standard.Base.Data.Boolean.Boolean"], ["expected_sql_type", "VARCHAR"], ["expected_enso_type", "Standard.Base.Data.Text.Text"]]
code = 'SELECT "T"."C" AS "C" FROM "T" AS "T" WHERE (("T"."B" = ?) AND ("T"."A" = ?))'
json = Json.from_pairs [["dialect", "SQLite"], ["code", code], ["interpolations", [int_param, str_param]]]
json = JS_Object.from_pairs [["dialect", "SQLite"], ["code", code], ["interpolations", [int_param, str_param]]]
vis . should_equal json.to_text
spec =
enso_project.data.create_directory

View File

@ -12,11 +12,11 @@ import project
spec =
expect_text text axis_expected_text data_expected_text =
json = Json.parse text
json.fields.keys.should_equal ['axis','data']
json.field_names.should_equal ['data', 'axis']
expect_text = '{"axis": ' + axis_expected_text + ', "data": ' + data_expected_text + '}'
expected_result = Json.parse expect_text
expected_axis_labels = ['axis', Json.parse axis_expected_text]
expected_data_pair = ['data', Json.parse data_expected_text]
expected_result = Json.from_pairs [expected_axis_labels, expected_data_pair]
json.should_equal expected_result
expect value axis_expected_text data_expected_text =
@ -24,8 +24,8 @@ spec =
expect_text text axis_expected_text data_expected_text
index = Scatter_Plot.index_name
axis label = Json.from_pairs [['label',label]]
labels x y = Json.from_pairs [['x', axis x], ['y', axis y]] . to_text
axis label = JS_Object.from_pairs [['label',label]]
labels x y = JS_Object.from_pairs [['x', axis x], ['y', axis y]] . to_text
no_labels = 'null'
Test.group "Scatter Plot Visualization" <|
@ -114,19 +114,18 @@ spec =
vector = [0,10,20,30]
text = Scatter_Plot.process_to_json_text vector limit=2
json = Json.parse text
json.fields.keys.should_equal ['axis','data']
data = json.fields.get 'data'
data.unwrap.length . should_equal 2
json.field_names.should_equal ['data','axis']
data = json.get 'data'
data.length . should_equal 2
Test.specify "limit the number of squared elements" <|
vector = (-15).up_to 15 . map (x -> x * x)
text = Scatter_Plot.process_to_json_text vector limit=10
json = Json.parse text
json.fields.keys.should_equal ['axis','data']
data = (json.fields.get 'data') . unwrap
json.field_names.should_equal ['data','axis']
data = json.get 'data'
data.length . should_equal 10
(data.take (First 3)).to_text . should_equal '[[[\'x\', 0], [\'y\', 225]], [[\'x\', 15], [\'y\', 0]], [[\'x\', 29], [\'y\', 196]]]'
(data.take (First 3)).to_text . should_equal '[{"x":0,"y":225}, {"x":15,"y":0}, {"x":29,"y":196}]'
Test.specify "filter the elements" <|
vector = [0,10,20,30]

View File

@ -19,8 +19,8 @@ polyglot java import java.util.UUID
type Foo
Value x
to_json : Json
to_json self = Json.from_pairs [["x", self.x]]
to_js_object : JS_Object
to_js_object self = JS_Object.from_pairs [["x", self.x]]
visualization_spec connection =
t = connection.upload_table "T" <| Table.new [["A", ['a', 'a', 'a']], ["B", [2, 2, 3]], ["C", [3, 5, 6]]]
@ -32,13 +32,13 @@ visualization_spec connection =
p_ixes = ["indices", ixes]
p_ixes_header = ["indices_header", ixes_header]
pairs = [p_header, p_data, p_all_rows, p_ixes, p_ixes_header]
Json.from_pairs pairs . to_text
JS_Object.from_pairs pairs . to_text
Test.group "Table Visualization" <|
Test.specify "should wrap internal errors" <|
bad_table = Database_Table.Value Nothing Nothing Nothing Nothing
vis = Visualization.prepare_visualization bad_table 2
json = Json.from_pairs [["error", "Method `meta_index` of Nothing could not be found."]]
json = JS_Object.from_pairs [["error", "Method `meta_index` of Nothing could not be found."]]
vis . should_equal json.to_text
Test.specify "should visualize database tables" <|
@ -79,21 +79,21 @@ visualization_spec connection =
Test.specify "should handle Vectors" <|
vis = Visualization.prepare_visualization [1, 2, 3] 2
json = Json.from_pairs [["json", [1, 2]], ["all_rows_count", 3]]
json = JS_Object.from_pairs [["json", [1, 2]], ["all_rows_count", 3]]
vis . should_equal json.to_text
vis2 = Visualization.prepare_visualization [[1, 2], [3, 4]] 2
json2 = Json.from_pairs [["json", [[1, 2], [3, 4]]], ["all_rows_count", 2]]
json2 = JS_Object.from_pairs [["json", [[1, 2], [3, 4]]], ["all_rows_count", 2]]
vis2 . should_equal json2.to_text
Test.specify "should handle Arrays" <|
vis = Visualization.prepare_visualization ([1, 2, 3] . to_array) 2
json = Json.from_pairs [["json", [1, 2]], ["all_rows_count", 3]]
json = JS_Object.from_pairs [["json", [1, 2]], ["all_rows_count", 3]]
vis . should_equal json.to_text
Test.specify "should handle other datatypes" <|
vis = Visualization.prepare_visualization (Foo.Value 42) 2
json = Json.from_pairs [["json", (Foo.Value 42)]]
json = JS_Object.from_pairs [["json", (Foo.Value 42)]]
vis . should_equal json.to_text
spec =