Mirror of https://github.com/enso-org/enso.git, synced 2024-12-23 00:01:35 +03:00
Autoscoping for File_Format and minor tweaks. (#10348)
- Add `rename` ALIAS to `Table.use_first_row_as_names`.
- Add a shorthand to `Faker.string_value` to allow quick creation of fake values such as National Insurance numbers.
- Add `match` ALIAS for `Text.find` and `Text.find_all`.
- Autoscoping for `File_Format`, demonstrating how to use it in a dynamic context.
- `SQLite_Format.For_File` renamed to `SQLite_Format.SQLite` (though kept for backwards compatibility).
- Fixed a bug in `SQLite_Format` which was calling a non-existent constructor.

![image](https://github.com/enso-org/enso/assets/4699705/4506d27c-c1ff-4ad6-9276-53c2ae00de17)
![image](https://github.com/enso-org/enso/assets/4699705/9043ffb0-6740-42ba-91f8-ab0df555f20f)
![image](https://github.com/enso-org/enso/assets/4699705/03122fac-bdbb-4bcf-ac96-9491da41a8b9)
![image](https://github.com/enso-org/enso/assets/4699705/79122aac-a74a-435d-9849-ac3421f7d080)
![image](https://github.com/enso-org/enso/assets/4699705/54544de8-9aea-4dc6-bb4d-a7d8233c6814)
This commit is contained in:
parent ad5f2c9121, commit 791dba6729
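The central change is autoscoping for `File_Format`: a format argument may now be written as an unresolved constructor (the `..Constructor` shorthand), and the new `File_Format.resolve` turns it into a concrete format value at the call site. A minimal before/after sketch, using a hypothetical `data.csv` file:

    # Before: the fully qualified constructor was required.
    t1 = Data.read "data.csv" (Delimited_Format.Delimited "," headers=True)

    # After: the autoscoped constructor is resolved against the known format types.
    t2 = Data.read "data.csv" (..Delimited "," headers=True)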
@@ -165,13 +165,14 @@ type S3_File
     read : File_Format -> Problem_Behavior -> Any ! S3_Error
     read self format=Auto_Detect (on_problems : Problem_Behavior = Problem_Behavior.Report_Warning) =
         if Data_Link.is_data_link self then Data_Link_Helpers.read_data_link self format on_problems else
-            File_Format.handle_format_missing_arguments format <| case format of
+            case format of
                 Auto_Detect -> if self.is_directory then format.read self on_problems else
                     response = translate_file_errors self <| S3.get_object self.s3_path.bucket self.s3_path.key self.credentials delimiter=S3_Path.delimiter
                     response.decode Auto_Detect
                 _ ->
                     metadata = File_Format_Metadata.Value path=self.path name=self.name
-                    self.with_input_stream [File_Access.Read] (stream-> format.read_stream stream metadata)
+                    resolved_format = File_Format.resolve format
+                    self.with_input_stream [File_Access.Read] (stream-> resolved_format.read_stream stream metadata)

     ## ALIAS load bytes, open bytes
        ICON data_input
@@ -194,7 +195,7 @@ type S3_File
     @encoding Encoding.default_widget
     read_text : Encoding -> Problem_Behavior -> Text ! File_Error
     read_text self (encoding : Encoding = Encoding.default) (on_problems : Problem_Behavior = Problem_Behavior.Report_Warning) =
-        self.read (Plain_Text encoding) on_problems
+        self.read (Plain_Text_Format.Plain_Text encoding) on_problems

     ## ICON data_output
        Deletes the object.
@@ -64,7 +64,7 @@ from project.System.File_Format import Auto_Detect, File_Format
         from Standard.Table import all
         import Standard.Examples

-        example_xls_to_table = Data.read Examples.xls (Excel_Format.Sheet 'Dates')
+        example_xls_to_table = Data.read Examples.xls (..Sheet 'Dates')
 @path Text_Input
 @format File_Format.default_widget
 read : Text | File -> File_Format -> Problem_Behavior -> Any ! File_Error
@@ -237,7 +237,8 @@ Text.characters self =
     Vector.build builder->
         self.each builder.append

-## GROUP Selections
+## ALIAS match
+   GROUP Selections
    ICON find
    Find the regular expression `pattern` in `self`, returning the first match
    if present or `Nothing` if not found.
@@ -266,7 +267,8 @@ Text.find self pattern=".*" case_sensitivity=Case_Sensitivity.Sensitive =
     compiled_pattern = Regex.compile pattern case_insensitive=case_insensitive
     compiled_pattern.match self

-## GROUP Selections
+## ALIAS matches
+   GROUP Selections
    ICON find
    Finds all the matches of the regular expression `pattern` in `self`,
    returning a Vector. If not found, will be an empty Vector.
@@ -130,7 +130,8 @@ type Regex
                 Match_Iterator_Value.Last _ -> Nothing
         go it

-    ## GROUP Selections
+    ## ALIAS match
+       GROUP Selections
        ICON find
        Tries to match the provided `input` against the pattern `self`.

@@ -143,7 +144,8 @@ type Regex
     find self (input : Text) =
         match_to_group_maybe <| self.match input

-    ## GROUP Selections
+    ## ALIAS matches
+       GROUP Selections
        ICON find
        Tries to match the provided `input` against the pattern `self`.

@@ -3,6 +3,7 @@ import project.Data.Text.Text
 import project.Data.Vector.Vector
 import project.Data.XML.XML_Document
 import project.Errors.Problem_Behavior.Problem_Behavior
+import project.Function.Function
 import project.Meta
 import project.Network.URI.URI
 import project.Nothing.Nothing
@@ -15,6 +16,13 @@ from project.Metadata.Choice import Option

 ## A `File_Format` for reading and writing XML files.
 type XML_Format
+    ## PRIVATE
+       Resolve an unresolved constructor to the actual type.
+    resolve : Function -> XML_Format | Nothing
+    resolve constructor =
+        _ = constructor
+        Nothing
+
     ## PRIVATE
        If the File_Format supports reading from the file, return a configured instance.
     for_read : File_Format_Metadata -> XML_Format | Nothing
@@ -38,7 +46,6 @@ type XML_Format
         _ = [file]
         Nothing

-
     ## PRIVATE
     get_dropdown_options : Vector Option
     get_dropdown_options = [Option "XML" (Meta.get_qualified_type_name XML_Format)]
@@ -23,6 +23,7 @@ import project.System.File.File_Access.File_Access
 import project.System.File.Generic.File_Like.File_Like
 import project.System.File.Generic.Writable_File.Writable_File
 import project.System.File_Format.Auto_Detect
+import project.System.File_Format.File_Format
 import project.System.File_Format.Infer
 import project.System.File_Format.JSON_Format
 import project.System.File_Format_Metadata.Content_Type_Metadata
@@ -62,7 +63,8 @@ interpret_json_as_data_link json =
 read_data_link (file : File_Like) format (on_problems : Problem_Behavior) =
     json = Data_Link.read_config file
     data_link_instance = interpret_json_as_data_link json
-    data_link_instance.read format on_problems
+    resolved_format = File_Format.resolve format
+    data_link_instance.read resolved_format on_problems

 ## PRIVATE
 read_data_link_as_stream (file : File_Like) (open_options : Vector) (f : Input_Stream -> Any) =
@@ -260,12 +260,12 @@ type Enso_File
                 datalink = Data_Link_Helpers.interpret_json_as_data_link json
                 datalink.read format on_problems
             Enso_Asset_Type.Directory -> if format == Auto_Detect then self.list else Error.throw (Illegal_Argument.Error "Directories can only be read using the Auto_Detect format.")
-            Enso_Asset_Type.File -> File_Format.handle_format_missing_arguments format <|
+            Enso_Asset_Type.File ->
                 read_with_format effective_format =
                     metadata = File_Format_Metadata.from self
                     self.with_input_stream [File_Access.Read] (stream-> effective_format.read_stream stream metadata)

-                if format != Auto_Detect then read_with_format format else
+                if format != Auto_Detect then read_with_format (File_Format.resolve format) else
                     real_format = Auto_Detect.get_reading_format self
                     if real_format == Nothing then Error.throw (File_Error.Unsupported_Type self) else
                         read_with_format real_format
@@ -106,4 +106,3 @@ from project.Function export all
 from project.Meta.Enso_Project export enso_project
 from project.Network.Extensions export all
 from project.System.File_Format export Auto_Detect, Bytes, File_Format, Infer, JSON_Format, Plain_Text_Format
-from project.System.File_Format.Plain_Text_Format export Plain_Text
@@ -98,16 +98,17 @@ type Response_Body
     @format decode_format_selector
     decode : File_Format -> Any -> Any
     decode self format=Auto_Detect ~if_unsupported=(Error.throw (File_Error.Unsupported_Type self.metadata)) =
-        File_Format.handle_format_missing_arguments format <| case format of
+        case format of
             Auto_Detect ->
                 detected_format = Auto_Detect.get_reading_format self.metadata
                 if detected_format.is_nothing then if_unsupported else
                     self.decode detected_format
             _ ->
-                type_obj = Meta.type_of format
-                if can_decode type_obj . not then Error.throw (Illegal_Argument.Error type_obj.to_text+" cannot be used to decode from a stream. It must be saved to a file first.") else
+                resolved_format = File_Format.resolve format
+                type_obj = Meta.type_of resolved_format
+                if can_decode type_obj . not then Error.throw (Illegal_Argument.Error type_obj.to_text+" cannot read from a stream, save to a file first.") else
                     self.with_stream stream->
-                        format.read_stream stream self.metadata
+                        resolved_format.read_stream stream self.metadata

     ## ALIAS bytes
        GROUP Input
@@ -127,7 +128,7 @@ type Response_Body
        fallback.
     @encoding Encoding.default_widget
     decode_as_text : Encoding | Infer -> Text
-    decode_as_text self (encoding : Encoding | Infer = Infer) = self.decode (Plain_Text_Format.Plain_Text encoding)
+    decode_as_text self (encoding : Encoding | Infer = Infer) = self.decode (..Plain_Text encoding)

     ## ALIAS parse as json, parse json
        GROUP Conversions
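Since `decode` now resolves the format before the `can_decode` check, autoscoped constructors also work on HTTP responses. A hedged usage sketch (the URL is illustrative):

    raw = Data.fetch "https://example.com/config.json" response_format=Raw_Response
    text = raw.decode ..Plain_Text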
@@ -18,6 +18,7 @@ import project.Errors.Common.Type_Error
 import project.Errors.File_Error.File_Error
 import project.Errors.Illegal_Argument.Illegal_Argument
 import project.Errors.Problem_Behavior.Problem_Behavior
+import project.Function.Function
 import project.Meta
 import project.Metadata.Display
 import project.Metadata.Widget
@@ -288,14 +289,14 @@ type File
             from Standard.Table import all
             import Standard.Examples

-            example_xls_to_table = Examples.xls.read (Excel_Format.Sheet 'Dates')
+            example_xls_to_table = Examples.xls.read (..Sheet 'Dates')
     @format File_Format.default_widget
     read : File_Format -> Problem_Behavior -> Any ! File_Error
-    read self format=Auto_Detect (on_problems : Problem_Behavior = Problem_Behavior.Report_Warning) =
+    read self format=Auto_Detect (on_problems : Problem_Behavior = ..Report_Warning) =
         if self.exists.not then Error.throw (File_Error.Not_Found self) else
             if Data_Link.is_data_link self then Data_Link_Helpers.read_data_link self format on_problems else
-                File_Format.handle_format_missing_arguments format <|
-                    format.read self on_problems
+                resolved_format = File_Format.resolve format
+                resolved_format.read self on_problems

     ## ALIAS load bytes, open bytes
        ICON data_input
@@ -17,6 +17,7 @@ import project.Metadata.Display
 import project.Metadata.Widget
 import project.Network.URI.URI
 import project.Nothing.Nothing
+import project.Panic.Panic
 import project.System.File.File
 import project.System.File.Generic.Writable_File.Writable_File
 import project.System.File_Format_Metadata.File_Format_Metadata
@@ -89,6 +90,19 @@ type File_Format
     all : Vector
     all = [Auto_Detect] + format_types

+    ## PRIVATE
+       Resolve an unresolved constructor to the actual type.
+    resolve : Function -> File_Format
+    resolve constructor = case constructor of
+        _ : Function ->
+            try_next idx =
+                if idx >= format_types.length then Error.throw (Illegal_Argument.Error "Expected a file format, but got a function.") else
+                    format = format_types.at idx
+                    resolved = format.resolve constructor
+                    if resolved.is_nothing then @Tail_Call try_next (idx + 1) else resolved
+            try_next 0
+        _ -> constructor
+
     ## PRIVATE
        Implements the `File.read` for this `File_Format`
     read : File -> Problem_Behavior -> Any
@@ -109,16 +123,6 @@ type File_Format
         options = ([Auto_Detect]+format_types).flat_map .get_dropdown_options
         Single_Choice display=Display.Always values=options

-    ## PRIVATE
-       Checks if the `format` argument is valid and raises a more user-friendly
-       error if it is a `Function` (meaning that some arguments were missing).
-    handle_format_missing_arguments : Any -> Any -> Any
-    handle_format_missing_arguments format ~action =
-        case format of
-            _ : Function ->
-                Error.throw (Illegal_Argument.Error "Expected a file format, but got a function. Perhaps the format is missing some required arguments?")
-            _ -> action
-
 ## A file format for plain text files.
 type Plain_Text_Format
     ## A file format for plain text files with the specified encoding.
@@ -132,6 +136,12 @@ type Plain_Text_Format
     @encoding Encoding.default_widget
     Plain_Text (encoding : Encoding | Infer = Infer)

+    ## PRIVATE
+       Resolve an unresolved constructor to the actual type.
+    resolve : Function -> Plain_Text_Format | Nothing
+    resolve constructor =
+        Panic.catch Any (constructor:Plain_Text_Format) _->Nothing
+
     ## PRIVATE
        If the File_Format supports reading from the file, return a configured instance.
     for_read : File_Format_Metadata -> Plain_Text_Format | Nothing
@@ -155,7 +165,7 @@ type Plain_Text_Format

     ## PRIVATE
     get_dropdown_options : Vector Option
-    get_dropdown_options = [Option "Plain Text" (Meta.get_qualified_type_name Plain_Text_Format)+".Plain_Text"]
+    get_dropdown_options = [Option "Plain Text" "..Plain_Text"]

     ## PRIVATE
        Implements the `File.read` for this `File_Format`
@@ -179,6 +189,13 @@ type Plain_Text_Format

 ## A file format for reading or writing files as a sequence of bytes.
 type Bytes
+    ## PRIVATE
+       Resolve an unresolved constructor to the actual type.
+    resolve : Function -> Bytes | Nothing
+    resolve constructor =
+        _ = constructor
+        Nothing
+
     ## PRIVATE
        If the File_Format supports reading from the file, return a configured instance.
     for_read : File_Format_Metadata -> Bytes | Nothing
@@ -212,6 +229,13 @@ type Bytes

 ## A file format for reading and writing files as JSON.
 type JSON_Format
+    ## PRIVATE
+       Resolve an unresolved constructor to the actual type.
+    resolve : Function -> JSON_Format | Nothing
+    resolve constructor =
+        _ = constructor
+        Nothing
+
     ## PRIVATE
        If the File_Format supports reading from the file, return a configured instance.
     for_read : File_Format_Metadata -> JSON_Format | Nothing
@@ -259,7 +283,6 @@ JSON_Format.from (that : JS_Object) =
 ## A setting to infer the default behaviour of some option.
 type Infer

-
 ## PRIVATE
    A helper for parsing the JSON representation of `Boolean | Infer`.
    It defaults to `Infer` if the value was `Nothing`.
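The dispatch above is the heart of the feature: an autoscoped constructor reaches `File_Format.resolve` as a plain `Function`, and each registered format type is asked in turn to claim it via its own `resolve` (typically an ascription attempt such as `Panic.catch Any (constructor:Some_Format) _->Nothing`). A standalone sketch of that loop, with a hypothetical `formats` vector standing in for the dynamically gathered `format_types`:

    from Standard.Base import all

    # Sketch only: `formats` is a hypothetical stand-in for the real registry.
    resolve_against formats constructor = case constructor of
        _ : Function ->
            try_next idx =
                if idx >= formats.length then Error.throw (Illegal_Argument.Error "Expected a file format, but got a function.") else
                    format = formats.at idx
                    resolved = format.resolve constructor
                    if resolved.is_nothing then @Tail_Call try_next (idx + 1) else resolved
            try_next 0
        # Anything that is not a function is assumed to already be a resolved format.
        _ -> constructor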
@@ -44,7 +44,7 @@ type Postgres_Data_Link
     ## PRIVATE
     read self (format = Auto_Detect) (on_problems : Problem_Behavior) =
         _ = on_problems
-        if format != Auto_Detect then Error.throw (Illegal_Argument.Error "Only the default Auto_Detect format should be used with a Postgres Data Link, because it does not point to a file resource, but a database entity, so setting a file format for it is meaningless.") else
+        if format != Auto_Detect then Error.throw (Illegal_Argument.Error "Only Auto_Detect can be used with a Postgres Data Link, as it points to a database.") else
             # TODO add related asset id here: https://github.com/enso-org/enso/issues/9869
             audit_mode = if Enso_User.is_logged_in then "cloud" else "local"
             default_options = Connection_Options.Value [["enso.internal.audit", audit_mode]]
@@ -10,23 +10,33 @@ import project.Connection.SQLite.SQLite

 ## Read the file to a `SQLite_Connection` from a `.db` or `.sqlite` file
 type SQLite_Format
-    ## Read SQLite files
+    ## PRIVATE
+       Read SQLite files (kept for backwards compatibility)
     For_File

+    ## Read SQLite files
+    SQLite
+
+    ## PRIVATE
+       Resolve an unresolved constructor to the actual type.
+    resolve : Function -> SQLite_Format | Nothing
+    resolve constructor =
+        Panic.catch Any (constructor:SQLite_Format) _->Nothing
+
     ## PRIVATE
        If the File_Format supports reading from the file, return a configured instance.
     for_read : File_Format_Metadata -> SQLite_Format | Nothing
     for_read file:File_Format_Metadata =
         expected_header = magic_header_string
         got_header = file.read_first_bytes expected_header.length
-        if got_header == expected_header then SQLite_Format.For_File else
+        if got_header == expected_header then SQLite_Format.SQLite else
             ## To allow for reading a SQLite file by extension if we cannot read the file header.
                If we were able to read the header and it is invalid - we do not try the extension,
                because later the read will fail anyway due to bad format.
             if got_header.is_nothing.not then Nothing else
                 case file.guess_extension of
-                    ".db" -> SQLite_Format.For_File
-                    ".sqlite" -> SQLite_Format.For_File
+                    ".db" -> SQLite_Format.SQLite
+                    ".sqlite" -> SQLite_Format.SQLite
                     _ -> Nothing

     ## PRIVATE
@@ -34,13 +44,13 @@ type SQLite_Format
     for_file_write : Writable_File -> SQLite_Format | Nothing
     for_file_write file:Writable_File =
         case (file:File_Format_Metadata).extension of
-            ".db" -> SQLite_Format.For_File
-            ".sqlite" -> SQLite_Format.For_File
+            ".db" -> SQLite_Format.SQLite
+            ".sqlite" -> SQLite_Format.SQLite
             _ -> Nothing

     ## PRIVATE
     get_dropdown_options : Vector Option
-    get_dropdown_options = [Option "SQLite" (Meta.get_qualified_type_name SQLite_Format)+".For_File"]
+    get_dropdown_options = [Option "SQLite" "..SQLite"]

     ## PRIVATE
        Implements the `File.read` for this `File_Format`
@@ -57,5 +67,5 @@ type SQLite_Format

 ## PRIVATE
    Based on the File Format definition at: https://www.sqlite.org/fileformat.html
-magic_header_string =
+private magic_header_string =
     "SQLite format 3".utf_8 + [0]
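Note how backwards compatibility is preserved: `For_File` stays as a `PRIVATE` constructor, while the header sniffing, the extension fallback, and the dropdown all now produce `SQLite_Format.SQLite`. A hypothetical read in the new spelling (the file name is illustrative):

    # `..SQLite` resolves to SQLite_Format.SQLite via the resolve mechanism above.
    connection = Data.read "my_data.db" ..SQLite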
@@ -573,7 +573,8 @@ type DB_Table
         Warning.with_suspended new_names names->
             self.updated_columns (self.internal_columns.map c-> c.rename (names.at c.name))

-    ## GROUP Standard.Base.Metadata
+    ## ALIAS rename
+       GROUP Standard.Base.Metadata
        ICON table_edit
        Returns a new table with the columns renamed based on entries in the
        first row.
@@ -1724,7 +1725,7 @@ type DB_Table
         _ = [right, keep_unmatched, right_prefix, on_problems]
         Error.throw (Unsupported_Database_Operation.Error "DB_Table.zip is not implemented yet for the Database backends.")

-    ## ALIAS append, concat
+    ## ALIAS append, concat, join
        GROUP Standard.Base.Calculations
        ICON union
        Appends records from other table(s) to this table.
@@ -3,7 +3,7 @@ private
 from Standard.Base import all
 import Standard.Base.Runtime.Managed_Resource.Managed_Resource

-from Standard.Table import Delimited_Format, Table
+from Standard.Table import Table

 import project.Google_Credential.Google_Credential
 from project.Internal.Google_Credential_Internal import all
@@ -52,4 +52,4 @@ read_api_data property_id:Text dimensions:Vector metrics:Vector start_date:Date

 read_sample_data dimensions:Vector metrics:Vector start_date:Date end_date:Date -> Table =
     _ = [dimensions, metrics, start_date, end_date]
-    Data.read ((Project_Description.new Standard.Google_Api . data) / "Google_Analytics_Sample.csv") (Delimited_Format.Delimited value_formatter=Standard.Base.Nothing)
+    Data.read ((Project_Description.new Standard.Google_Api . data) / "Google_Analytics_Sample.csv") (..Delimited value_formatter=Standard.Base.Nothing)
@@ -14,14 +14,20 @@ supported = [".bmp", ".dib", ".jpeg", ".jpg", ".jpe", ".jp2", ".png", ".webp", "
 ## Read the file to a `Image` from a supported file format.
 type Image_File_Format
     ## File_Format to read Image files
-    For_File
+    Image

+    ## PRIVATE
+       Resolve an unresolved constructor to the actual type.
+    resolve : Function -> Image_File_Format | Nothing
+    resolve constructor =
+        Panic.catch Any (constructor:Image_File_Format) _->Nothing
+
     ## PRIVATE
        If the File_Format supports reading from the file, return a configured instance.
     for_read : File_Format_Metadata -> Image_File_Format | Nothing
     for_read file:File_Format_Metadata =
         extension = file.guess_extension
-        if supported.contains extension then Image_File_Format.For_File else Nothing
+        if supported.contains extension then Image_File_Format.Image else Nothing

     ## PRIVATE
        If this File_Format should be used for writing to that file, return a configured instance.
@@ -30,7 +36,7 @@ type Image_File_Format

     ## PRIVATE
     get_dropdown_options : Vector Option
-    get_dropdown_options = [Option "Image" (Meta.get_qualified_type_name Image_File_Format)+".For_File"]
+    get_dropdown_options = [Option "Image" "..Image"]

     ## PRIVATE
        Implements the `File.read` for this `File_Format`
@@ -40,7 +40,7 @@ type Snowflake_Data_Link
     ## PRIVATE
     read self (format = Auto_Detect) (on_problems : Problem_Behavior) =
         _ = on_problems
-        if format != Auto_Detect then Error.throw (Illegal_Argument.Error "Only the default Auto_Detect format should be used with a Snowflake Data Link, because it does not point to a file resource, but a database entity, so setting a file format for it is meaningless.") else
+        if format != Auto_Detect then Error.throw (Illegal_Argument.Error "Only Auto_Detect can be used with a Snowflake Data Link, as it points to a database.") else
             default_options = Connection_Options.Value
             connection = self.details.connect default_options
             case self of
@@ -61,6 +61,12 @@ type Delimited_Format
     @row_limit Rows_To_Read.default_widget
     Delimited (delimiter:Text=',') (encoding:Encoding=Encoding.default) (skip_rows:Integer=0) (row_limit:Rows_To_Read=..All_Rows) (quote_style:Quote_Style=Quote_Style.With_Quotes) (headers:Headers=Headers.Detect_Headers) (value_formatter:Data_Formatter|Nothing=Data_Formatter.Value) (keep_invalid_rows:Boolean=True) (line_endings:Line_Ending_Style|Infer=Infer) (comment_character:Text|Nothing=Nothing)

+    ## PRIVATE
+       Resolve an unresolved constructor to the actual type.
+    resolve : Function -> Delimited_Format | Nothing
+    resolve constructor =
+        Panic.catch Any (constructor:Delimited_Format) _->Nothing
+
     ## PRIVATE
        ADVANCED
        If the File_Format supports reading from the file, return a configured instance.
@@ -87,7 +93,7 @@ type Delimited_Format

     ## PRIVATE
     get_dropdown_options : Vector Option
-    get_dropdown_options = [Option "Delimited" (Meta.get_qualified_type_name Delimited_Format)+".Delimited"]
+    get_dropdown_options = [Option "Delimited" "..Delimited"]

     ## PRIVATE
        ADVANCED
@@ -1,4 +1,5 @@
 from Standard.Base import all
+import Standard.Base.Errors.Common.Missing_Argument
 import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
 import Standard.Base.Metadata.Display
 import Standard.Base.System.File.Generic.Writable_File.Writable_File
@@ -57,7 +58,7 @@ type Excel_Format
        `Infer` will attempt to deduce this from the extension of the filename.
     @sheet (Text_Input display=Display.Always)
     @row_limit Rows_To_Read.default_widget
-    Sheet (sheet:(Integer|Text)=1) (headers:Headers=Headers.Detect_Headers) (skip_rows:Integer=0) (row_limit:Rows_To_Read=..All_Rows) (xls_format:Boolean|Infer=Infer)
+    Sheet (sheet:(Integer|Text)=1) (headers:Headers=..Detect_Headers) (skip_rows:Integer=0) (row_limit:Rows_To_Read=..All_Rows) (xls_format:Boolean|Infer=Infer)

     ## Reads a range from an Excel file as a `Table`.

@@ -75,7 +76,13 @@ type Excel_Format
        `Infer` will attempt to deduce this from the extension of the filename.
     @address Text_Input
     @row_limit Rows_To_Read.default_widget
-    Range (address:(Text|Excel_Range)) (headers:Headers=Headers.Detect_Headers) (skip_rows:Integer=0) (row_limit:Rows_To_Read=..All_Rows) (xls_format : Boolean | Infer = Infer)
+    Range (address:(Text|Excel_Range)=(Missing_Argument.throw 'address')) (headers:Headers=..Detect_Headers) (skip_rows:Integer=0) (row_limit:Rows_To_Read=..All_Rows) (xls_format : Boolean | Infer = Infer)

+    ## PRIVATE
+       Resolve an unresolved constructor to the actual type.
+    resolve : Function -> Excel_Format | Nothing
+    resolve constructor =
+        Panic.catch Any (constructor:Excel_Format) _->Nothing
+
     ## PRIVATE
        ADVANCED
@@ -93,10 +100,9 @@ type Excel_Format
     ## PRIVATE
     get_dropdown_options : Vector Option
     get_dropdown_options =
-        fqn = Meta.get_qualified_type_name Excel_Format
-        workbook = Option "Excel Workbook" fqn+".Workbook"
-        sheet = Option "Excel Sheet" fqn+".Sheet"
-        range = Option "Excel Range" fqn+".Range"
+        workbook = Option "Excel Workbook" "..Workbook"
+        sheet = Option "Excel Sheet" "..Sheet"
+        range = Option "Excel Range" "..Range"
         [workbook, sheet, range]

     ## PRIVATE
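With `Missing_Argument.throw 'address'` as the default, an autoscoped `..Range` that is never given an address now fails with a clear `Missing_Argument` error instead of floating around as a partially applied function. An illustrative pair (the workbook file is hypothetical):

    # Fails with Missing_Argument: `address` is required.
    bad = workbook_file.read ..Range

    # Works: the range address is supplied.
    ok = workbook_file.read (..Range "Sheet1!A1:B2")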
@@ -709,7 +709,8 @@ type Table
         Warning.with_suspended new_names names->
             Table.new (self.columns.map c-> c.rename (names.at c.name))

-    ## GROUP Standard.Base.Metadata
+    ## ALIAS rename
+       GROUP Standard.Base.Metadata
        ICON table_edit
        Returns a new table with the columns renamed based on entries in the
        first row.
@@ -2258,7 +2259,7 @@ type Table
             self.java_table.zip right.java_table keep_unmatched_bool right_prefix java_aggregator
         Table.Value new_java_table

-    ## ALIAS append, concat
+    ## ALIAS append, concat, join
        GROUP Standard.Base.Calculations
        ICON union
        Appends records from other table(s) to this table.
@@ -2677,9 +2678,8 @@ type Table
          Write a table to a CSV file, without writing the header.

              import Standard.Examples
-             from Standard.Table import Delimited

-             example_to_csv = Examples.inventory_table.write (Enso_Project.data / "example_csv_output.csv") (Delimited_Format.Delimited delimiter="," headers=False)
+             example_to_csv = Examples.inventory_table.write (Enso_Project.data / "example_csv_output.csv") (..Delimited delimiter="," headers=False)

        > Example
          Write a table to an XLSX file.
@@ -2687,29 +2687,30 @@ type Table
              import Standard.Examples
              from Standard.Table import all

-             example_to_xlsx = Examples.inventory_table.write (enso_project.data / "example_xlsx_output.xlsx") (Excel_Format.Sheet "MySheetName")
+             example_to_xlsx = Examples.inventory_table.write (enso_project.data / "example_xlsx_output.xlsx") (..Sheet "MySheetName")
     @path (Widget.File_Browse existing_only=False display=Display.Always)
     @format Widget_Helpers.write_table_selector
     write : Writable_File -> File_Format -> Existing_File_Behavior -> Match_Columns -> Problem_Behavior -> File ! Column_Count_Mismatch | Illegal_Argument | File_Error
     write self path:Writable_File format=Auto_Detect on_existing_file:Existing_File_Behavior=..Backup match_columns:Match_Columns=..By_Name on_problems:Problem_Behavior=..Report_Warning =
-        File_Format.handle_format_missing_arguments format <| case format of
+        case format of
             _ : Auto_Detect ->
                 base_format = format.get_writing_format path
                 if base_format == Nothing then Error.throw (File_Error.Unsupported_Output_Type path.file Table) else
                     self.write path format=base_format on_existing_file match_columns on_problems
             _ ->
+                resolved_format = File_Format.resolve format
                 handle_no_write_method caught_panic =
                     is_write = caught_panic.payload.method_name == "write_table"
                     if is_write.not then Panic.throw caught_panic else
-                        Error.throw (File_Error.Unsupported_Output_Type format Table)
+                        Error.throw (File_Error.Unsupported_Output_Type resolved_format Table)
                 Panic.catch No_Such_Method handler=handle_no_write_method <|
                     to_write = if Context.Output.is_enabled then self else self.take 1000
-                    format.write_table path to_write on_existing_file match_columns on_problems
+                    resolved_format.write_table path to_write on_existing_file match_columns on_problems

     ## ICON convert
        Creates a text representation of the table using the CSV format.
     to_csv : Text
-    to_csv self = Text.from self (Delimited_Format.Delimited delimiter=",")
+    to_csv self = Text.from self (..Delimited delimiter=",")

     ## GROUP Standard.Base.Conversions
        ICON convert
@@ -3107,9 +3108,10 @@ Table.from (that:Column) = that.to_table
    - that: The text to convert.
    - format: The format of the text.
    - on_problems: What to do if there are problems reading the text.
-Table.from (that : Text) (format:Delimited_Format = Delimited_Format.Delimited '\t') (on_problems:Problem_Behavior=Report_Warning) =
-    File_Format.handle_format_missing_arguments format <| case format of
-        _ : Delimited_Format -> Delimited_Reader.read_text that format on_problems
+Table.from (that : Text) (format = Delimited_Format.Delimited '\t') (on_problems:Problem_Behavior=..Report_Warning) =
+    resolved_format = File_Format.resolve format
+    case resolved_format of
+        _ : Delimited_Format -> Delimited_Reader.read_text that resolved_format on_problems
         _ -> Unimplemented.throw "Table.from is currently only implemented for Delimited_Format."

 ## PRIVATE
@@ -3120,7 +3122,7 @@ Table.from (that : Text) (format:Delimited_Format = Delimited_Format.Delimited '
    Arguments:
    - that: The table to convert.
    - format: The format of the text.
-Text.from (that : Table) (format:Delimited_Format = Delimited_Format.Delimited '\t') =
+Text.from (that : Table) (format:Delimited_Format = ..Delimited '\t') =
     Delimited_Writer.write_text that format

 ## PRIVATE
@@ -36,7 +36,8 @@ type Faker

        Arguments:
        - template: Vector of vectors that represent the possible characters for each
-         letter, as UTF-16 code units.
+         letter, as UTF-16 code units or a Text value made up of `A` for upper case,
+         `a` for lower case, `0` for numbers, and any other character for itself.

        > Example
          Creates a fake UK National Insurance number:
@@ -46,12 +47,21 @@ type Faker
              s = "ABCDFMP ".char_vector
              template = [l, l, n, n, n, n, n, s]
              ni_number = Faker.new . string_value template
-    string_value : Vector -> Text
+    string_value : Vector | Text -> Text
     string_value self template =
-        characters = template.map possible_chars->
-            selected_char_ix = self.generator.nextInt possible_chars.length
-            possible_chars.at selected_char_ix
-        Text.from_char_vector characters
+        case template of
+            _ : Text ->
+                char_vector = template.char_vector.map c-> case c of
+                    "A" -> Faker.upper_case_letters
+                    "a" -> Faker.lower_case_letters
+                    "0" -> Faker.numbers
+                    _ -> c
+                self.string_value char_vector
+            _ ->
+                characters = template.map possible_chars->
+                    selected_char_ix = self.generator.nextInt possible_chars.length
+                    possible_chars.at selected_char_ix
+                Text.from_char_vector characters

     ## GROUP Standard.Base.Random
        ICON random
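With the `Text` template, the UK National Insurance example above collapses to a single literal: each `A` draws a random upper-case letter, each `0` a random digit, and every other character stands for itself. A sketch (the exact pattern is illustrative, not a validated NI format):

    fake_ni = Faker.new . string_value "AA 00 00 00 A"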
@@ -420,12 +420,12 @@ add_specs suite_builder =
             my_file.read . should_equal "version2"

             # ensure correct format is used for reading the .bak file - Auto_Detect fails because it does not know the `.bak` extension
-            bak_file.read Plain_Text . should_equal "version1"
+            bak_file.read ..Plain_Text . should_equal "version1"

             "version3".write my_file . should_succeed

             my_file.read . should_equal "version3"
-            bak_file.read Plain_Text . should_equal "version2"
+            bak_file.read ..Plain_Text . should_equal "version2"

             # No new file was created
             parent_dir = my_file.parent
@@ -460,7 +460,7 @@ add_specs suite_builder =

             # And should be able to add another sheet
             table2 = Table.new [["A", [9, 10, 11, 12]], ["B", ["i", "j", "k", "l"]]]
-            table2.write file (Excel_Format.Sheet "MySheet2") . should_succeed
+            table2.write file (..Sheet "MySheet2") . should_succeed

             workbook2 = file.read
             workbook2.sheet_names . should_equal ['EnsoSheet', 'MySheet2']
@@ -557,7 +557,7 @@ add_specs suite_builder =
             raw_content = Data_Link.read_raw_config (enso_project.data / "simple.datalink")
             Data_Link.write_raw_config s3_link raw_content replace_existing=True . should_succeed
             Panic.with_finalizer s3_link.delete <|
-                r = s3_link.read (Delimited_Format.Delimited " " headers=False)
+                r = s3_link.read (..Delimited " " headers=False)
                 r.should_be_a Table
                 r.column_names . should_equal ["Column 1", "Column 2"]
                 r.rows.at 0 . to_vector . should_equal ["Hello", "WORLD!"]
@@ -1,6 +1,5 @@
 from Standard.Base import all
-from Standard.Base.Data.Array_Proxy import Array_Proxy
 import Standard.Base.Errors.Common.Missing_Argument
 import Standard.Base.Errors.Empty_Error.Empty_Error
 import Standard.Base.Data.Vector.Builder
 import Standard.Base.Data.Vector.Map_Error
@@ -118,7 +118,7 @@ add_specs suite_builder setup:Cloud_Tests_Setup = suite_builder.group "Enso Clou
                 output_stream.write_bytes "Hello".utf_8
                 42
             r.should_equal 42
-            f.read Plain_Text . should_equal "Hello"
+            f.read ..Plain_Text . should_equal "Hello"

         group_builder.specify "will respect Create_New in with_output_stream" <|
             test_file = test_root.get / "test_file.json"
@@ -129,12 +129,12 @@ add_specs suite_builder setup:Cloud_Tests_Setup = suite_builder.group "Enso Clou
                 42
             r.should_fail_with File_Error
             r.catch.should_be_a File_Error.Already_Exists
-            test_file.read Plain_Text . should_equal test_file_text
+            test_file.read ..Plain_Text . should_equal test_file_text

         group_builder.specify "should be able to write a file using write_bytes" <|
             f = test_root.get / "written_file2.txt"
             "hi!".utf_8.write_bytes f . should_succeed
-            f.read Plain_Text . should_equal "hi!"
+            f.read ..Plain_Text . should_equal "hi!"

         group_builder.specify "does not currently support append" <|
             f = test_root.get / "written_file3.txt"
@@ -151,7 +151,7 @@ add_specs suite_builder setup:Cloud_Tests_Setup = suite_builder.group "Enso Clou
             f.exists.should_be_false
             "ABC".write f on_existing_file=Existing_File_Behavior.Overwrite . should_equal f
             "DEF".write f on_existing_file=Existing_File_Behavior.Backup . should_equal f
-            f.read Plain_Text . should_equal "DEF"
+            f.read ..Plain_Text . should_equal "DEF"

             # But there should not be any other files in the directory
             dir.list.should_equal [f]
@@ -189,7 +189,7 @@ add_specs suite_builder setup:Cloud_Tests_Setup = suite_builder.group "Enso Clou
             test_file = test_root.get / "test_file.json"
             test_file.exists . should_be_true

-            test_file.read Plain_Text . should_equal test_file_text
+            test_file.read ..Plain_Text . should_equal test_file_text

             # auto-detection of JSON format:
             json = test_file.read
@@ -53,7 +53,7 @@ add_specs suite_builder =

     group_builder.specify "but will succeed if an unknown format is not used because it was overridden" <| Test.with_retries <|
         f = replace_url_in_data_link (data_link_root / "example-http-format-delimited.datalink")
-        r = f.read Plain_Text
+        r = f.read ..Plain_Text
         r.should_be_a Text
         r.trim.should_equal '{"hello": "world"}'

@@ -67,7 +67,7 @@ add_specs suite_builder =
         r1 = Data.read base_url_with_slash+"dynamic.datalink" JSON_Format
         r1.should_equal ('{"hello": "world"}'.parse_json)

-        r2 = Data.read base_url_with_slash+"dynamic-datalink" Plain_Text
+        r2 = Data.read base_url_with_slash+"dynamic-datalink" ..Plain_Text
         r2.trim.should_equal '{"hello": "world"}'

     group_builder.specify "should be able to follow a datalink from HTTP in Data.fetch/post, if auto parse is on" <| Test.with_retries <|
@@ -92,7 +92,7 @@ add_specs suite_builder =
         r1_decoded.catch . should_be_a File_Error.Unsupported_Type

         # Still raw data link config is returned if we successfully decode it by overriding the format.
-        r1_plain = r1.decode Plain_Text
+        r1_plain = r1.decode ..Plain_Text
         r1_plain.should_contain '"libraryName": "Standard.Base"'

         r2 = Data.post base_url_with_slash+"dynamic-datalink" response_format=Raw_Response
@@ -101,7 +101,7 @@ add_specs suite_builder =
         r2_decoded.should_fail_with File_Error
         r2_decoded.catch . should_be_a File_Error.Unsupported_Type

-        r2_plain = r2.decode Plain_Text
+        r2_plain = r2.decode ..Plain_Text
         r2_plain.should_contain '"libraryName": "Standard.Base"'

     group_builder.specify "should follow a datalink encountered in Data.download" <| Test.with_retries <|
@@ -212,7 +212,7 @@ add_specs suite_builder =
         auto_response = Data.read url_get
         auto_response.should_be_a JS_Object

-        plain_response = Data.read url_get format=Plain_Text
+        plain_response = Data.read url_get format=..Plain_Text
         plain_response.should_be_a Text

     suite_builder.group "post" pending=pending_has_url group_builder->
@@ -41,22 +41,22 @@ add_specs suite_builder =

     suite_builder.group "Plain_Text" group_builder->
         group_builder.specify "should be able to read a file as Text" <|
-            text = sample_xxx.read Plain_Text
+            text = sample_xxx.read ..Plain_Text
             text.should_equal "Hello World!"

         group_builder.specify "should be able to read a file as Text with Encoding" <|
-            text = windows_log.read (Plain_Text Encoding.windows_1252)
+            text = windows_log.read (..Plain_Text Encoding.windows_1252)
             text.should_equal "Hello World! $¢¤¥"

         group_builder.specify "should raise a warning when invalid encoding in a Text file" <|
-            action = windows_log.read (Plain_Text Encoding.ascii) on_problems=_
+            action = windows_log.read (..Plain_Text Encoding.ascii) on_problems=_
             tester result = result . should_equal 'Hello World! $\uFFFD\uFFFD\uFFFD'
             problems = [Encoding_Error.Error "Failed to decode 3 code units (at positions: 14, 15, 16)."]
             Problems.test_problem_handling action problems tester

             # Check that it defaults to warning.
             Problems.expect_only_warning Encoding_Error <|
-                windows_log.read (Plain_Text Encoding.ascii)
+                windows_log.read (..Plain_Text Encoding.ascii)

     suite_builder.group "JSON_Format" group_builder->
         group_builder.specify "should be able to read a file as Json" <|
@@ -26,7 +26,7 @@ add_specs suite_builder =
         tmp.with_file f->
             "test".write f
         r = tmp.with_file f->
-            f.read Plain_Text
+            f.read ..Plain_Text
         r.should_equal "test"

         raw_file = tmp.unsafe_get
@@ -34,7 +34,7 @@ add_specs suite_builder =

         tmp.dispose

-        Test.expect_panic Illegal_State <| tmp.with_file (f->f.read Plain_Text)
+        Test.expect_panic Illegal_State <| tmp.with_file (f->f.read Plain_Text_Format.Plain_Text)
         raw_file.exists . should_be_false

     group_builder.specify "should allow to create a new file and allow to dispose it once the reference is dropped" <|
@@ -48,7 +48,7 @@ add_specs suite_builder =
             raw_file.exists . should_be_true

             res = tmp.with_file f->
-                f.read Plain_Text
+                f.read ..Plain_Text
             [res, raw_file]

         result = f "foobar"
@@ -72,7 +72,7 @@ add_specs suite_builder =
         stream = make_stream "test payload 1"
         tmp = Temporary_File.from_stream stream
         tmp.with_file f->
-            f.read Plain_Text . should_equal "test payload 1"
+            f.read ..Plain_Text . should_equal "test payload 1"

     group_builder.specify "will fail if materializing an already closed input stream" <|
         stream = Input_Stream.new (InputStream.nullInputStream) (File_Error.handle_java_exceptions Nothing)
@@ -914,9 +914,9 @@ add_data_link_specs suite_builder =
         data_link_table.at "Y" . to_vector . should_equal ["o"]

     group_builder.specify "will reject any format overrides or stream operations on the data link" <|
-        r1 = Data.read data_link_file.get Plain_Text
+        r1 = Data.read data_link_file.get ..Plain_Text
         r1.should_fail_with Illegal_Argument
-        r1.catch.to_display_text . should_contain "Only the default Auto_Detect format should be used"
+        r1.catch.to_display_text . should_contain "Only Auto_Detect can be used"

         r2 = data_link_file.get.with_input_stream [File_Access.Read] .read_all_bytes
         r2.should_fail_with Illegal_Argument
@@ -257,7 +257,7 @@ add_specs suite_builder make_new_connection prefix persistent_connector=True =
         t1 = data.connection.create_table original_table_name structure=[Column_Description.Value "X" Value_Type.Integer] temporary=True
         t1.column_names . should_equal ["X"]

-        logs = log_file.read Plain_Text . lines
+        logs = log_file.read ..Plain_Text . lines
         deletes = logs.filter (_.contains "DROP")
         wrapped_name = data.connection.dialect.wrap_identifier original_table_name
         deletes.each line->
@@ -23,7 +23,7 @@ add_specs suite_builder =

     group_builder.specify "should create a table from a textual CSV" <|
         file_contents = (enso_project.data / "simple_empty.csv") . read_text
-        table = Table.from file_contents (format = Delimited_Format.Delimited ",")
+        table = Table.from file_contents (format = ..Delimited ",")
         table.should_equal data.expected_table

     suite_builder.group "File.read (Delimited)" group_builder->
@@ -51,13 +51,13 @@ add_specs suite_builder =
         csv = """
             name,x,y,x,y
             foo,10,20,30,20
-        t = Table.from csv (format = Delimited_Format.Delimited ",")
+        t = Table.from csv (format = ..Delimited ",")
         t.columns.map .name . should_equal ['name', 'x', 'y', 'x 1', 'y 1']

     suite_builder.group 'Writing' group_builder->
         group_builder.specify 'should properly serialize simple tables' <|
             varied_column = (enso_project.data / "varied_column.csv") . read
-            res = Text.from varied_column format=(Delimited_Format.Delimited ",")
+            res = Text.from varied_column format=(..Delimited ",")
             exp = normalize_lines <| '''
                 Column 1,Column 2,Column 3,Column 4,Column 5,Column 6
                 2005-02-25,2005-02-25,1,1,1.0,1
@@ -81,7 +81,7 @@ add_specs suite_builder =
                 "This;Name;;Is""""Strange";20
                 Marcin,,;"hello;world"

-            res = Text.from t format=(Delimited_Format.Delimited ";")
+            res = Text.from t format=(..Delimited ";")
             res.should_equal expected

         group_builder.specify 'should allow forced quoting of records' <|
@@ -33,7 +33,7 @@ add_specs suite_builder = suite_builder.group "parsing Table formats in DataLink

     group_builder.specify "overriding the custom format in Delimited datalink" <|
         data_link_file = replace_url_in_data_link (data_link_root / "example-http-format-delimited-ignore-quote.datalink")
-        r = data_link_file.read Plain_Text
+        r = data_link_file.read ..Plain_Text
         r.should_be_a Text
         r.trim.should_equal '{"hello": "world"}'

@@ -60,7 +60,7 @@ add_specs suite_builder = suite_builder.group "parsing Table formats in DataLink
     group_builder.specify "overriding Excel format" <|
         data_link_file = replace_url_in_data_link (data_link_root / "example-http-format-excel-workbook.datalink")

-        table = data_link_file.read (Excel_Format.Range "MyTestSheet!A1:B1")
+        table = data_link_file.read (..Range "MyTestSheet!A1:B1")
         table . should_equal (Table.from_rows ["A", "B"] [[1, 'x']])

         bytes = data_link_file.read_bytes
@@ -18,7 +18,7 @@ add_specs suite_builder =
         c_2 = ["b", ['2', Nothing, '8', '11']]
         c_3 = ["c", [Nothing, '6', '9', '12']]
         expected_table = Table.new [c_1, c_2, c_3]
-        simple_empty = Data.read (enso_project.data / "simple_empty.csv") (Delimited_Format.Delimited "," headers=True value_formatter=Nothing)
+        simple_empty = Data.read (enso_project.data / "simple_empty.csv") (..Delimited "," headers=True value_formatter=Nothing)
         simple_empty.should_equal expected_table

     group_builder.specify "should load a simple table without headers" <|
@@ -26,11 +26,11 @@ add_specs suite_builder =
         c_2 = ["Column 2", ['b', '2', Nothing, '8', '11']]
         c_3 = ["Column 3", ['c', Nothing, '6', '9', '12']]
         expected_table = Table.new [c_1, c_2, c_3]
-        simple_empty = Data.read (enso_project.data / "simple_empty.csv") (Delimited_Format.Delimited "," headers=False value_formatter=Nothing)
+        simple_empty = Data.read (enso_project.data / "simple_empty.csv") (..Delimited "," headers=False value_formatter=Nothing)
         simple_empty.should_equal expected_table

     group_builder.specify "should work in presence of missing headers" <|
-        action on_problems = Data.read (enso_project.data / "missing_header.csv") (Delimited_Format.Delimited "," headers=True value_formatter=Nothing) on_problems
+        action on_problems = Data.read (enso_project.data / "missing_header.csv") (..Delimited "," headers=True value_formatter=Nothing) on_problems
         tester table =
             table.columns.map .name . should_equal ["a", "Column 1", "c", "Column 2", "d"]
             table.at "a" . to_vector . should_equal ["1"]
@@ -42,61 +42,61 @@ add_specs suite_builder =
         Problems.test_problem_handling action problems tester

     group_builder.specify "should infer headers based on the first two rows" <|
-        t1 = Data.read (enso_project.data / "data_small.csv") (Delimited_Format.Delimited ",")
+        t1 = Data.read (enso_project.data / "data_small.csv") (..Delimited ",")
         t1.columns.map .name . should_equal ["Code", "Index", "Flag", "Value", "ValueWithNothing", "TextWithNothing", "Hexadecimal", "Leading0s", "QuotedNumbers", "Mixed Types"]

-        t2 = Data.read (enso_project.data / "all_text.csv") (Delimited_Format.Delimited ",")
+        t2 = Data.read (enso_project.data / "all_text.csv") (..Delimited ",")
         t2.columns.map .name . should_equal ["Column 1", "Column 2"]
         t2.at "Column 1" . to_vector . should_equal ["a", "c", "e", "g"]
         t2.at "Column 2" . to_vector . should_equal ["b", "d", "f", "h"]

-        t3 = Data.read (enso_project.data / "two_rows1.csv") (Delimited_Format.Delimited ",")
+        t3 = Data.read (enso_project.data / "two_rows1.csv") (..Delimited ",")
         t3.columns.map .name . should_equal ["a", "b", "c"]
         t3.at "a" . to_vector . should_equal ["x"]
         t3.at "b" . to_vector . should_equal [Nothing]
         t3.at "c" . to_vector . should_equal [Nothing]

-        t4 = Data.read (enso_project.data / "two_rows2.csv") (Delimited_Format.Delimited ",")
+        t4 = Data.read (enso_project.data / "two_rows2.csv") (..Delimited ",")
         t4.columns.map .name . should_equal ["Column 1", "Column 2", "Column 3"]
         t4.at "Column 1" . to_vector . should_equal ["a", "d"]
         t4.at "Column 2" . to_vector . should_equal ["b", "e"]
         t4.at "Column 3" . to_vector . should_equal ["c", "f"]

-        t5 = Data.read (enso_project.data / "numbers_in_header.csv") (Delimited_Format.Delimited ",")
+        t5 = Data.read (enso_project.data / "numbers_in_header.csv") (..Delimited ",")
         t5.columns.map .name . should_equal ["Column 1", "Column 2", "Column 3"]
         t5.at "Column 1" . to_vector . should_equal ["a", "1"]
         t5.at "Column 2" . to_vector . should_equal ["b", "2"]
         t5.at "Column 3" . to_vector . should_equal [0, 3]

-        t6 = Data.read (enso_project.data / "quoted_numbers_in_header.csv") (Delimited_Format.Delimited ",")
+        t6 = Data.read (enso_project.data / "quoted_numbers_in_header.csv") (..Delimited ",")
         t6.columns.map .name . should_equal ["1", "x"]
         t6.at "1" . to_vector . should_equal ["y"]
         t6.at "x" . to_vector . should_equal [2]

     group_builder.specify "should not use the first row as headers if it is the only row, unless specifically asked to" <|
-        t1 = Data.read (enso_project.data / "one_row.csv") (Delimited_Format.Delimited ",")
+        t1 = Data.read (enso_project.data / "one_row.csv") (..Delimited ",")
         t1.columns.map .name . should_equal ["Column 1", "Column 2", "Column 3"]
         t1.at "Column 1" . to_vector . should_equal ["x"]
         t1.at "Column 2" . to_vector . should_equal ["y"]
         t1.at "Column 3" . to_vector . should_equal ["z"]

-        t2 = Data.read (enso_project.data / "one_row.csv") (Delimited_Format.Delimited "," headers=True)
+        t2 = Data.read (enso_project.data / "one_row.csv") (..Delimited "," headers=True)
         t2.columns.map .name . should_equal ["x", "y", "z"]
         t2.row_count . should_equal 0
         t2.at "x" . to_vector . should_equal []

     group_builder.specify "should raise an informative error when loading an empty file" <|
-        t = Data.read (enso_project.data / "empty.txt") (Delimited_Format.Delimited "," headers=True value_formatter=Nothing)
+        t = Data.read (enso_project.data / "empty.txt") (..Delimited "," headers=True value_formatter=Nothing)
         t.should_fail_with Empty_File_Error

     group_builder.specify "should correctly handle file opening issues" <|
         nonexistent_file = enso_project.data / "a_filename_that_does_not_exist.foobar"
-        r1 = Data.read nonexistent_file (Delimited_Format.Delimited "," headers=True value_formatter=Nothing)
+        r1 = Data.read nonexistent_file (..Delimited "," headers=True value_formatter=Nothing)
         r1.should_fail_with File_Error
         r1.catch.should_be_a File_Error.Not_Found

         directory = enso_project.data
-        r2 = Data.read directory (Delimited_Format.Delimited "," headers=True value_formatter=Nothing) Problem_Behavior.Report_Error
+        r2 = Data.read directory (..Delimited "," headers=True value_formatter=Nothing) Problem_Behavior.Report_Error
         r2.should_fail_with File_Error
         r2.catch.should_be_a File_Error.IO_Error

@@ -108,7 +108,7 @@ add_specs suite_builder =
         text.write (path name)

         test_file name =
-            table = Data.read (path name) (Delimited_Format.Delimited "," headers=True value_formatter=Nothing) Problem_Behavior.Report_Error
+            table = Data.read (path name) (..Delimited "," headers=True value_formatter=Nothing) Problem_Behavior.Report_Error
             table.columns.map .name . should_equal ['a', 'b', 'c']
             table.at 'a' . to_vector . should_equal ['d', '1']
             table.at 'b' . to_vector . should_equal ['e', '2']
@@ -123,7 +123,7 @@ add_specs suite_builder =

         # Currently mixed line endings are not supported.
         'a,b,c\nd,e,f\r1,2,3'.write (path 'mixed.csv')
-        Data.read (path 'mixed.csv') (Delimited_Format.Delimited "," headers=True value_formatter=Nothing) Problem_Behavior.Report_Error . should_fail_with Invalid_Row
+        Data.read (path 'mixed.csv') (..Delimited "," headers=True value_formatter=Nothing) Problem_Behavior.Report_Error . should_fail_with Invalid_Row

         ['crlf.csv', 'lf.csv', 'cr.csv', 'mixed.csv'].each (path >> .delete)

@@ -155,14 +155,14 @@ add_specs suite_builder =
         file_2.delete

     group_builder.specify "should work with Windows-1252 encoding" <|
-        table = Data.read (enso_project.data / "windows.csv") (Delimited_Format.Delimited "," headers=True encoding=Encoding.windows_1252) Problem_Behavior.Report_Error
+        table = Data.read (enso_project.data / "windows.csv") (..Delimited "," headers=True encoding=Encoding.windows_1252) Problem_Behavior.Report_Error
         table.columns.map .name . should_equal ['a', 'b', 'c']
         table.at 'a' . to_vector . should_equal ['$¢']
         table.at 'b' . to_vector . should_equal ['¤']
         table.at 'c' . to_vector . should_equal ['¥']

     group_builder.specify "should work with UTF-16 encoding" <|
-        table = Data.read (enso_project.data / "utf16.csv") (Delimited_Format.Delimited "," headers=True encoding=Encoding.utf_16_be) Problem_Behavior.Report_Error
+        table = Data.read (enso_project.data / "utf16.csv") (..Delimited "," headers=True encoding=Encoding.utf_16_be) Problem_Behavior.Report_Error
         table.columns.map .name . should_equal ['ą', '🚀b', 'ć😎']
         table.at 'ą' . to_vector . should_equal ['ą']
         table.at '🚀b' . to_vector . should_equal ['✨🚀🚧😍😃😍😎😙😉☺']
@@ -174,7 +174,7 @@ add_specs suite_builder =
         utf8_bytes.write_bytes utf8_file
         action_1 on_problems =
             # We need to set the encoding explicitly, as otherwise we'd just fallback to Windows-1252 and have no errors
-            utf8_file.read (Delimited_Format.Delimited "," headers=True encoding=Encoding.utf_8) on_problems
+            utf8_file.read (..Delimited "," headers=True encoding=Encoding.utf_8) on_problems
         tester_1 table =
             table.columns.map .name . should_equal ['a', 'b', 'c']
             table.at 'a' . to_vector . should_equal ['ą']
@@ -185,7 +185,7 @@ add_specs suite_builder =
         utf8_file.delete

         action_2 on_problems =
-            (enso_project.data / "utf16_invalid.csv").read (Delimited_Format.Delimited "," headers=True encoding=Encoding.utf_16_be) on_problems
+            (enso_project.data / "utf16_invalid.csv").read (..Delimited "," headers=True encoding=Encoding.utf_16_be) on_problems
         tester_2 table =
             table.columns.map .name . should_equal ['a', 'b', 'c']
             # This column does not raise a problem - the '\uFFFD' is simply present in the input file.
@@ -197,7 +197,7 @@ add_specs suite_builder =
         Problems.test_problem_handling action_2 problems_2 tester_2

     group_builder.specify "should handle duplicated columns" <|
-        action on_problems = Data.read (enso_project.data / "duplicated_columns.csv") (Delimited_Format.Delimited "," headers=True value_formatter=Nothing) on_problems
+        action on_problems = Data.read (enso_project.data / "duplicated_columns.csv") (..Delimited "," headers=True value_formatter=Nothing) on_problems
         tester table =
             table.columns.map .name . should_equal ['a', 'b', 'c', 'a 1']
             table.at 'a' . to_vector . should_equal ['1']
@@ -206,7 +206,7 @@ add_specs suite_builder =
         Problems.test_problem_handling action problems tester

     group_builder.specify "should handle quotes" <|
-        t1 = Data.read (enso_project.data / "double_quoted.csv") (Delimited_Format.Delimited "," headers=True value_formatter=Nothing)
+        t1 = Data.read (enso_project.data / "double_quoted.csv") (..Delimited "," headers=True value_formatter=Nothing)
         t1.at 'a' . to_vector . should_equal ['a, x', '"a']
         t1.at 'c' . to_vector . should_equal ['3', '"']

@ -219,7 +219,7 @@ add_specs suite_builder =
|
||||
t3.at 'c' . to_vector . should_equal ['a']
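Quote behaviour is configured via `with_quotes` (exercised in the argument-validation tests further down). A minimal sketch, assuming a hypothetical `quoted.csv` and that the plain double-quote character is the intended quote:

    # `with_quotes` refines an existing Delimited format value (assumed quote character shown).
    format = Delimited_Format.Delimited "," headers=True . with_quotes quote='"'
    t = Data.read (enso_project.data / "quoted.csv") format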
group_builder.specify "should support rows spanning multiple lines if quoted" <|
t1 = Data.read (enso_project.data / "multiline_quoted.csv") (Delimited_Format.Delimited "," headers=True value_formatter=Nothing)
t1 = Data.read (enso_project.data / "multiline_quoted.csv") (..Delimited "," headers=True value_formatter=Nothing)
t1.at 'a' . to_vector . should_equal ['1', '4']
t1.at 'b' . to_vector . should_equal ['start\n\ncontinue', '5']
t1.at 'c' . to_vector . should_equal ['3', '6']

@ -264,13 +264,13 @@ add_specs suite_builder =
f6.delete

group_builder.specify "should handle quotes if they are opened in the middle of an unquoted cell in a sane way" pending="ToDo: To be fixed in https://github.com/enso-org/enso/issues/5839" <|
t1 = Data.read (enso_project.data / "mismatched_quote_at_end.csv") (Delimited_Format.Delimited "," headers=True value_formatter=Nothing)
t1 = Data.read (enso_project.data / "mismatched_quote_at_end.csv") (..Delimited "," headers=True value_formatter=Nothing)
t1.column_names . should_equal ["a", "b", "c"]
t1.at 'a' . to_vector . should_equal ['1', 'abc', '7']
t1.at 'b' . to_vector . should_equal ['2', 'def', '8']
t1.at 'c' . to_vector . should_equal ['3', 'g h i"', '9']

t2 = Data.read (enso_project.data / "weird_quoting_stuff.csv") (Delimited_Format.Delimited "," headers=True value_formatter=Nothing)
t2 = Data.read (enso_project.data / "weird_quoting_stuff.csv") (..Delimited "," headers=True value_formatter=Nothing)
t2.column_names . should_equal ["A", "B", "C", "D", "E"]
t2.at 'A' . to_vector . should_equal ['Te,s"t', 'Te,s"t']
t2.at 'B' . to_vector . should_equal ['An " other', 'An " other"']

@ -278,14 +278,14 @@ add_specs suite_builder =
t2.at 'D' . to_vector . should_equal ['This is an escaped quote \\"', 'This is an escaped quote \\"']
t2.at 'E' . to_vector . should_equal ['Excel escapes "" with 8 quotes """"""', 'Excel escapes "" with 8 quotes """"""']

t3 = Data.read (enso_project.data / "weird_quoting_stuff2.csv") (Delimited_Format.Delimited "," headers=True value_formatter=Nothing)
t3 = Data.read (enso_project.data / "weird_quoting_stuff2.csv") (..Delimited "," headers=True value_formatter=Nothing)
IO.println t3
t3.column_names.should_equal ["A", "B", "C"]
t3.print
group_builder.specify "should handle too long and too short rows" <|
|
||||
action keep_invalid_rows on_problems =
|
||||
Data.read (enso_project.data / "varying_rows.csv") (Delimited_Format.Delimited "," headers=True keep_invalid_rows=keep_invalid_rows value_formatter=Nothing) on_problems=on_problems
|
||||
Data.read (enso_project.data / "varying_rows.csv") (..Delimited "," headers=True keep_invalid_rows=keep_invalid_rows value_formatter=Nothing) on_problems=on_problems
|
||||
|
||||
tester_kept table =
|
||||
table.columns.map .name . should_equal ['a', 'b', 'c']
|
||||
@ -303,7 +303,7 @@ add_specs suite_builder =
|
||||
problems_dropped = [Invalid_Row.Error 2 Nothing ['1', '2', '3', '4'] 3, Invalid_Row.Error 4 Nothing ['1', '2'] 3, Invalid_Row.Error 5 Nothing [Nothing] 3, Invalid_Row.Error 6 Nothing ['1'] 3, Invalid_Row.Error 7 Nothing ['1', '2', '3', '4', '5', '6', '7', '8'] 3]
|
||||
Problems.test_problem_handling (action keep_invalid_rows=False) problems_dropped tester_dropped
|
||||
|
||||
r2 = Data.read (enso_project.data / "varying_rows2.csv") (Delimited_Format.Delimited "," headers=True keep_invalid_rows=False value_formatter=Nothing)
|
||||
r2 = Data.read (enso_project.data / "varying_rows2.csv") (..Delimited "," headers=True keep_invalid_rows=False value_formatter=Nothing)
|
||||
r2.column_names . should_equal ['a', 'b', 'c']
|
||||
Problems.expect_only_warning (Invalid_Row.Error 3 Nothing ['0', '0', '0', '10'] 3) r2
|
||||
warning2 = Problems.get_attached_warnings r2 . first
|
||||
@ -312,7 +312,7 @@ add_specs suite_builder =
|
||||
r2.at 'b' . to_vector . should_equal ['2', '5']
|
||||
r2.at 'c' . to_vector . should_equal ['3', '6']
|
||||
|
||||
r3 = Data.read (enso_project.data / "varying_rows3.csv") (Delimited_Format.Delimited "," headers=True keep_invalid_rows=True value_formatter=Nothing)
|
||||
r3 = Data.read (enso_project.data / "varying_rows3.csv") (..Delimited "," headers=True keep_invalid_rows=True value_formatter=Nothing)
|
||||
r3.column_names . should_equal ['a', 'b', 'c']
|
||||
Problems.expect_only_warning (Invalid_Row.Error 3 1 ['0', '0'] 3) r3
|
||||
warning3 = Problems.get_attached_warnings r3 . first
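The `keep_invalid_rows` flag decides whether a malformed row is kept (with an attached `Invalid_Row` warning) or dropped. A minimal sketch, assuming a hypothetical `rows.csv`:

    r = Data.read (enso_project.data / "rows.csv") (..Delimited "," headers=True keep_invalid_rows=True value_formatter=Nothing)
    # Any Invalid_Row problems are attached to the result as warnings.
    warnings = Problems.get_attached_warnings r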
@ -323,7 +323,7 @@ add_specs suite_builder =

group_builder.specify "should aggregate invalid rows over some limit" <|
action on_problems =
Data.read (enso_project.data / "many_invalid_rows.csv") (Delimited_Format.Delimited "," headers=True keep_invalid_rows=False value_formatter=Nothing) on_problems
Data.read (enso_project.data / "many_invalid_rows.csv") (..Delimited "," headers=True keep_invalid_rows=False value_formatter=Nothing) on_problems

tester table =
table.columns.map .name . should_equal ['a', 'b', 'c']

@ -334,33 +334,33 @@ add_specs suite_builder =
Problems.test_problem_handling action problems tester

group_builder.specify "should allow to skip rows" <|
t1 = Data.read (enso_project.data / "simple_empty.csv") (Delimited_Format.Delimited "," headers=False skip_rows=3 value_formatter=Nothing)
t1 = Data.read (enso_project.data / "simple_empty.csv") (..Delimited "," headers=False skip_rows=3 value_formatter=Nothing)
t1.at "Column 1" . to_vector . should_equal ['7', '10']

t2 = Data.read (enso_project.data / "simple_empty.csv") (Delimited_Format.Delimited "," headers=True skip_rows=3 value_formatter=Nothing)
t2 = Data.read (enso_project.data / "simple_empty.csv") (..Delimited "," headers=True skip_rows=3 value_formatter=Nothing)
t2.columns.map .name . should_equal ['7', '8', '9']
t2.at "7" . to_vector . should_equal ['10']
group_builder.specify "should allow to set a limit of rows to read" <|
t1 = Data.read (enso_project.data / "simple_empty.csv") (Delimited_Format.Delimited "," headers=False row_limit=2 value_formatter=Nothing)
t1 = Data.read (enso_project.data / "simple_empty.csv") (..Delimited "," headers=False row_limit=2 value_formatter=Nothing)
t1.at "Column 1" . to_vector . should_equal ['a', '1']

t2 = Data.read (enso_project.data / "simple_empty.csv") (Delimited_Format.Delimited "," headers=True row_limit=2 value_formatter=Nothing)
t2 = Data.read (enso_project.data / "simple_empty.csv") (..Delimited "," headers=True row_limit=2 value_formatter=Nothing)
t2.at "a" . to_vector . should_equal ['1', '4']

t3 = Data.read (enso_project.data / "simple_empty.csv") (Delimited_Format.Delimited "," headers=False skip_rows=3 row_limit=1 value_formatter=Nothing)
t3 = Data.read (enso_project.data / "simple_empty.csv") (..Delimited "," headers=False skip_rows=3 row_limit=1 value_formatter=Nothing)
t3.at "Column 1" . to_vector . should_equal ['7']

t4 = Data.read (enso_project.data / "simple_empty.csv") (Delimited_Format.Delimited "," headers=False row_limit=0 value_formatter=Nothing)
t4 = Data.read (enso_project.data / "simple_empty.csv") (..Delimited "," headers=False row_limit=0 value_formatter=Nothing)
t4.columns.map .name . should_equal ['Column 1', 'Column 2', 'Column 3']
t4.row_count . should_equal 0

t5 = Data.read (enso_project.data / "simple_empty.csv") (Delimited_Format.Delimited "," headers=True row_limit=0 value_formatter=Nothing)
t5 = Data.read (enso_project.data / "simple_empty.csv") (..Delimited "," headers=True row_limit=0 value_formatter=Nothing)
t5.columns.map .name . should_equal ['a', 'b', 'c']
t5.at 'a' . to_vector . should_equal []
t5.row_count . should_equal 0

t6 = Data.read (enso_project.data / "simple_empty.csv") (Delimited_Format.Delimited "," headers=False skip_rows=3 row_limit=1000 value_formatter=Nothing)
t6 = Data.read (enso_project.data / "simple_empty.csv") (..Delimited "," headers=False skip_rows=3 row_limit=1000 value_formatter=Nothing)
t6.at "Column 1" . to_vector . should_equal ['7', '10']
group_builder.specify "should check arguments" <|

@ -369,10 +369,10 @@ add_specs suite_builder =
path.read (Delimited_Format.Delimited "," headers=False . with_quotes quote='abc') pb . should_fail_with Illegal_Argument
path.read (Delimited_Format.Delimited "," headers=False . with_quotes quote='🚧') pb . should_fail_with Illegal_Argument
path.read (Delimited_Format.Delimited "," headers=False . with_quotes quote_escape='//') pb . should_fail_with Illegal_Argument
path.read (Delimited_Format.Delimited 'a\u{301}' headers=False) pb . should_fail_with Illegal_Argument
path.read (..Delimited 'a\u{301}' headers=False) pb . should_fail_with Illegal_Argument

group_builder.specify "should correctly guess column types" <|
t = (enso_project.data / "data_small.csv") . read (Delimited_Format.Delimited "," headers=True)
t = (enso_project.data / "data_small.csv") . read (..Delimited "," headers=True)
t.at "Code" . to_vector . should_equal ["gxl", "wca", "nfw", "der"]
t.at "Index" . to_vector . should_equal [7, 0, 1, 7]
t.at "Flag" . to_vector . should_equal [True, False, True, True]

@ -384,7 +384,7 @@ add_specs suite_builder =
t.at "QuotedNumbers" . to_vector . should_equal ["1", "2", Nothing, "34"]
t.at "Mixed Types" . to_vector . should_equal ["33", Nothing, "45", "True"]

t2 = (enso_project.data / "data_small.csv") . read (Delimited_Format.Delimited "," headers=True value_formatter=(Data_Formatter.Value allow_leading_zeros=True))
t2 = (enso_project.data / "data_small.csv") . read (..Delimited "," headers=True value_formatter=(Data_Formatter.Value allow_leading_zeros=True))
t2.at "Leading0s" . to_vector . should_equal [1, 2, 123, Nothing]
group_builder.specify "should be able to detect types automatically" <|

@ -399,7 +399,7 @@ add_specs suite_builder =
t2.columns.map .name . should_equal ["a", "b", "c"]

group_builder.specify "should be able to read in a file without splitting it to columns" <|
t1 = (enso_project.data / "data_small.csv") . read (Delimited_Format.Delimited "" headers=False)
t1 = (enso_project.data / "data_small.csv") . read (..Delimited "" headers=False)
expected = ['Code,Index,Flag,Value,ValueWithNothing,TextWithNothing,"Hexadecimal",Leading0s,QuotedNumbers,"Mixed Types"']
+ ['gxl,7,True,38.76109,63.13, pq6igd2wyd ,4DD4675B,001,"1","33"']
+ ['wca,0,False,-66.77495,31," 2pr4102wc4 ",,002,"2",']

@ -412,7 +412,7 @@ add_specs suite_builder =
a,b,c
1,2,3
4,5,6
t1 = Table.from text1 (format = Delimited_Format.Delimited ",")
t1 = Table.from text1 (format = ..Delimited ",")
t1.columns.map .name . should_equal ["a", "b", "c"]
t1.at "a" . to_vector . should_equal [1, 4]
t1.at "b" . to_vector . should_equal [2, 5]
@ -500,7 +500,7 @@ add_specs suite_builder =
f = File.create_temporary_file "delimited-utf-16-inverted-bom" ".csv"
bytes.write_bytes f . should_succeed
# We choose the correct encoding for the rest of the file, only BOM is bad
r = f.read (Delimited_Format.Delimited "," encoding=Encoding.utf_16_be)
r = f.read (..Delimited "," encoding=Encoding.utf_16_be)
w = Problems.expect_only_warning Encoding_Error r
w.to_display_text . should_contain "BOM"
# The first column name now contains this invalid character, because it wasn't a BOM

@ -514,10 +514,10 @@ add_specs suite_builder =
group_builder.specify "should gracefully handle malformed data edge cases: 1 byte file with multi-byte encoding (UTF-8)" <|
f = File.create_temporary_file "delimited-malformed-utf-16" ".csv"
[-1].write_bytes f . should_succeed
error = f.read (Delimited_Format.Delimited "," encoding=Encoding.utf_16_be) on_problems=..Report_Error
error = f.read (..Delimited "," encoding=Encoding.utf_16_be) on_problems=..Report_Error
error.should_fail_with Encoding_Error

r = f.read (Delimited_Format.Delimited "," encoding=Encoding.utf_16_be)
r = f.read (..Delimited "," encoding=Encoding.utf_16_be)
r.should_be_a Table
r.column_names . should_equal ["Column 1"]
r.first_column.to_vector . should_equal ['\uFFFD']
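The autoscoping shorthand applies to `Problem_Behavior` as well: `on_problems=..Report_Error` resolves against the parameter's expected type. A minimal sketch, assuming a hypothetical file handle `f`:

    # Escalate any decoding problem to an error instead of an attached warning.
    error = f.read (..Delimited "," encoding=Encoding.utf_16_be) on_problems=..Report_Error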
@ -45,7 +45,7 @@ add_specs suite_builder =
style=setting.first
separator=setting.second
file = (enso_project.data / "transient" / "endings.csv")
table.write file (Delimited_Format.Delimited ',' line_endings=style) on_problems=Report_Error . should_succeed
table.write file (..Delimited ',' line_endings=style) on_problems=Report_Error . should_succeed
text = Data.read_text file
text.should_equal (lines.join separator suffix=separator)
file.delete

@ -55,7 +55,7 @@ add_specs suite_builder =
table = Table.new [['The Column "Name"', ["foo","'bar'",'"baz"', 'one, two, three', 'a\nb']], ["Hello, Column?", [1.0, 1000000.5, 2.2, -1.5, 0.0]]]
file = (enso_project.data / "transient" / "quotes1.csv")
file.delete_if_exists
table.write file (Delimited_Format.Delimited "," value_formatter=data_formatter) on_problems=Report_Error . should_succeed
table.write file (..Delimited "," value_formatter=data_formatter) on_problems=Report_Error . should_succeed
expected_text = normalize_lines <| """
"The Column ""Name""","Hello, Column?"
foo,"1,0"

@ -187,7 +187,7 @@ add_specs suite_builder =
table = Table.new [["ąęćś", [0]], ["ß", ["żółw 🐢"]]]
file = (enso_project.data / "transient" / "utf16.csv")
file.delete_if_exists
table.write file (Delimited_Format.Delimited "," encoding=Encoding.utf_16_be) on_problems=Report_Error . should_succeed
table.write file (..Delimited "," encoding=Encoding.utf_16_be) on_problems=Report_Error . should_succeed
expected_text = normalize_lines <| """
ąęćś,ß
0,żółw 🐢

@ -199,7 +199,7 @@ add_specs suite_builder =
table = Table.new [["A", [0, 1]], ["B", ["słówka", "🐢"]]]
file = (enso_project.data / "transient" / "ascii.csv")
file.delete_if_exists
result = table.write file (Delimited_Format.Delimited "," encoding=Encoding.ascii)
result = table.write file (..Delimited "," encoding=Encoding.ascii)
expected_text = normalize_lines <| """
A,B
0,s??wka
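Writing through a narrower encoding substitutes unencodable characters (the `s??wka` in the expected output above) rather than refusing to write, at least under the default problem behavior; with `Report_Error` the same situation surfaces as `Encoding_Error` (see the test further down). A minimal sketch, assuming a hypothetical `out.csv`:

    # Non-ASCII characters are expected to be replaced in the written file (assumption based on the test above).
    result = table.write (enso_project.data / "transient" / "out.csv") (..Delimited "," encoding=Encoding.ascii)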
@ -382,7 +382,7 @@ add_specs suite_builder =
style=setting.first
separator=setting.second
file = (enso_project.data / "transient" / "endings.csv")
initial_table.write file (Delimited_Format.Delimited ',' line_endings=style) on_problems=Report_Error . should_succeed
initial_table.write file (..Delimited ',' line_endings=style) on_problems=Report_Error . should_succeed
table_to_append.write file on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
text = Data.read_text file
text.should_equal (expected_lines.join separator suffix=separator)

@ -541,8 +541,8 @@ add_specs suite_builder =
table_to_append = Table.new [["a", ["x", "y"]]]
file = (enso_project.data / "transient" / "endings_mismatch.csv")
file.delete_if_exists
initial_table.write file (Delimited_Format.Delimited ',' line_endings=Line_Ending_Style.Mac_Legacy)
result = table_to_append.write file (Delimited_Format.Delimited ',' line_endings=Line_Ending_Style.Unix) on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position
initial_table.write file (..Delimited ',' line_endings=Line_Ending_Style.Mac_Legacy)
result = table_to_append.write file (..Delimited ',' line_endings=Line_Ending_Style.Unix) on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position
result . should_fail_with Illegal_Argument
result.catch.message . should_equal "The explicitly provided line endings ('\n') do not match the line endings in the file ('\r')."
file.delete
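Appending reuses the line endings already present in the file; explicitly requesting a conflicting style fails, as the test above shows. A minimal sketch of a consistent append, assuming hypothetical `initial` and `extra` tables:

    initial.write file (..Delimited ',' line_endings=Line_Ending_Style.Unix)
    # Omitting the format on append lets the existing file's conventions win.
    extra.write file on_existing_file=Existing_File_Behavior.Append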
@ -603,10 +603,10 @@ add_specs suite_builder =

t1 = Table.new [["X", [1, 2, 3]]]
[Existing_File_Behavior.Backup, Existing_File_Behavior.Overwrite, Existing_File_Behavior.Append].each behavior->
r1 = t1.write f (Delimited_Format.Delimited ',') on_existing_file=behavior
r1 = t1.write f (..Delimited ',') on_existing_file=behavior
r1.should_fail_with File_Error
r1.catch.should_be_a File_Error.Access_Denied
f.read Plain_Text . should_equal initial_data
f.read ..Plain_Text . should_equal initial_data

set_writable f True
f.delete

@ -617,7 +617,7 @@ add_specs suite_builder =

f = parent / "foo.csv"
t1 = Table.new [["X", [1, 2, 3]]]
r1 = t1.write f (Delimited_Format.Delimited ',')
r1 = t1.write f (..Delimited ',')
r1.should_fail_with File_Error
r1.catch.should_be_a File_Error.Not_Found

@ -644,7 +644,7 @@ add_specs suite_builder =
r2 = big_table.write f format on_problems=Problem_Behavior.Report_Error
r2.should_fail_with Encoding_Error
r2.catch.to_display_text . should_contain "Encoding issues"
f.read Plain_Text . should_equal "Initial Content"
f.read ..Plain_Text . should_equal "Initial Content"
f.delete

main filter=Nothing =
@ -1,5 +1,6 @@
from Standard.Base import all
import Standard.Base.Errors.Common.Dry_Run_Operation
import Standard.Base.Errors.Common.Missing_Argument
import Standard.Base.Errors.File_Error.File_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Errors.Illegal_State.Illegal_State

@ -27,14 +28,14 @@ spec_fmt suite_builder header file read_method sheet_count=5 =
wb.sheet_count . should_equal sheet_count

group_builder.specify "should read the specified sheet by index and use correct headers" <|
t = read_method file (Excel_Format.Sheet 1)
t = read_method file (..Sheet 1)
t.columns.map .name . should_equal ['Name', 'Quantity', 'Price']
t.at 'Name' . to_vector . should_equal ['blouse', 't-shirt', 'trousers', 'shoes', 'skirt', 'dress']
t.at 'Quantity' . to_vector . should_equal [10, 20, Nothing, 30, Nothing, 5]
t.at 'Price' . to_vector . should_equal [22.3, 32, 43.2, 54, 31, Nothing]

group_builder.specify "should read the specified sheet by index and properly format a table" <|
t = read_method file (Excel_Format.Sheet 2 headers=False)
t = read_method file (..Sheet 2 headers=False)
t.columns.map .name . should_equal ['A', 'B', 'C', 'D', 'E']
t.at 'A' . to_vector . should_equal [Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing]
t.at 'B' . to_vector . should_equal [Nothing, Nothing, 10, Nothing, Nothing, Nothing, Nothing]
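For Excel the same autoscoping applies: `..Sheet` resolves to `Excel_Format.Sheet` and accepts either a 1-based index or a sheet name. A minimal sketch, assuming a hypothetical workbook `file`:

    t_by_index = file.read (..Sheet 1)
    t_by_name = file.read (..Sheet 'Dates')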
@ -43,32 +44,32 @@ spec_fmt suite_builder header file read_method sheet_count=5 =
t.at 'E' . to_vector . should_equal [Nothing, Nothing, Nothing, Nothing, Nothing, 'foo', Nothing]

group_builder.specify "should read the specified sheet by name and properly handle dates" <|
t = read_method file (Excel_Format.Sheet 'Dates')
t = read_method file (..Sheet 'Dates')
t.columns.map .name . should_equal ['Student Name', 'Enrolment Date']
t.at 'Enrolment Date' . map .day . to_vector . should_equal [2, 26, 4, 24, 31, 7]

group_builder.specify "should give an informative error when reading an empty table" <|
t = read_method file (Excel_Format.Sheet "Empty")
t = read_method file (..Sheet "Empty")
t.should_fail_with Empty_Sheet

group_builder.specify "should gracefully handle duplicate column names and formulas" <|
t = read_method file (Excel_Format.Sheet "Duplicate Columns")
t = read_method file (..Sheet "Duplicate Columns")
t.columns.map .name . should_equal ['Item', 'Price', 'Quantity', 'Price 1']
t.at 'Price 1' . to_vector . should_equal [20, 40, 0, 60, 0, 10]

group_builder.specify "should allow reading with cell range specified" <|
t_1 = read_method file (Excel_Format.Range "Simple!B:C")
t_1 = read_method file (..Range "Simple!B:C")
t_1.columns.map .name . should_equal ['Quantity', 'Price']
t_1.at 'Quantity' . to_vector . should_equal [10, 20, Nothing, 30, Nothing, 5]
t_1.at 'Price' . to_vector . should_equal [22.3, 32, 43.2, 54, 31, Nothing]

t_2 = read_method file (Excel_Format.Range "Simple!3:5" headers=False)
t_2 = read_method file (..Range "Simple!3:5" headers=False)
t_2.column_count.should_equal 3
t_2.at 'A' . to_vector . should_equal ['t-shirt', 'trousers', 'shoes']
t_2.at 'B' . to_vector . should_equal [20, Nothing, 30]
t_2.at 'C' . to_vector . should_equal [32, 43.2, 54]

t_3 = read_method file (Excel_Format.Range "Simple!B4:C5" headers=False)
t_3 = read_method file (..Range "Simple!B4:C5" headers=False)
t_3.column_count.should_equal 2
t_3.at 'B' . to_vector . should_equal [Nothing, 30]
t_3.at 'C' . to_vector . should_equal [43.2, 54]
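`..Range` takes an Excel-style address: whole columns (`B:C`), whole rows (`3:5`), or a fixed cell rectangle (`B4:C5`). A minimal sketch, assuming a hypothetical workbook `file`:

    t = file.read (..Range "Simple!B4:C5" headers=False)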
@ -161,8 +162,8 @@ spec_write suite_builder suffix test_sheet_name =
alltypes = enso_project.data / "transient" / "alltypes."+suffix
alltypes.delete_if_exists . should_succeed
t1 = enso_project.data/'all_data_types.csv' . read
t1.write alltypes (Excel_Format.Sheet "AllTypes") . should_succeed
t2 = alltypes.read (Excel_Format.Sheet "AllTypes")
t1.write alltypes (..Sheet "AllTypes") . should_succeed
t2 = alltypes.read (..Sheet "AllTypes")
t2.should_equal t1

@ -188,24 +189,24 @@ spec_write suite_builder suffix test_sheet_name =
group_builder.specify 'should write a table to existing file in overwrite mode as a new sheet with headers' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
data.table.write out (Excel_Format.Sheet "Another") on_existing_file=Existing_File_Behavior.Overwrite on_problems=Report_Error . should_succeed
written = out.read (Excel_Format.Sheet "Another")
data.table.write out (..Sheet "Another") on_existing_file=Existing_File_Behavior.Overwrite on_problems=Report_Error . should_succeed
written = out.read (..Sheet "Another")
written.should_equal data.table

group_builder.specify 'should write a table to existing file in overwrite mode as a new sheet without headers' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
data.table.write out (Excel_Format.Sheet "NoHeaders") on_existing_file=Existing_File_Behavior.Overwrite on_problems=Report_Error . should_succeed
written = out.read (Excel_Format.Sheet "NoHeaders")
data.table.write out (..Sheet "NoHeaders") on_existing_file=Existing_File_Behavior.Overwrite on_problems=Report_Error . should_succeed
written = out.read (..Sheet "NoHeaders")
written.should_equal (data.table.rename_columns ['A', 'B', 'C', 'D', 'E', 'F'])

group_builder.specify 'should create new sheets at the start if index is 0' <|
out = data.create_out
data.table.write out (Excel_Format.Sheet 0) on_problems=Report_Error . should_succeed
data.clothes.write out (Excel_Format.Sheet 0) on_problems=Report_Error . should_succeed
read_1 = out.read (Excel_Format.Sheet "Sheet1")
data.table.write out (..Sheet 0) on_problems=Report_Error . should_succeed
data.clothes.write out (..Sheet 0) on_problems=Report_Error . should_succeed
read_1 = out.read (..Sheet "Sheet1")
read_1 . should_equal data.table
read_2 = out.read (Excel_Format.Sheet "Sheet2")
read_2 = out.read (..Sheet "Sheet2")
read_2 . should_equal data.clothes
written = out.read
read_3 = written.sheet_names
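Writing with `..Sheet 0` inserts a new sheet at the start of the workbook instead of replacing one, as the spec name above states. A minimal sketch, assuming a hypothetical `out` workbook file:

    # The new sheet is inserted at position 0; existing sheets shift right.
    table.write out (..Sheet 0)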
@ -215,47 +216,47 @@ spec_write suite_builder suffix test_sheet_name =
group_builder.specify 'should write a table to specific single cell location of an existing sheet' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
data.table.write out (Excel_Format.Range "Another!G1") on_problems=Report_Error . should_succeed
written = out.read (Excel_Format.Range "Another!G1")
data.table.write out (..Range "Another!G1") on_problems=Report_Error . should_succeed
written = out.read (..Range "Another!G1")
written.should_equal data.table

group_builder.specify 'should clear out an existing fixed range and replace' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
data.sub_clothes.write out (Excel_Format.Range "Another!A1:D20") on_problems=Report_Error . should_succeed
written = out.read (Excel_Format.Range "Another!A1")
data.sub_clothes.write out (..Range "Another!A1:D20") on_problems=Report_Error . should_succeed
written = out.read (..Range "Another!A1")
written.should_equal data.sub_clothes

group_builder.specify 'should clear out an existing range and replace' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
data.sub_clothes.write out (Excel_Format.Range "Another!A1") on_problems=Report_Error . should_succeed
written = out.read (Excel_Format.Range "Another!A1")
data.sub_clothes.write out (..Range "Another!A1") on_problems=Report_Error . should_succeed
written = out.read (..Range "Another!A1")
written.should_equal data.sub_clothes

group_builder.specify 'should result in Invalid_Location error if trying to write in a bad location' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
data.sub_clothes.write out (Excel_Format.Range "DoesNotExist!A1") . should_fail_with Invalid_Location
data.sub_clothes.write out (Excel_Format.Range "DoesNotExist!A1:B2") . should_fail_with Invalid_Location
data.sub_clothes.write out (Excel_Format.Range "SillyRangeName") . should_fail_with Invalid_Location
data.sub_clothes.write out (..Range "DoesNotExist!A1") . should_fail_with Invalid_Location
data.sub_clothes.write out (..Range "DoesNotExist!A1:B2") . should_fail_with Invalid_Location
data.sub_clothes.write out (..Range "SillyRangeName") . should_fail_with Invalid_Location

group_builder.specify 'should result in Range_Exceeded error if trying to write in too small a range' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
data.sub_clothes.write out (Excel_Format.Range "Another!A1:B2") . should_fail_with Range_Exceeded
data.sub_clothes.write out (..Range "Another!A1:B2") . should_fail_with Range_Exceeded

group_builder.specify 'should result in Existing_Data error if in Error mode and trying to replace' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
lmd = out.last_modified_time
r1 = data.sub_clothes.write out (Excel_Format.Sheet 1) on_existing_file=Existing_File_Behavior.Error
r1 = data.sub_clothes.write out (..Sheet 1) on_existing_file=Existing_File_Behavior.Error
r1.should_fail_with File_Error
r1.catch.should_be_a File_Error.Already_Exists

data.sub_clothes.write out (Excel_Format.Sheet "Another") on_existing_file=Existing_File_Behavior.Error . should_fail_with File_Error
data.sub_clothes.write out (Excel_Format.Range "Another!A1") on_existing_file=Existing_File_Behavior.Error . should_fail_with File_Error
data.sub_clothes.write out (Excel_Format.Range "Sheet1!A9") on_existing_file=Existing_File_Behavior.Error . should_fail_with File_Error
data.sub_clothes.write out (..Sheet "Another") on_existing_file=Existing_File_Behavior.Error . should_fail_with File_Error
data.sub_clothes.write out (..Range "Another!A1") on_existing_file=Existing_File_Behavior.Error . should_fail_with File_Error
data.sub_clothes.write out (..Range "Sheet1!A9") on_existing_file=Existing_File_Behavior.Error . should_fail_with File_Error

Test.with_clue "the original file should remain unmodified: " <|
out.last_modified_time.should_equal lmd
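With `on_existing_file=Existing_File_Behavior.Error`, any clash with existing data is reported as `File_Error.Already_Exists` and the target file is left untouched, as the `last_modified_time` check above verifies. A minimal sketch, assuming a hypothetical `out` workbook:

    r = table.write out (..Sheet "Another") on_existing_file=Existing_File_Behavior.Error
    # r.catch is a File_Error.Already_Exists when the sheet already holds data.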
@ -264,7 +265,7 @@ spec_write suite_builder suffix test_sheet_name =
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
lmd = out.last_modified_time
result = data.sub_clothes.write out (Excel_Format.Sheet "Testing") on_existing_file=Existing_File_Behavior.Error
result = data.sub_clothes.write out (..Sheet "Testing") on_existing_file=Existing_File_Behavior.Error
result.should_fail_with File_Error
result.catch.should_be_a File_Error.Already_Exists
Test.with_clue "the original file should remain unmodified: " <|

@ -272,7 +273,7 @@ spec_write suite_builder suffix test_sheet_name =

group_builder.specify 'should write a table to non-existent file as a new sheet without headers' <|
out = data.create_out
data.table.write out (Excel_Format.Sheet "Sheet1" headers=False) on_problems=Report_Error . should_succeed
data.table.write out (..Sheet "Sheet1" headers=False) on_problems=Report_Error . should_succeed
written = out.read
written.sheet_count . should_equal 1
written.sheet_names . should_equal ['Sheet1']

@ -286,8 +287,8 @@ spec_write suite_builder suffix test_sheet_name =
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['AA', ['d', 'e']], ['BB',[4, 5]], ['CC',[True, False]], ['DD', ['2022-01-20', '2022-01-21']]]
expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]]
extra_another.write out (Excel_Format.Sheet "Another") on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
written = out.read (Excel_Format.Sheet "Another") . select_columns [0, 1, 2]
extra_another.write out (..Sheet "Another") on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
written = out.read (..Sheet "Another") . select_columns [0, 1, 2]
written.should_equal expected

group_builder.specify 'should be able to append to a sheet by position' <|

@ -295,8 +296,8 @@ spec_write suite_builder suffix test_sheet_name =
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['A', ['d', 'e']], ['B',[4, 5]], ['C',[True, False]], ['D', ['2022-01-20', '2022-01-21']]]
expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]]
extra_another.write out (Excel_Format.Sheet "Another") on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed
written = out.read (Excel_Format.Sheet "Another") . select_columns [0, 1, 2]
extra_another.write out (..Sheet "Another") on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed
written = out.read (..Sheet "Another") . select_columns [0, 1, 2]
written.should_equal expected

group_builder.specify 'should be able to append to a sheet by name out of order' <|

@ -304,8 +305,8 @@ spec_write suite_builder suffix test_sheet_name =
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']], ['DD', ['2022-01-20', '2022-01-21']]]
expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]]
extra_another.write out (Excel_Format.Sheet "Another") on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
written = out.read (Excel_Format.Sheet "Another") . select_columns [0, 1, 2]
extra_another.write out (..Sheet "Another") on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
written = out.read (..Sheet "Another") . select_columns [0, 1, 2]
written.should_equal expected

group_builder.specify 'should be able to append to a single cell by name' <|

@ -313,8 +314,8 @@ spec_write suite_builder suffix test_sheet_name =
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['AA', ['d', 'e']], ['BB',[4, 5]], ['CC',[True, False]], ['DD', ['2022-01-20', '2022-01-21']]]
expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]]
extra_another.write out (Excel_Format.Range "Another!A1") on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
written = out.read (Excel_Format.Sheet "Another") . select_columns [0, 1, 2]
extra_another.write out (..Range "Another!A1") on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
written = out.read (..Sheet "Another") . select_columns [0, 1, 2]
written.should_equal expected

group_builder.specify 'should be able to append to a single cell by position' <|

@ -322,8 +323,8 @@ spec_write suite_builder suffix test_sheet_name =
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['A', ['d', 'e']], ['B',[4, 5]], ['C',[True, False]], ['D', ['2022-01-20', '2022-01-21']]]
expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]]
extra_another.write out (Excel_Format.Range "Another!A1") on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed
written = out.read (Excel_Format.Sheet "Another") . select_columns [0, 1, 2]
extra_another.write out (..Range "Another!A1") on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed
written = out.read (..Sheet "Another") . select_columns [0, 1, 2]
written.should_equal expected

group_builder.specify 'should be able to append to a single cell by name out of order' <|

@ -331,8 +332,8 @@ spec_write suite_builder suffix test_sheet_name =
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']], ['DD', ['2022-01-20', '2022-01-21']]]
expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]]
extra_another.write out (Excel_Format.Range "Another!A1") on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
written = out.read (Excel_Format.Sheet "Another") . select_columns [0, 1, 2]
extra_another.write out (..Range "Another!A1") on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
written = out.read (..Sheet "Another") . select_columns [0, 1, 2]
written.should_equal expected
group_builder.specify 'should be able to append to a range by name' <|

@ -340,8 +341,8 @@ spec_write suite_builder suffix test_sheet_name =
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['AA', ['d', 'e']], ['BB', [4, 5]], ['CC', [True, False]], ['DD', ['2022-01-20', '2022-01-21']]]
expected = Table.new [['AA', ['a', 'b', 'c', 'd', 'e']], ['BB', [1, 2, 3, 4, 5]], ['CC', [True, False, False, True, False]]]
extra_another.write out (Excel_Format.Range "Another!A1:D6") on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
written = out.read (Excel_Format.Sheet "Another") . select_columns [0, 1, 2]
extra_another.write out (..Range "Another!A1:D6") on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
written = out.read (..Sheet "Another") . select_columns [0, 1, 2]
written.should_equal expected

group_builder.specify 'should be able to append to a range by position' <|

@ -349,8 +350,8 @@ spec_write suite_builder suffix test_sheet_name =
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['A', ['d', 'e']], ['B',[4, 5]], ['C',[True, False]], ['D', ['2022-01-20', '2022-01-21']]]
expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]]
extra_another.write out (Excel_Format.Range "Another!A1:D6") on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed
written = out.read (Excel_Format.Sheet "Another") . select_columns [0, 1, 2]
extra_another.write out (..Range "Another!A1:D6") on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed
written = out.read (..Sheet "Another") . select_columns [0, 1, 2]
written.should_equal expected

group_builder.specify 'should be able to append to a range by name not in top left' <|

@ -358,8 +359,8 @@ spec_write suite_builder suffix test_sheet_name =
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['AA', ['d', 'e']], ['BB',[4, 5]], ['CC',[True, False]], ['DD', ['2022-01-20', '2022-01-21']]]
expected = Table.new [['AA', ['f', 'g', 'h', 'd', 'e']], ['BB',[1, 2, 3, 4, 5]], ['CC',[True, False, False, True, False]]]
extra_another.write out (Excel_Format.Range "Random!K9") on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
written = out.read (Excel_Format.Range "Random!K9") . select_columns [0, 1, 2]
extra_another.write out (..Range "Random!K9") on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
written = out.read (..Range "Random!K9") . select_columns [0, 1, 2]
written.should_equal expected

group_builder.specify 'should be able to append to a range by name after deduplication of names' <|

@ -367,8 +368,8 @@ spec_write suite_builder suffix test_sheet_name =
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['AA', ['d', 'e']], ['BB',[4, 5]], ['AA 1',[True, False]], ['BB 1', ['2022-01-20', '2022-01-21']]]
expected = Table.new [['AA', ['f', 'g', 'h', 'd', 'e']], ['BB',[1, 2, 3, 4, 5]], ['AA 1',[True, False, False, True, False]]]
extra_another.write out (Excel_Format.Range "Random!S3") on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
written = out.read (Excel_Format.Range "Random!S3") . select_columns [0, 1, 2]
extra_another.write out (..Range "Random!S3") on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
written = out.read (..Range "Random!S3") . select_columns [0, 1, 2]
written.should_equal expected

group_builder.specify 'should be able to append to a range by position not in top left' <|

@ -376,8 +377,8 @@ spec_write suite_builder suffix test_sheet_name =
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['A', ['d', 'e']], ['B',[4, 5]], ['C',[True, False]], ['D', ['2022-01-20', '2022-01-21']]]
expected = Table.new [['AA', ['f', 'g', 'h', 'd', 'e']], ['BB',[1, 2, 3, 4, 5]], ['CC',[True, False, False, True, False]]]
extra_another.write out (Excel_Format.Range "Random!K9") on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed
written = out.read (Excel_Format.Range "Random!K9") . select_columns [0, 1, 2]
extra_another.write out (..Range "Random!K9") on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed
written = out.read (..Range "Random!K9") . select_columns [0, 1, 2]
written.should_equal expected

group_builder.specify 'should be able to append to a range by name out of order' <|

@ -385,14 +386,14 @@ spec_write suite_builder suffix test_sheet_name =
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']], ['DD', ['2022-01-20', '2022-01-21']]]
expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]]
extra_another.write out (Excel_Format.Range "Another!A1:D6") on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
written = out.read (Excel_Format.Sheet "Another") . select_columns [0, 1, 2]
extra_another.write out (..Range "Another!A1:D6") on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
written = out.read (..Sheet "Another") . select_columns [0, 1, 2]
written.should_equal expected
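Appended columns are matched by header name by default; `match_columns=Match_Columns.By_Position` switches to positional matching so the incoming names need not line up, as the by-position specs above demonstrate. A minimal sketch, assuming a hypothetical `extra` table and `out` workbook:

    extra.write out (..Range "Another!A1:D6") on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position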
group_builder.specify 'should error gracefully if an unknown extension' <|
out = data.create_out suffix="notxls"
data.table.write out format=Excel_Format.Workbook on_problems=Report_Error . should_fail_with Illegal_Argument
data.table.write out format=Excel_Format.Sheet on_problems=Report_Error . should_fail_with Illegal_Argument
data.table.write out format=..Workbook on_problems=Report_Error . should_fail_with Illegal_Argument
data.table.write out format=..Sheet on_problems=Report_Error . should_fail_with Illegal_Argument

group_builder.specify 'should be able to write to a new dry run file' <|
out = data.create_out

@ -426,7 +427,7 @@ spec_write suite_builder suffix test_sheet_name =
opened_temp.sheet_names . should_equal ['EnsoSheet']

temp2 = Context.Output.with_disabled <|
result = data.table.write out (Excel_Format.Sheet "Another") on_problems=Report_Error . should_succeed
result = data.table.write out (..Sheet "Another") on_problems=Report_Error . should_succeed
Problems.expect_only_warning Dry_Run_Operation result
result.exists.should_be_true
result
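With the Output context disabled, `write` performs a dry run: it writes to a temporary file, attaches a `Dry_Run_Operation` warning, and returns that file. A minimal sketch:

    temp = Context.Output.with_disabled <|
        data.table.write out (..Sheet "Another")
    # `temp` points at the dry-run temporary file, not at `out`.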
@ -456,7 +457,7 @@ spec_write suite_builder suffix test_sheet_name =
opened_temp.sheet_names . should_equal ['EnsoSheet']

temp2 = Context.Output.with_disabled <|
result = data.table.write temp1 (Excel_Format.Sheet "Another") on_problems=Report_Error . should_succeed
result = data.table.write temp1 (..Sheet "Another") on_problems=Report_Error . should_succeed
Problems.expect_only_warning Dry_Run_Operation result
result.exists.should_be_true
result

@ -484,7 +485,7 @@ spec_write suite_builder suffix test_sheet_name =

opened_out = out.read
# We need to specify explicit format for the backup, because the extension is changed:
opened_backup = bak.read (Excel_Format.Workbook xls_format=(suffix=="xls"))
opened_backup = bak.read (..Workbook xls_format=(suffix=="xls"))

opened_out.read 'EnsoSheet' . should_equal t2
opened_backup.read 'EnsoSheet' . should_equal t1

@ -524,14 +525,14 @@ spec_write suite_builder suffix test_sheet_name =
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']]]
extra_another.write out (Excel_Format.Sheet "Another") on_existing_file=Existing_File_Behavior.Append . should_fail_with Column_Name_Mismatch
extra_another.write out (..Sheet "Another") on_existing_file=Existing_File_Behavior.Append . should_fail_with Column_Name_Mismatch

group_builder.specify 'should fail to append to a sheet by name if extra columns' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
lmd = out.last_modified_time
extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']], ['DD', ['2022-01-20', '2022-01-21']], ['EE', ['2022-01-20', '2022-01-21']]]
extra_another.write out (Excel_Format.Sheet "Another") on_existing_file=Existing_File_Behavior.Append . should_fail_with Column_Name_Mismatch
extra_another.write out (..Sheet "Another") on_existing_file=Existing_File_Behavior.Append . should_fail_with Column_Name_Mismatch
out.last_modified_time.should_equal lmd

group_builder.specify 'should fail to append to a sheet by name if no headers' <|

@ -539,8 +540,8 @@ spec_write suite_builder suffix test_sheet_name =
(enso_project.data / test_sheet_name) . copy_to out
lmd = out.last_modified_time
extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']], ['DD', ['2022-01-20', '2022-01-21']], ['EE', ['2022-01-20', '2022-01-21']]]
extra_another.write out (Excel_Format.Sheet "NoHeaders") on_existing_file=Existing_File_Behavior.Append . should_fail_with Illegal_Argument
extra_another.write out (Excel_Format.Sheet "Another" headers=False) on_existing_file=Existing_File_Behavior.Append . should_fail_with Illegal_Argument
extra_another.write out (..Sheet "NoHeaders") on_existing_file=Existing_File_Behavior.Append . should_fail_with Illegal_Argument
extra_another.write out (..Sheet "Another" headers=False) on_existing_file=Existing_File_Behavior.Append . should_fail_with Illegal_Argument
out.last_modified_time.should_equal lmd

group_builder.specify 'should fail to append to a sheet by position if too few columns' <|

@ -548,7 +549,7 @@ spec_write suite_builder suffix test_sheet_name =
(enso_project.data / test_sheet_name) . copy_to out
lmd = out.last_modified_time
extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']]]
extra_another.write out (Excel_Format.Sheet "Another") on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position . should_fail_with Column_Count_Mismatch
extra_another.write out (..Sheet "Another") on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position . should_fail_with Column_Count_Mismatch
out.last_modified_time.should_equal lmd

group_builder.specify 'should fail to append to a sheet by position if too many columns' <|

@ -556,7 +557,7 @@ spec_write suite_builder suffix test_sheet_name =
(enso_project.data / test_sheet_name) . copy_to out
lmd = out.last_modified_time
extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']], ['DD', ['2022-01-20', '2022-01-21']], ['EE', ['2022-01-20', '2022-01-21']]]
extra_another.write out (Excel_Format.Sheet "Another") on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position . should_fail_with Column_Count_Mismatch
extra_another.write out (..Sheet "Another") on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position . should_fail_with Column_Count_Mismatch
out.last_modified_time.should_equal lmd

group_builder.specify 'should fail to append to a range by name if not large enough' <|

@ -564,7 +565,7 @@ spec_write suite_builder suffix test_sheet_name =
(enso_project.data / test_sheet_name) . copy_to out
lmd = out.last_modified_time
extra_another = Table.new [['AA', ['d', 'e']], ['BB',[4, 5]], ['CC',[True, False]], ['DD', ['2022-01-20', '2022-01-21']]]
extra_another.write out (Excel_Format.Range "Another!A1:D5") on_existing_file=Existing_File_Behavior.Append . should_fail_with Range_Exceeded
extra_another.write out (..Range "Another!A1:D5") on_existing_file=Existing_File_Behavior.Append . should_fail_with Range_Exceeded
out.last_modified_time.should_equal lmd

group_builder.specify 'should fail to append to a range by name if it hits another table' <|

@ -572,7 +573,7 @@ spec_write suite_builder suffix test_sheet_name =
(enso_project.data / test_sheet_name) . copy_to out
lmd = out.last_modified_time
extra_another = Table.new [['AA', ['d', 'e']], ['BB',[4, 5]], ['CC',[True, False]], ['DD', ['2022-01-20', '2022-01-21']]]
extra_another.write out (Excel_Format.Range "Random!B3") on_existing_file=Existing_File_Behavior.Append . should_fail_with Existing_Data
extra_another.write out (..Range "Random!B3") on_existing_file=Existing_File_Behavior.Append . should_fail_with Existing_Data
out.last_modified_time.should_equal lmd
group_builder.specify "should fail if the target file is read-only" <|

@ -588,7 +589,7 @@ spec_write suite_builder suffix test_sheet_name =
[Existing_File_Behavior.Backup, Existing_File_Behavior.Overwrite, Existing_File_Behavior.Append].each behavior-> Test.with_clue behavior.to_text+": " <|
f.exists . should_be_true

r1 = t1.write f (Excel_Format.Sheet "Another") on_existing_file=behavior
r1 = t1.write f (..Sheet "Another") on_existing_file=behavior
Test.with_clue "("+r1.catch.to_display_text+") " <|
r1.should_fail_with File_Error
r1.catch.should_be_a File_Error.Access_Denied

@ -641,7 +642,7 @@ spec_write suite_builder suffix test_sheet_name =

f = parent / "foo."+suffix
t1 = Table.new [["X", [1, 2, 3]]]
r1 = t1.write f (Excel_Format.Sheet "Another")
r1 = t1.write f (..Sheet "Another")
Test.with_clue "("+r1.catch.to_display_text+") " <|
r1.should_fail_with File_Error
r1.catch.should_be_a File_Error.Not_Found

@ -651,8 +652,8 @@ spec_write suite_builder suffix test_sheet_name =
encodings.delete_if_exists . should_succeed

t1 = Table.new [["A", ["A", "B", "😊", "D"]], ["B", [1, 2, 3, 4]]]
t1.write encodings (Excel_Format.Sheet "Another") . should_succeed
t2 = encodings.read (Excel_Format.Sheet "Another")
t1.write encodings (..Sheet "Another") . should_succeed
t2 = encodings.read (..Sheet "Another")
t2.at "A" . to_vector . should_equal ["A", "B", "😊", "D"]
encodings.delete

@ -661,8 +662,8 @@ spec_write suite_builder suffix test_sheet_name =
custom_types.delete_if_exists . should_succeed

t1 = Table.new [["A", [Complex.Value 19 89, Complex.Value -1 -42]], ["B", [1, 2]]]
t1.write custom_types (Excel_Format.Sheet "Another") . should_succeed
t2 = custom_types.read (Excel_Format.Sheet "Another")
t1.write custom_types (..Sheet "Another") . should_succeed
t2 = custom_types.read (..Sheet "Another")
t2.at "A" . to_vector . should_equal ["(Complex.Value 19.0 89.0)", "(Complex.Value -1.0 -42.0)"]
custom_types.delete

@ -671,8 +672,8 @@ spec_write suite_builder suffix test_sheet_name =
custom_types2.delete_if_exists . should_succeed

t1 = Table.new [["A", [Complex_With_To_String.Value 19 89, Complex_With_To_String.Value -1 -42]], ["B", [1, 2]]]
t1.write custom_types2 (Excel_Format.Sheet "Another") . should_succeed
t2 = custom_types2.read (Excel_Format.Sheet "Another")
t1.write custom_types2 (..Sheet "Another") . should_succeed
t2 = custom_types2.read (..Sheet "Another")
t2.at "A" . to_vector . should_equal ["19.0 + 89.0i", "-1.0 + -42.0i"]
custom_types2.delete

@ -688,7 +689,7 @@ spec_write suite_builder suffix test_sheet_name =
t1.write empty on_existing_file=behavior . should_succeed
empty.exists.should_be_true

t2 = empty.read (Excel_Format.Sheet "EnsoSheet")
t2 = empty.read (..Sheet "EnsoSheet")
t2.should_equal t1
empty.delete
@ -829,13 +830,13 @@ add_specs suite_builder =
check_workbook <| Data.read xls_path

group_builder.specify "should let you read the workbook with Excel" <|
check_workbook <| xlsx_sheet.read Excel_Format.Workbook
check_workbook <| Data.read xlsx_sheet Excel_Format.Workbook
check_workbook <| Data.read xlsx_path Excel_Format.Workbook
check_workbook <| xlsx_sheet.read ..Workbook
check_workbook <| Data.read xlsx_sheet ..Workbook
check_workbook <| Data.read xlsx_path ..Workbook

check_workbook <| xls_sheet.read Excel_Format.Workbook
check_workbook <| Data.read xls_sheet Excel_Format.Workbook
check_workbook <| Data.read xls_path Excel_Format.Workbook
check_workbook <| xls_sheet.read ..Workbook
check_workbook <| Data.read xls_sheet ..Workbook
check_workbook <| Data.read xls_path ..Workbook

group_builder.specify "workbook should look like a database connection" <|
workbook = xlsx_sheet.read
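`..Workbook` opens the whole file as a workbook object that can then be queried sheet by sheet, much like a database connection. A minimal sketch:

    workbook = Data.read xlsx_path ..Workbook
    t = workbook.read "Sheet1"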
@ -864,18 +865,18 @@ add_specs suite_builder =
xls_sheet.read . named_ranges . should_equal range_names

group_builder.specify "should let you read by sheet index" <|
table = xlsx_sheet.read (Excel_Format.Sheet 1)
table = xlsx_sheet.read (..Sheet 1)
check_table table

table_2 = xlsx_sheet.read (Excel_Format.Sheet 1 skip_rows=(table.row_count - col_a.length))
table_2 = xlsx_sheet.read (..Sheet 1 skip_rows=(table.row_count - col_a.length))
table_2.row_count . should_equal col_a.length
check_table table_2

group_builder.specify "should let you read by sheet name" <|
table = xlsx_sheet.read (Excel_Format.Sheet "Sheet1")
table = xlsx_sheet.read (..Sheet "Sheet1")
check_table table

table_2 = xlsx_sheet.read (Excel_Format.Sheet "Sheet1" skip_rows=(table.row_count - col_a.length))
table_2 = xlsx_sheet.read (..Sheet "Sheet1" skip_rows=(table.row_count - col_a.length))
table_2.row_count . should_equal col_a.length
check_table table_2

@ -886,14 +887,14 @@ add_specs suite_builder =
check_table table_4

group_builder.specify "should error if you read by an invalid sheet name" <|
xlsx_sheet.read (Excel_Format.Sheet "NoSuchSheet") . should_fail_with Invalid_Location
xlsx_sheet.read (..Sheet "NoSuchSheet") . should_fail_with Invalid_Location
xlsx_sheet.read . read "NoSuchSheet" . should_fail_with Invalid_Location

group_builder.specify "should let you read XLS by sheet index" <|
table = xls_sheet.read (Excel_Format.Sheet 1)
table = xls_sheet.read (..Sheet 1)
check_table table

table_2 = xls_sheet.read (Excel_Format.Sheet 1 skip_rows=(table.row_count - col_a.length))
table_2 = xls_sheet.read (..Sheet 1 skip_rows=(table.row_count - col_a.length))
table_2.row_count . should_equal col_a.length
check_table table_2

@ -901,28 +902,28 @@ add_specs suite_builder =
check_table table_4

group_builder.specify "should let you read XLS by sheet name" <|
table = xls_sheet.read (Excel_Format.Sheet "Sheet1")
table = xls_sheet.read (..Sheet "Sheet1")
check_table table

table_2 = xls_sheet.read . read "Sheet1"
check_table table_2

group_builder.specify "should let you read by range" <|
table = xlsx_sheet.read (Excel_Format.Range "Sheet1!A:C")
table = xlsx_sheet.read (..Range "Sheet1!A:C")
check_table table 3

table_2 = xlsx_sheet.read (Excel_Format.Range "Sheet1!A:C" skip_rows=(table.row_count - col_a.length))
table_2 = xlsx_sheet.read (..Range "Sheet1!A:C" skip_rows=(table.row_count - col_a.length))
table_2.row_count . should_equal col_a.length
check_table table_2 3

check_table <| xlsx_sheet.read (Excel_Format.Range "Sheet1!10:13")
check_table count=3 <| xlsx_sheet.read (Excel_Format.Range "Sheet1!A10:C13")
check_table <| xlsx_sheet.read (..Range "Sheet1!10:13")
check_table count=3 <| xlsx_sheet.read (..Range "Sheet1!A10:C13")

check_table <| xlsx_sheet.read . read "Sheet1!10:13"
check_table count=3 <| xlsx_sheet.read . read "Sheet1!A10:C13"

group_builder.specify "should let you read by range name" <|
table = xlsx_sheet.read (Excel_Format.Range "myData")
table = xlsx_sheet.read (..Range "myData")
table.row_count . should_equal col_a.length
check_table table 3
|
||||
|
||||
@ -931,28 +932,28 @@ add_specs suite_builder =
|
||||
check_table table_2 3
|
||||
|
||||
group_builder.specify "should let you restrict number of rows read and skip rows" <|
|
||||
table = xlsx_sheet.read (Excel_Format.Sheet "Sheet1")
|
||||
table = xlsx_sheet.read (..Sheet "Sheet1")
|
||||
check_table table
|
||||
|
||||
table_2 = xlsx_sheet.read (Excel_Format.Sheet "Sheet1" skip_rows=(table.row_count - col_a.length))
|
||||
table_2 = xlsx_sheet.read (..Sheet "Sheet1" skip_rows=(table.row_count - col_a.length))
|
||||
table_2.row_count . should_equal col_a.length
|
||||
check_table table_2
|
||||
|
||||
table_3 = xlsx_sheet.read (Excel_Format.Sheet "Sheet1" skip_rows=(table.row_count - col_a.length) row_limit=2)
|
||||
table_3 = xlsx_sheet.read (..Sheet "Sheet1" skip_rows=(table.row_count - col_a.length) row_limit=2)
|
||||
table_3.row_count . should_equal 2
|
||||
|
||||
table_4 = xlsx_sheet.read (Excel_Format.Sheet "Sheet1" row_limit=6)
|
||||
table_4 = xlsx_sheet.read (..Sheet "Sheet1" row_limit=6)
|
||||
table_4.row_count . should_equal 6
|
||||
|
||||
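
Note: `skip_rows` and `row_limit` window the selected sheet: the specs above suggest rows are skipped first and the limit is applied to what remains. A hedged sketch (the literal numbers are illustrative, not taken from the test data):

t = xlsx_sheet.read (..Sheet "Sheet1" skip_rows=2 row_limit=6)  # drop 2 rows, then read at most 6
(t.row_count <= 6) . should_be_true                             # never more rows than the limit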
suite_builder.group "Problems" group_builder->
|
||||
group_builder.specify "should report a user-friendly error message when format is missing a required argument" <|
|
||||
r = xlsx_sheet.read (Excel_Format.Range)
|
||||
r.should_fail_with Illegal_Argument
|
||||
r.catch.to_display_text . should_contain "Perhaps the format is missing some required arguments?"
|
||||
r = xlsx_sheet.read (..Range)
|
||||
r.should_fail_with Missing_Argument
|
||||
r.catch.to_display_text . should_contain "Provide a value for the argument `address`."
|
||||
|
||||
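
Note: `..Range` requires an `address`, so constructing it without one now fails early with `Missing_Argument` rather than the previous, less specific `Illegal_Argument`. A sketch of the failing and corrected calls, reusing fixtures from this file:

r_bad = xlsx_sheet.read (..Range)               # no address, fails with Missing_Argument
r_bad.should_fail_with Missing_Argument
r_ok = xlsx_sheet.read (..Range "Sheet1!A:C")   # supplying the address satisfies the constructor
r_ok.should_be_a Table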
group_builder.specify "should handle non-existing file gracefully" <|
|
||||
bad_file = enso_project.data / "DoesNotExists.xlsx"
|
||||
result = bad_file.read (Excel_Format.Range "Sheet1!A:C")
|
||||
result = bad_file.read (..Range "Sheet1!A:C")
|
||||
result.should_fail_with File_Error
|
||||
result.catch.should_be_a File_Error.Not_Found
|
||||
|
||||
@ -962,7 +963,7 @@ add_specs suite_builder =
|
||||
xlsx_sheet.copy_to xlsx_sheet_copy
|
||||
|
||||
# At first, it fails with File_Error
|
||||
r1 = xlsx_sheet.read (Excel_Format.Range "Sheet1!A:C" xls_format=True)
|
||||
r1 = xlsx_sheet.read (..Range "Sheet1!A:C" xls_format=True)
|
||||
r1.should_fail_with File_Error
|
||||
r1.catch.should_be_a File_Error.Corrupted_Format
|
||||
|
||||
@ -971,12 +972,12 @@ add_specs suite_builder =
|
||||
r1_2.should_succeed
|
||||
|
||||
# And then wrong again
|
||||
r1_3 = xlsx_sheet.read (Excel_Format.Range "Sheet1!A:C" xls_format=True)
|
||||
r1_3 = xlsx_sheet.read (..Range "Sheet1!A:C" xls_format=True)
|
||||
# It should still fail the same:
|
||||
r1_3.should_fail_with File_Error
|
||||
r1_3.catch.should_be_a File_Error.Corrupted_Format
|
||||
|
||||
r2 = xls_sheet.read (Excel_Format.Range "Sheet1!A:C" xls_format=False)
|
||||
r2 = xls_sheet.read (..Range "Sheet1!A:C" xls_format=False)
|
||||
r2.should_fail_with File_Error
|
||||
r2.catch.should_be_a File_Error.Corrupted_Format
|
||||
xlsx_sheet_copy.delete
|
||||
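
Note: `xls_format` overrides detection by file extension, forcing the legacy `.xls` reader when `True` and the modern `.xlsx` reader when `False`; forcing the wrong one fails with `File_Error.Corrupted_Format`, as the specs above check. A minimal sketch with the same fixture:

ok = xlsx_sheet.read (..Range "Sheet1!A:C")                   # format inferred from the extension
bad = xlsx_sheet.read (..Range "Sheet1!A:C" xls_format=True)  # wrong binary format forced
bad.should_fail_with File_Error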

@ -990,11 +991,11 @@ add_specs suite_builder =
r1.catch.should_be_a File_Error.Corrupted_Format
r1.catch.to_display_text.should_contain "is corrupted"

r1a = bad_file.read Excel_Format.Workbook
r1a = bad_file.read ..Workbook
r1a.should_fail_with File_Error
r1a.catch.should_be_a File_Error.Corrupted_Format

r2 = bad_file.read (Excel_Format.Range "Sheet1!A:C")
r2 = bad_file.read (..Range "Sheet1!A:C")
r2.should_fail_with File_Error
r2.catch.should_be_a File_Error.Corrupted_Format
r2.catch.to_display_text.should_contain "is corrupted"
@ -1086,37 +1087,37 @@ add_specs suite_builder =
table.at (col_names.at idx) . to_vector . should_equal values

group_builder.specify "Simple table" <|
check_table (file.read (Excel_Format.Range "Sheet1!A1")) ["AA", "BB"] [[1,2,3,4,5,6], ["A","B","C","D","E","F"]]
check_table (file.read (Excel_Format.Range "Sheet1!A2")) ["A", "B"] [[1,2,3,4,5,6], ["A","B","C","D","E","F"]]
check_table (file.read (Excel_Format.Range "Sheet1!A1:A1")) ["A"] [["AA"]]
check_table (file.read (Excel_Format.Range "Sheet1!B1")) ["B"] [["BB", "A","B","C","D","E","F"]]
check_table (file.read (Excel_Format.Range "Sheet1!B1" headers=True)) ["BB"] [["A","B","C","D","E","F"]]
check_table (file.read (Excel_Format.Range "Sheet1!B2")) ["B"] [["A","B","C","D","E","F"]]
check_table (file.read (..Range "Sheet1!A1")) ["AA", "BB"] [[1,2,3,4,5,6], ["A","B","C","D","E","F"]]
check_table (file.read (..Range "Sheet1!A2")) ["A", "B"] [[1,2,3,4,5,6], ["A","B","C","D","E","F"]]
check_table (file.read (..Range "Sheet1!A1:A1")) ["A"] [["AA"]]
check_table (file.read (..Range "Sheet1!B1")) ["B"] [["BB", "A","B","C","D","E","F"]]
check_table (file.read (..Range "Sheet1!B1" headers=True)) ["BB"] [["A","B","C","D","E","F"]]
check_table (file.read (..Range "Sheet1!B2")) ["B"] [["A","B","C","D","E","F"]]
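
Note: a single-cell address such as `Sheet1!A1` acts as the top-left anchor of a contiguous region, expanding right and down; the first row is promoted to headers only when it looks like one (compare the `A1` and `A2` reads above, and `B1` with `headers=True`). A small sketch drawn from the expectations above:

t = file.read (..Range "Sheet1!A1")    # expands from the anchor, "AA"/"BB" become headers
t.column_names . should_equal ["AA", "BB"]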
group_builder.specify "Patchy table" <|
|
||||
check_table (file.read (Excel_Format.Range "Sheet1!D1")) ["A", "B", "F"] [[1,2,4], [4,4,Nothing], [6,Nothing,6]]
|
||||
check_table (file.read (Excel_Format.Range "Sheet1!D2")) ["D", "E", "F"] [[1,2,4], [4,4,Nothing], [6,Nothing,6]]
|
||||
check_table (file.read (Excel_Format.Range "Sheet1!E")) ["B"] [[4,4,Nothing,Nothing,Nothing,Nothing]]
|
||||
check_table (file.read (Excel_Format.Range "Sheet1!E1")) ["B", "F"] [[4,4,Nothing], [6,Nothing,6]]
|
||||
check_table (file.read (Excel_Format.Range "Sheet1!E2")) ["E", "F"] [[4,4,Nothing], [6,Nothing,6]]
|
||||
check_table (file.read (..Range "Sheet1!D1")) ["A", "B", "F"] [[1,2,4], [4,4,Nothing], [6,Nothing,6]]
|
||||
check_table (file.read (..Range "Sheet1!D2")) ["D", "E", "F"] [[1,2,4], [4,4,Nothing], [6,Nothing,6]]
|
||||
check_table (file.read (..Range "Sheet1!E")) ["B"] [[4,4,Nothing,Nothing,Nothing,Nothing]]
|
||||
check_table (file.read (..Range "Sheet1!E1")) ["B", "F"] [[4,4,Nothing], [6,Nothing,6]]
|
||||
check_table (file.read (..Range "Sheet1!E2")) ["E", "F"] [[4,4,Nothing], [6,Nothing,6]]
|
||||
|
||||
group_builder.specify "Single cell" <|
|
||||
check_table (file.read (Excel_Format.Range "Sheet1!H1")) ["H"] [["Single Cell"]]
|
||||
check_table (file.read (Excel_Format.Range "Sheet1!H2")) ["H"] [[]]
|
||||
check_table (file.read (..Range "Sheet1!H1")) ["H"] [["Single Cell"]]
|
||||
check_table (file.read (..Range "Sheet1!H2")) ["H"] [[]]
|
||||
|
||||
group_builder.specify "Single line" <|
|
||||
check_table (file.read (Excel_Format.Range "Sheet1!J1")) ["J", "K", "L"] [["Just"],["Some"],["Headers"]]
|
||||
check_table (file.read (..Range "Sheet1!J1")) ["J", "K", "L"] [["Just"],["Some"],["Headers"]]
|
||||
|
||||
group_builder.specify "Growing table" <|
|
||||
check_table (file.read (Excel_Format.Range "Sheet1!N1")) ["A", "Full", "Table", "Q"] [["Hello","World",Nothing,"Extend"],[1,Nothing,"Gap",3],[2,2,"Here",5],[Nothing,Nothing,"To","Hello"]]
|
||||
check_table (file.read (Excel_Format.Range "Sheet1!O1")) ["Full", "Table", "Q"] [[1,Nothing,"Gap",3],[2,2,"Here",5],[Nothing,Nothing,"To","Hello"]]
|
||||
check_table (file.read (Excel_Format.Range "Sheet1!O2")) ["O", "P", "Q"] [[1,Nothing,"Gap",3],[2,2,"Here",5],[Nothing,Nothing,"To","Hello"]]
|
||||
check_table (file.read (..Range "Sheet1!N1")) ["A", "Full", "Table", "Q"] [["Hello","World",Nothing,"Extend"],[1,Nothing,"Gap",3],[2,2,"Here",5],[Nothing,Nothing,"To","Hello"]]
|
||||
check_table (file.read (..Range "Sheet1!O1")) ["Full", "Table", "Q"] [[1,Nothing,"Gap",3],[2,2,"Here",5],[Nothing,Nothing,"To","Hello"]]
|
||||
check_table (file.read (..Range "Sheet1!O2")) ["O", "P", "Q"] [[1,Nothing,"Gap",3],[2,2,"Here",5],[Nothing,Nothing,"To","Hello"]]
|
||||
|
||||
group_builder.specify "Should handle blank headers without warnings" <|
|
||||
check_table (file.read (Excel_Format.Range "Sheet1!D1")) ["A", "B", "F"] [[1,2,4], [4,4,Nothing], [6,Nothing,6]]
|
||||
check_table (file.read (..Range "Sheet1!D1")) ["A", "B", "F"] [[1,2,4], [4,4,Nothing], [6,Nothing,6]]
|
||||
|
||||
group_builder.specify "Should handle duplicate headers with warnings" <|
|
||||
action = file.read (Excel_Format.Range "Sheet1!S1") on_problems=_
|
||||
action = file.read (..Range "Sheet1!S1") on_problems=_
|
||||
tester = check_table _ ["DD", "DD 1"] [[1,3], [2,4]]
|
||||
problems = [Duplicate_Output_Column_Names.Error ["DD"]]
|
||||
Problems.test_problem_handling action problems tester
|
||||
|
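
Note: duplicate headers are disambiguated by suffixing (here `DD` and `DD 1`), and a `Duplicate_Output_Column_Names` problem is reported according to the chosen `Problem_Behavior`. A brief sketch of the resulting names, taken from the tester above:

t = file.read (..Range "Sheet1!S1")
t.column_names . should_equal ["DD", "DD 1"]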

@ -47,7 +47,7 @@ add_specs suite_builder =
r.sheet_names . should_equal ["MyTestSheet"]
r.read "MyTestSheet" . should_equal expected_table

r2 = Data.fetch url format=Raw_Response . decode (Excel_Format.Sheet "MyTestSheet")
r2 = Data.fetch url format=Raw_Response . decode (..Sheet "MyTestSheet")
r2.should_be_a Table
r2.should_equal expected_table

@ -60,11 +60,11 @@ add_specs suite_builder =
r.sheet_names . should_equal ["MyTestSheet"]
r.read "MyTestSheet" . should_equal expected_table

r2 = Data.fetch url format=Raw_Response . decode (Excel_Format.Sheet "MyTestSheet")
r2 = Data.fetch url format=Raw_Response . decode (..Sheet "MyTestSheet")
r2.should_be_a Table
r2.should_equal expected_table

r3 = url.to_uri.fetch format=Raw_Response . decode (Excel_Format.Sheet "MyTestSheet")
r3 = url.to_uri.fetch format=Raw_Response . decode (..Sheet "MyTestSheet")
r3.should_be_a Table
r3.should_equal expected_table
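
Note: fetching with `format=Raw_Response` defers parsing, and `decode` then applies a format, including an autoscoped one resolved against the expected `File_Format`. A sketch of the flow, with `url` and the sheet name taken from the specs above:

response = Data.fetch url format=Raw_Response     # raw bytes, nothing parsed yet
table = response.decode (..Sheet "MyTestSheet")   # decode with an autoscoped Excel sheet format
table.should_be_a Table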

@ -74,7 +74,7 @@ add_specs suite_builder =

f2 = (transient / "test5.txt")
f2.delete_if_exists
my_format = Plain_Text Encoding.ascii
my_format = Plain_Text_Format.Plain_Text Encoding.ascii
r2 = t1.write f2 my_format
r2.should_fail_with File_Error
r2.catch.should_be_a File_Error.Unsupported_Output_Type
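
Note: the format value is now built with its fully qualified constructor, `Plain_Text_Format.Plain_Text`, instead of the bare `Plain_Text` spelling; writing a `Table` through it still fails, since plain text is not a supported table output, which is what this spec asserts. A hedged sketch of reading with the same format (the file name is hypothetical):

my_format = Plain_Text_Format.Plain_Text Encoding.ascii         # fully qualified constructor
sample_text = (enso_project.data / "sample.txt").read my_format # reading text this way is expected to work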

@ -57,14 +57,14 @@ add_specs suite_builder =
group_builder.specify "should serialise back to input" <|
expected_text = normalize_lines <|
(enso_project.data / "prime_ministers.csv").read_text
delimited = Text.from data.expected format=(Delimited_Format.Delimited "," line_endings=Line_Ending_Style.Unix)
delimited = Text.from data.expected format=(..Delimited "," line_endings=Line_Ending_Style.Unix)
delimited.should_equal expected_text

group_builder.specify "should serialise dates with format" <|
test_table = Table.new [data.c_from]
expected_text = 'From\n04.05.1979\n28.11.1990\n02.05.1997\n27.06.2007\n11.05.2010\n13.07.2016\n24.07.2019\n'
data_formatter = Data_Formatter.Value.with_datetime_formats date_formats=["dd.MM.yyyy"]
delimited = Text.from test_table format=(Delimited_Format.Delimited "," value_formatter=data_formatter line_endings=Line_Ending_Style.Unix)
delimited = Text.from test_table format=(..Delimited "," value_formatter=data_formatter line_endings=Line_Ending_Style.Unix)
delimited.should_equal expected_text
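
Note: `Text.from` serialises a table through a `Delimited_Format`, and autoscoping lets `..Delimited` stand in for `Delimited_Format.Delimited`; a `Data_Formatter` passed as `value_formatter` controls how date values are rendered. A compact sketch assembled from the spec above (names reused from it):

formatter = Data_Formatter.Value.with_datetime_formats date_formats=["dd.MM.yyyy"]
csv = Text.from test_table format=(..Delimited "," value_formatter=formatter line_endings=Line_Ending_Style.Unix)
csv.should_equal expected_text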

main filter=Nothing =

@ -50,14 +50,14 @@ add_specs suite_builder =
expected_text = normalize_lines <|
(enso_project.data / "datetime_sample_normalized_hours.csv").read_text
data_formatter = Data_Formatter.Value . with_datetime_formats datetime_formats=["yyyy-MM-dd HH:mm:ss"]
delimited = Text.from data.expected format=(Delimited_Format.Delimited "," line_endings=Line_Ending_Style.Unix value_formatter=data_formatter)
delimited = Text.from data.expected format=(..Delimited "," line_endings=Line_Ending_Style.Unix value_formatter=data_formatter)
delimited.should_equal expected_text

group_builder.specify "should serialise dates with format" <|
test_table = Table.new [data.c_date]
expected_text = 'Posting date\n05.01.2015 09-00\n05.01.2015 14-00\n06.01.2015 09-00\n07.01.2015 17-30\n05.01.2011 09-00\n09.01.2011 15-30\n'
data_formatter = Data_Formatter.Value . with_datetime_formats datetime_formats=["dd.MM.yyyy HH-mm"]
delimited = Text.from test_table format=(Delimited_Format.Delimited "," value_formatter=data_formatter line_endings=Line_Ending_Style.Unix)
delimited = Text.from test_table format=(..Delimited "," value_formatter=data_formatter line_endings=Line_Ending_Style.Unix)
delimited.should_equal expected_text

main filter=Nothing =

@ -51,14 +51,14 @@ add_specs suite_builder =
group_builder.specify "should serialise back to input" <|
expected_text = normalize_lines <|
(enso_project.data / "time_of_day_sample_normalized_hours.csv").read_text
delimited = Text.from data.expected format=(Delimited_Format.Delimited "," line_endings=Line_Ending_Style.Unix)
delimited = Text.from data.expected format=(..Delimited "," line_endings=Line_Ending_Style.Unix)
delimited.should_equal expected_text

group_builder.specify "should serialise dates with format" <|
test_table = Table.new [data.c_time]
expected_text = 'Posting time\n09-00-00\n14-00-12\n09-00-00\n17-30-00\n09-00-04\n15-30-00\n'
data_formatter = Data_Formatter.Value . with_datetime_formats time_formats=["HH-mm-ss"]
delimited = Text.from test_table format=(Delimited_Format.Delimited "," value_formatter=data_formatter line_endings=Line_Ending_Style.Unix)
delimited = Text.from test_table format=(..Delimited "," value_formatter=data_formatter line_endings=Line_Ending_Style.Unix)
delimited.should_equal expected_text

main filter=Nothing =