Refactor Table_Tests to the builder API (#8622)

Refactor `test/Table_Tests` to the builder API. The builder API lives in a new library called `Test_New` that sits alongside the old `Test` library. Follow-up PRs will migrate the rest of the tests; until then, let's keep both libraries and merge them after the last PR.

# Important Notes
- For a brief introduction to the new API, see the **Prototype 1** section in https://github.com/enso-org/enso/pull/8622#issuecomment-1889706168; a minimal sketch is also shown below.
- When executing all the tests, the behavior should be the same as with the old library, with one exception: if the `ENSO_TEST_ANSI_COLORS` env var is set, the output is more colorful than it used to be.
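
A minimal sketch of the new builder API, based on the `Test_New` sources added in this PR (the group and spec names are illustrative):

```
from Standard.Test_New import all

main =
    suite = Test.build suite_builder->
        suite_builder.group "Arithmetic" group_builder->
            group_builder.specify "should add numbers" <|
                (1 + 2) . should_equal 3
    suite.run_with_filter
```

`Test.build` collects the registered groups into a `Suite`; `run_with_filter` runs everything by default and calls `System.exit` with a non-zero code if any spec failed.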
Pavel Marek 2024-01-26 13:08:24 +01:00 committed by GitHub
parent a04385a6a4
commit d0fdeca6df
97 changed files with 9031 additions and 5416 deletions

View File

@@ -4,6 +4,7 @@ use crate::paths::Paths;
use crate::paths::ENSO_ENABLE_ASSERTIONS;
use crate::paths::ENSO_META_TEST_ARGS;
use crate::paths::ENSO_META_TEST_COMMAND;
+use crate::paths::ENSO_TEST_ANSI_COLORS;
use crate::postgres;
use crate::postgres::EndpointConfiguration;
use crate::postgres::Postgresql;
@@ -121,6 +122,7 @@ impl BuiltEnso {
ENSO_META_TEST_ARGS.set(&format!("{} --run", ir_caches.flag()))?;
ENSO_ENABLE_ASSERTIONS.set("true")?;
+ENSO_TEST_ANSI_COLORS.set("true")?;
// Prepare Engine Test Environment
if let Ok(gdoc_key) = std::env::var("GDOC_KEY") {

View File

@@ -33,6 +33,9 @@ ide_ci::define_env_var! {
/// If Enso-specific assertions should be enabled.
ENSO_ENABLE_ASSERTIONS, String;
+/// If true, enso tests output will be colored.
+ENSO_TEST_ANSI_COLORS, String;
/// Can be set to `"espresso"` to enable Espresso interpreter support.
ENSO_JAVA, String;
}

View File

@@ -0,0 +1,10 @@
name: Test_New
namespace: Standard
version: 0.0.0-dev
license: APLv2
authors:
- name: Enso Team
email: contact@enso.org
maintainers:
- name: Enso Team
email: contact@enso.org

View File

@@ -0,0 +1,11 @@
private
## PRIVATE
type Clue
## PRIVATE
Represents a clue as to why a test failed.
Arguments:
- add_clue: either Nothing or a function which modifies a failure message
Value add_clue

View File

@@ -0,0 +1,15 @@
from Standard.Base import all
import Standard.Base.Runtime.Context
import project.Test.Test
## PRIVATE
Runs the action twice, once with the Output context enabled and once with it
disabled, to check that the behaviour is the same regardless of context.
run_with_and_without_output ~action =
Context.Output.with_enabled <|
Test.with_clue "(normal mode - Output context enabled) " <|
action
Context.Output.with_disabled <|
Test.with_clue "(dry run - Output context disabled) " <|
action

View File

@@ -0,0 +1,660 @@
from Standard.Base import all
import Standard.Base.Errors.Common.No_Such_Method
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import project.Spec_Result.Spec_Result
import project.Test.Test
## Expect a function to fail with the provided dataflow error.
Arguments:
- matcher: The expected type of dataflow error contained in `self`.
- frames_to_skip (optional, advanced): used to alter the location which is
displayed as the source of this error.
- unwrap_errors: If true, remove any wrapping errors from the result before
checking against the expected warning.
> Example
Assert that a computation should return an error of a given type.
import Standard.Examples
from Standard.Test import Test
example_should_fail_with =
Examples.throw_error . should_fail_with Examples.My_Error
Any.should_fail_with : Any -> Integer -> Boolean -> Spec_Result
Any.should_fail_with self matcher frames_to_skip=0 unwrap_errors=True =
_ = unwrap_errors
loc = Meta.get_source_location 1+frames_to_skip
matcher_text = matcher . to_text
Test.fail ("Expected an error " + matcher_text + " but no error occurred, instead got: " + self.to_text + " (at " + loc + ").")
## Expect a function to fail with the provided dataflow error.
Arguments:
- matcher: The expected type of dataflow error contained in `self`.
- frames_to_skip (optional, advanced): used to alter the location which is
displayed as the source of this error.
- unwrap_errors: If true, remove any wrapping errors from the result before
checking against the expected warning.
> Example
Assert that a computation should return an error of a given type.
import Standard.Examples
from Standard.Test import Test
example_should_fail_with =
Examples.throw_error . should_fail_with Examples.My_Error
Error.should_fail_with : Any -> Integer -> Boolean -> Spec_Result
Error.should_fail_with self matcher frames_to_skip=0 unwrap_errors=True =
unwrap_maybe error = if unwrap_errors then Error.unwrap error else error
caught = unwrap_maybe self.catch
if caught == matcher || caught.is_a matcher then Nothing else
loc = Meta.get_source_location 2+frames_to_skip
matcher_text = matcher . to_text
Test.fail ("Expected error "+matcher_text+", but error " + caught.to_text + " has been returned (at " + loc + ").")
## Asserts that `self` value is equal to the expected value.
Arguments:
- that: The value to check `self` for equality with.
- frames_to_skip (optional, advanced): used to alter the location which is
displayed as the source of this error.
> Example
Assert that one value should equal another.
import Standard.Examples
from Standard.Test import Test
example_should_equal = Examples.add_1_to 1 . should_equal 2
Any.should_equal : Any -> Integer -> Spec_Result
Any.should_equal self that frames_to_skip=0 = case self == that of
True -> Spec_Result.Success
False ->
loc = Meta.get_source_location 2+frames_to_skip
additional_comment = case self of
_ : Vector -> case that of
_ : Vector ->
case self.length == that.length of
True ->
diff = self.zip that . index_of p->
p.first != p.second
"; first difference at index " + diff.to_text + " "
False -> "; lengths differ (" + self.length.to_text + " != " + that.length.to_text + ") "
_ -> ""
_ -> ""
msg = self.pretty + " did not equal " + that.pretty + additional_comment + " (at " + loc + ")."
Test.fail msg
## Asserts that `self` value is equal to the expected type value.
Arguments:
- that: The type to check `self` for equality with.
- frames_to_skip (optional, advanced): used to alter the location which is
displayed as the source of this error.
> Example
Assert that some type is equal to another.
import Standard.Examples
from Standard.Test import Test
example_should_equal = Examples.some_type . should_equal_type Vector
Any.should_equal_type : Any -> Integer -> Spec_Result
Any.should_equal_type self that frames_to_skip=0 = case (self.is_same_object_as that) of
True -> Spec_Result.Success
False ->
loc = Meta.get_source_location 2+frames_to_skip
msg = self.to_text + " did not equal type " + that.to_text + " (at " + loc + ")."
Test.fail msg
## Added so that dataflow errors are not silently lost.
Error.should_equal_type self that frames_to_skip=0 =
_ = [that]
Test.fail_match_on_unexpected_error self 1+frames_to_skip
## Asserts that `self` value is not equal to the expected value.
Arguments:
- that: The value to check `self` for equality with.
- frames_to_skip (optional, advanced): used to alter the location which is
displayed as the source of this error.
> Example
Assert that one value should not equal another.
import Standard.Examples
from Standard.Test import Test
example_should_not_equal = Examples.add_1_to 1 . should_not_equal 2
Any.should_not_equal : Any -> Integer -> Spec_Result
Any.should_not_equal self that frames_to_skip=0 = case self != that of
True -> Spec_Result.Success
False ->
loc = Meta.get_source_location 2+frames_to_skip
msg = self.to_text + " did equal " + that.to_text + " (at " + loc + ")."
Test.fail msg
## Added so that dataflow errors are not silently lost.
Error.should_not_equal self that frames_to_skip=0 =
_ = [that]
Test.fail_match_on_unexpected_error self 1+frames_to_skip
## Asserts that `self` value is not equal to the expected type value.
Arguments:
- that: The type to check `self` for equality with.
- frames_to_skip (optional, advanced): used to alter the location which is
displayed as the source of this error.
> Example
Assert that some type is not equal to another.
import Standard.Examples
from Standard.Test import Test
example_should_not_equal = Examples.some_type . should_not_equal_type Vector
Any.should_not_equal_type : Any -> Integer -> Spec_Result
Any.should_not_equal_type self that frames_to_skip=0 = case (self.is_same_object_as that . not) of
True -> Spec_Result.Success
False ->
loc = Meta.get_source_location 2+frames_to_skip
msg = self.to_text + " did equal type " + that.to_text + " (at " + loc + ")."
Test.fail msg
## Added so that dataflow errors are not silently lost.
Error.should_not_equal_type self that frames_to_skip=0 =
_ = [that]
Test.fail_match_on_unexpected_error self 1+frames_to_skip
## Asserts that `self` value is a Text value and starts with `that`.
Arguments:
- that: The value to check `self` starts with.
- frames_to_skip (optional, advanced): used to alter the location which is
displayed as the source of this error.
> Example
Assert that one value should start with another.
from Standard.Test import Test
example_should_start_with = "Hello World!" . should_start_with "Hello"
Any.should_start_with : Text -> Integer -> Spec_Result
Any.should_start_with self that frames_to_skip=0 = case self of
_ : Text -> if self.starts_with that then Spec_Result.Success else
loc = Meta.get_source_location 3+frames_to_skip
msg = self.to_text + " does not start with " + that.to_text + " (at " + loc + ")."
Test.fail msg
_ ->
loc = Meta.get_source_location 2+frames_to_skip
msg = self.to_text + " is not a `Text` value (at " + loc + ")."
Test.fail msg
## Asserts that `self` value is a Text value and ends with `that`.
Arguments:
- that: The value to check `self` ends with.
- frames_to_skip (optional, advanced): used to alter the location which is
displayed as the source of this error.
> Example
Assert that one value should end with another.
from Standard.Test import Test
example_should_end_with = "Hello World!" . should_end_with "ld!"
Any.should_end_with : Text -> Integer -> Spec_Result
Any.should_end_with self that frames_to_skip=0 = case self of
_ : Text -> if self.ends_with that then Spec_Result.Success else
loc = Meta.get_source_location 3+frames_to_skip
msg = self.to_text + " does not end with " + that.to_text + " (at " + loc + ")."
Test.fail msg
_ ->
loc = Meta.get_source_location 2+frames_to_skip
msg = self.to_text + " is not a `Text` value (at " + loc + ")."
Test.fail msg
## Asserts that `self` value is a Text value and starts with `that`.
Arguments:
- that: The value to check `self` starts with.
- frames_to_skip (optional, advanced): used to alter the location which is
displayed as the source of this error.
> Example
Assert that one value should start with another.
from Standard.Test import Test
example_should_start_with = "Hello World!" . should_start_with "Hello"
Error.should_start_with : Any -> Integer -> Spec_Result
Error.should_start_with self that frames_to_skip=0 =
_ = [that]
Test.fail_match_on_unexpected_error self 1+frames_to_skip
## Asserts that `self` value is a Text value and ends with `that`.
Arguments:
- that: The value to check `self` ends with.
- frames_to_skip (optional, advanced): used to alter the location which is
displayed as the source of this error.
> Example
Assert that one value should end with another.
from Standard.Test import Test
example_should_end_with = "Hello World!" . should_end_with "ld!"
Error.should_end_with : Any -> Integer -> Spec_Result
Error.should_end_with self that frames_to_skip=0 =
_ = [that]
Test.fail_match_on_unexpected_error self 1+frames_to_skip
## Asserts that `self` value is equal to the expected value.
Arguments:
- that: The value to check `self` for equality with.
> Example
Assert that one value should equal another.
import Standard.Examples
from Standard.Test import Test
example_should_equal = Examples.add_1_to 1 . should_equal 2
Error.should_equal : Any -> Integer -> Spec_Result
Error.should_equal self that frames_to_skip=0 =
_ = [that]
Test.fail_match_on_unexpected_error self 1+frames_to_skip
## Asserts that `self` is within `epsilon` from `that`.
Arguments:
- that: The value to compare `self` for equality with.
- epsilon: The epsilon used when comparing two float numbers.
- frames_to_skip (optional, advanced): used to alter the location which is
displayed as the source of this error.
> Example
Compare two float values.
from Standard.Test import Test
example_should_equal = 1.1 . should_equal 1.1
> Example
Compare two float values with an epsilon (tolerance).
from Standard.Test import Test
example_should_equal =
1.00000001 . should_equal 1.00000002 epsilon=0.0001
Number.should_equal : Float -> Float -> Integer -> Spec_Result
Number.should_equal self that epsilon=0 frames_to_skip=0 =
matches = case that of
_ : Number -> self.equals that epsilon
_ -> False
case matches of
True -> Spec_Result.Success
False ->
loc = Meta.get_source_location 2+frames_to_skip
msg = self.to_text + " did not equal " + that.to_text + " (at " + loc + ")."
Test.fail msg
## Asserts that `self` value is not an error.
It returns the original value, so that it can be inspected further.
Arguments:
- frames_to_skip (optional, advanced): used to alter the location which is
displayed as the source of this error.
> Example
Assert that a given action did not result in errors or warnings.
"foobar".write (enso_project.data / "f.txt") . should_succeed
Any.should_succeed : Integer -> Any
Any.should_succeed self frames_to_skip=0 =
_ = frames_to_skip
self
## Asserts that `self` value is not an error.
It returns the original value, so that it can be inspected further.
Arguments:
- frames_to_skip (optional, advanced): used to alter the location which is
displayed as the source of this error.
> Example
Assert that a given action did not result in errors or warnings.
"foobar".write (enso_project.data / "f.txt") . should_succeed
Error.should_succeed : Integer -> Any
Error.should_succeed self frames_to_skip=0 =
Test.fail_match_on_unexpected_error self 1+frames_to_skip
## Handles an unexpected dataflow error.
Error.should_be_a : Any -> Integer -> Any
Error.should_be_a self typ frames_to_skip=0 =
_ = typ
Test.fail_match_on_unexpected_error self 1+frames_to_skip
## Asserts that the given `Boolean` is `True`.
> Example
Assert that a boolean value is true.
import Standard.Examples
from Standard.Test import Test
example_should_be_true = Examples.get_boolean . should_be_true
Boolean.should_be_true : Spec_Result
Boolean.should_be_true self = case self of
True -> Spec_Result.Success
False ->
loc = Meta.get_source_location 2
Test.fail "Expected False to be True (at "+loc+")."
## Asserts that the given `Boolean` is `True`.
> Example
Assert that a boolean value is true.
import Standard.Examples
from Standard.Test import Test
example_should_be_true = Examples.get_boolean . should_be_true
Error.should_be_true : Spec_Result
Error.should_be_true self = Test.fail_match_on_unexpected_error self 1
## Asserts that the given `Boolean` is `False`.
> Example
Assert that a boolean value is false.
import Standard.Examples
from Standard.Test import Test
example_should_be_false = Examples.get_boolean . should_be_false
Boolean.should_be_false : Spec_Result
Boolean.should_be_false self = case self of
True ->
loc = Meta.get_source_location 2
Test.fail "Expected True to be False (at "+loc+")."
False -> Spec_Result.Success
## Asserts that the given `Boolean` is `False`.
> Example
Assert that a boolean value is false.
import Standard.Examples
from Standard.Test import Test
example_should_be_false = Examples.get_boolean . should_be_false
Error.should_be_false : Spec_Result
Error.should_be_false self = Test.fail_match_on_unexpected_error self 1
## Asserts that a value is of a given type.
Arguments:
- typ: The type to assert that `self` is a value of.
> Example
Assert that 1 is of type Integer.
from Standard.Test import Test
example_should_be_a = 1.should_be_a Integer
Any.should_be_a : Any -> Spec_Result
Any.should_be_a self typ =
loc = Meta.get_source_location 1
fail_on_wrong_arg_type =
Panic.throw <|
Illegal_Argument.Error "typ ("+typ.to_display_text+") must either be a type or a constructor. Use `should_equal` for value equality test instead."
case Meta.meta typ of
c : Meta.Constructor -> case Meta.meta self of
a : Meta.Atom ->
if a.constructor == c then Spec_Result.Success else
expected_type = Meta.get_qualified_type_name typ
actual_type = Meta.get_qualified_type_name self
message = "Expected a value of type "+expected_type+", built with constructor "+c.name+", but got a value of type "+actual_type+", built with constructor "+a.constructor.name+" instead (at "+loc+")."
Test.fail message
_ ->
expected_type = Meta.get_qualified_type_name typ
actual_type = Meta.get_qualified_type_name self
message = "Expected a value of type "+expected_type+", built with constructor "+c.name+", but got a value of type "+actual_type+" instead (at "+loc+")."
Test.fail message
_ : Meta.Type ->
ok = self.is_a typ || self==typ
if ok then Spec_Result.Success else
expected_type = Meta.get_qualified_type_name typ
actual_type = Meta.get_qualified_type_name self
message = "Expected a value of type "+expected_type+" but got a value of type "+actual_type+" instead (at "+loc+")."
Test.fail message
# Workaround for 0-argument atom constructors which 'unapplies' them.
atom : Meta.Atom ->
ctor = atom . constructor
if ctor.fields.not_empty then fail_on_wrong_arg_type else
self.should_be_a (ctor.value ...)
_ : Meta.Polyglot ->
ok = self.is_a typ
if ok then Spec_Result.Success else
actual_type = Meta.get_qualified_type_name self
message = "Expected a value of Java class "+typ.to_text+" but got a value of type "+actual_type+" instead (at "+loc+")."
Test.fail message
Meta.Primitive.Value (b : Boolean) ->
ok = self == b
if ok then Spec_Result.Success else
actual_type = Meta.get_qualified_type_name self
message = "Expected a value of "+typ.to_text+" but got a value of type "+actual_type+" instead (at "+loc+")."
Test.fail message
_ -> fail_on_wrong_arg_type
## Asserts that `self` value contains the same elements as `that`.
It only checks that all elements from one collection are also present in the
other one. Arities of elements are not checked, so the collections can still
differ in length by containing duplicate elements.
It will work on any collection which supports the methods
`each : (Any -> Nothing) -> Any` and `contains : Any -> Boolean`.
Arguments:
- that: The collection to compare.
- frames_to_skip (optional, advanced): used to alter the location which is
displayed as the source of this error.
> Example
Assert that one vector should contain the same elements as another.
import Standard.Examples
from Standard.Test import Test
example_should_equal = [1, 2] . should_contain_the_same_elements_as [2, 1]
Any.should_contain_the_same_elements_as : Any -> Integer -> Spec_Result
Any.should_contain_the_same_elements_as self that frames_to_skip=0 =
loc = Meta.get_source_location 1+frames_to_skip
that.each element->
if self.contains element . not then
msg = "The collection (" + self.to_text + ") did not contain "+element.to_text+" (at " + loc + ")."
Test.fail msg
self.each element->
if that.contains element . not then
msg = "The collection contained an element ("+element.to_text+") which was not expected (at " + loc + ")."
Test.fail msg
Spec_Result.Success
## Asserts that `self` value contains the same elements as `that`.
It only checks that all elements from one collection are also present in the
other one. Arities of elements are not checked, so the collections can still
differ in length by containing duplicate elements.
It will work on any collection which supports the methods
`each : (Any -> Nothing) -> Any` and `contains : Any -> Boolean`.
Arguments:
- that: The collection to compare.
- frames_to_skip (optional, advanced): used to alter the location which is
displayed as the source of this error.
> Example
Assert that one vector should contain the same elements as another.
import Standard.Examples
from Standard.Test import Test
example_should_equal = [1, 2] . should_contain_the_same_elements_as [2, 1]
Error.should_contain_the_same_elements_as : Any -> Integer -> Spec_Result
Error.should_contain_the_same_elements_as self that frames_to_skip=0 =
_ = [that]
Test.fail_match_on_unexpected_error self 1+frames_to_skip
## Asserts that `self` value contains only elements in `that`.
It checks that all elements from `self` are also present in `that`. It does
not require that all elements of `that` are contained in `self`. Arities of
elements are not checked, so `self` may still contain more elements than
`that` by containing duplicates.
It will work on any collection which supports the methods
`each : (Any -> Nothing) -> Any` and `contains : Any -> Boolean`.
Arguments:
- that: The collection to compare.
- frames_to_skip (optional, advanced): used to alter the location which is
displayed as the source of this error.
> Example
Assert that one vector should contain only elements in another.
import Standard.Examples
from Standard.Test import Test
example_should_equal = [1, 2] . should_only_contain_elements_in [1, 2, 3, 4]
Any.should_only_contain_elements_in : Any -> Integer -> Spec_Result
Any.should_only_contain_elements_in self that frames_to_skip=0 =
loc = Meta.get_source_location 1+frames_to_skip
self.each element->
if that.contains element . not then
msg = "The collection contained an element ("+element.to_text+") which was not expected (at " + loc + ")."
Test.fail msg
Spec_Result.Success
## Asserts that `self` value contains only elements in `that`.
It checks that all elements from `self` are also present in `that`. It does
not require that all elements of `that` are contained in `self`. Arities of
elements are not checked, so the collections can still differ in length by
containing duplicate elements.
It will work on any collection which supports the methods
`each : (Any -> Nothing) -> Any` and `contains : Any -> Boolean`.
Arguments:
- that: The collection to compare.
- frames_to_skip (optional, advanced): used to alter the location which is
displayed as the source of this error.
> Example
Assert that one vector should contain only elements in another.
import Standard.Examples
from Standard.Test import Test
example_should_equal = [1, 2] . should_only_contain_elements_in [1, 2, 3, 4]
Error.should_only_contain_elements_in : Any -> Integer -> Spec_Result
Error.should_only_contain_elements_in self that frames_to_skip=0 =
_ = [that]
Test.fail_match_on_unexpected_error self 1+frames_to_skip
## Asserts that `self` value contains an element.
Arguments:
- element: The element to check.
- frames_to_skip (optional, advanced): used to alter the location which is
displayed as the source of this error.
This method delegates to the `contains` method of `self` and will use the
rules of the particular type - be it a `Vector`, `Text` or any custom type
implementing a method `contains : a -> Boolean`.
> Example
Assert that a string contains a substring.
from Standard.Test import Test
example_should_equal = "foobar".should_contain "foo"
Any.should_contain : Any -> Integer -> Spec_Result
Any.should_contain self element frames_to_skip=0 =
loc = Meta.get_source_location 1+frames_to_skip
contains_result = Panic.catch No_Such_Method (self.contains element) caught_panic->
if caught_panic.payload.method_name != "contains" then Panic.throw caught_panic else
msg = "The value (" + self.to_text + ") does not support the method `contains` (at " + loc + ")."
Test.fail msg
if contains_result then Spec_Result.Success else
msg = "The value (" + self.to_text + ") did not contain the element (" + element.to_text + ") (at " + loc + ")."
Test.fail msg
## Asserts that `self` value contains an element.
Arguments:
- element: The element to check.
- frames_to_skip (optional, advanced): used to alter the location which is
displayed as the source of this error.
This method delegates to the `contains` method of `self` and will use the
rules of the particular type - be it a `Vector`, `Text` or any custom type
implementing a method `contains : a -> Boolean`.
> Example
Assert that a string contains a substring.
from Standard.Test import Test
example_should_equal = "foobar".should_contain "foo"
Error.should_contain : Any -> Integer -> Spec_Result
Error.should_contain self element frames_to_skip=0 =
_ = [element]
Test.fail_match_on_unexpected_error self 1+frames_to_skip
## Asserts that `self` value does not contain an element.
Arguments:
- element: The element to check.
- frames_to_skip (optional, advanced): used to alter the location which is
displayed as the source of this error.
This method delegates to the `contains` method of `self` and will use the
rules of the particular type - be it a `Vector`, `Text` or any custom type
implementing a method `contains : a -> Boolean`.
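> Example
Assert that a string does not contain a substring (mirroring the `should_contain` example above).
from Standard.Test import Test
example_should_not_contain = "foobar".should_not_contain "baz"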
Any.should_not_contain : Any -> Integer -> Spec_Result
Any.should_not_contain self element frames_to_skip=0 =
loc = Meta.get_source_location 1+frames_to_skip
contains_result = Panic.catch No_Such_Method (self.contains element) caught_panic->
if caught_panic.payload.method_name != "contains" then Panic.throw caught_panic else
msg = "The value (" + self.to_text + ") does not support the method `contains` (at " + loc + ")."
Test.fail msg
if contains_result.not then Spec_Result.Success else
msg = "The value (" + self.to_text + ") contained the element (" + element.to_text + "), but it was expected to not contain it (at " + loc + ")."
Test.fail msg
## Asserts that `self` value does not contain an element.
Arguments:
- element: The element to check.
- frames_to_skip (optional, advanced): used to alter the location which is
displayed as the source of this error.
This method delegates to the `contains` method of `self` and will use the
rules of the particular type - be it a `Vector`, `Text` or any custom type
implementing a method `contains : a -> Boolean`.
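> Example
Assert that a string does not contain a substring (mirroring the `should_contain` example above).
from Standard.Test import Test
example_should_not_contain = "foobar".should_not_contain "baz"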
Error.should_not_contain : Any -> Integer -> Spec_Result
Error.should_not_contain self element frames_to_skip=0 =
_ = [element]
Test.fail_match_on_unexpected_error self 1+frames_to_skip

View File

@@ -0,0 +1,45 @@
private
from Standard.Base import all
import Standard.Base.Runtime.Ref.Ref
import project.Spec.Spec
polyglot java import java.lang.StringBuilder
type Group_Builder
## PRIVATE
Arguments:
- teardown_ref: A reference to a teardown method.
Impl (builder = Vector.new_builder) (teardown_ref = Ref.new (_ -> Nothing))
## Specifies a single test.
Arguments:
- name: The name of the test spec. Should be unique within the group.
- code: A thunk that takes no arguments.
- pending: Contains a reason for why the test should be ignored. If Nothing, the test
is not ignored.
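> Example
Register a spec within a group (the names are illustrative).
group_builder.specify "should add" <|
(1 + 2) . should_equal 3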
specify self (name : Text) (~code : Any) (pending : (Text | Nothing) = Nothing) =
self.builder.append <| Spec.Impl name (_ -> code) pending
## Provides a teardown for the group - a function that will be called once all the tests
from the group have finished.
Arguments:
- code: A function that does the teardown of a group. This function does not take any arguments,
and its return value is ignored.
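> Example
Close a shared resource once the group finishes (`connection` is a hypothetical value created by the group).
group_builder.teardown <|
connection.close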
teardown self (~code : Any) =
self.teardown_ref.put (_ -> code)
type Group
Impl (name : Text) (teardown : (Any -> Any)) (specs : Vector Spec) (pending : (Text | Nothing) = Nothing)
to_text self =
sb = StringBuilder.new
sb.append ("Group '" + self.name + "' specs=[")
self.specs.each spec->
sb.append (spec.to_text + ", ")
sb.append "]"
sb.toString

View File

@@ -0,0 +1,87 @@
private
from Standard.Base import all
from Standard.Base.Runtime import State
import project.Clue.Clue
import project.Group.Group
import project.Spec_Result.Spec_Result
import project.Spec.Spec
import project.Suite.Suite
import project.Suite_Config.Suite_Config
import project.Test_Result.Test_Result
import project.Test_Reporter
import project.Test.Test
run_group_with_filter : Group -> (Regex|Text|Nothing) -> Vector Test_Result
run_group_with_filter (group : Group) (spec_filter : (Regex|Text|Nothing)) =
filtered_specs = group.specs.filter spec->
case spec_filter of
regex : Regex -> (regex.match spec.name) != Nothing
text : Text -> spec.name.contains text
Nothing -> True
run_specs_from_group filtered_specs group
run_group : Group -> Vector Test_Result
run_group (group : Group) =
run_specs_from_group group.specs group
run_specs_from_group : Vector Spec -> Group -> Vector Test_Result
run_specs_from_group (specs : Vector Spec) (group : Group) =
case specs.is_empty of
True -> []
False ->
test_results = specs.map spec->
pair = run_spec spec
spec_res = pair.second
time_taken = pair.first
Test_Result.Impl group.name spec.name spec_res time_taken
# Invoke the teardown of the group
group.teardown Nothing
test_results
## PRIVATE
run_spec : Spec -> Pair Duration Spec_Result
run_spec (spec : Spec) =
pair = case spec.pending of
Nothing -> Duration.time_execution <|
State.run Clue Nothing (execute_spec_code spec.code)
reason -> Pair.new Duration.zero (Spec_Result.Pending reason)
pair
## PRIVATE
execute_spec_code : (Nothing -> Any) -> Spec_Result
execute_spec_code spec_code =
recovery = Panic.recover Any <|
result = spec_code Nothing
result.catch Any err->
Panic.throw (Finished_With.Error err result.get_stack_trace_text)
Nothing
maybeExc = case recovery of
_ -> Spec_Result.Success
result = maybeExc.catch Any ex->
case ex of
Spec_Result.Failure _ _ -> ex
Finished_With.Error err stack_trace_text ->
Spec_Result.Failure (Test.enrich_message_with_clue ("An unexpected error was returned: " + err.to_text)) details=stack_trace_text
_ -> Spec_Result.Failure (Test.enrich_message_with_clue ("An unexpected panic was thrown: " + ex.to_text)) details=maybeExc.get_stack_trace_text
result
## PRIVATE
An error describing that a test finished with an unexpected error.
type Finished_With
## PRIVATE
An error describing that a test finished with an unexpected error.
Arguments:
- err: The payload of the error that triggered this error.
- stack_trace_text: A textual representation of the stack trace for the
error.
Error err stack_trace_text

View File

@@ -0,0 +1,9 @@
import project.Suite.Suite
import project.Test.Test
import project.Problems
from project.Extensions import all
export project.Suite.Suite
export project.Test.Test
export project.Problems
from project.Extensions export all

View File

@@ -0,0 +1,135 @@
from Standard.Base import all
from project import Test
from project.Extensions import all
## Returns values of warnings attached to the value.
get_attached_warnings v =
Warning.get_all v . map .value
## UNSTABLE
Tests how a specific operation behaves depending on the requested
`Problem_Behavior`.
Arguments:
- action: The action to execute. It takes a `Problem_Behavior` which
specifies whether it should ignore problems, report them as warnings or
raise a dataflow error on the first encountered problem.
- expected_problems: a list of expected problems, in the order that they are
expected to be reported. It should not be empty. The problems are assumed
to be Atoms.
- result_checker: A function which should verify that the result generated by
the action is correct. It does not return anything, instead it should use
the standard testing approach, like `x.should_equal y`.
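> Example
A sketch with a hypothetical `my_parse` operation that either reports a `My_Problem.Error "x"` problem or returns 42.
action pb = my_parse "x" on_problems=pb
Problems.test_problem_handling action [My_Problem.Error "x"] result->
result . should_equal 42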
test_problem_handling : (Problem_Behavior -> Any) -> Vector Any -> (Any -> Nothing) -> Nothing
test_problem_handling action expected_problems result_checker =
error_checker error_result =
first_problem = expected_problems.first
first_problem_type = Meta.type_of first_problem
error_result . should_fail_with first_problem_type frames_to_skip=3
error_result.catch . should_equal first_problem frames_to_skip=3
warnings_checker warnings =
## TODO [RW] we are not checking if there are no duplicate warnings, because the warnings are in fact duplicated - we should figure out how to handle that and then possibly modify the test
Test.with_clue "The warnings were "+warnings.to_text+'.\n' <|
warnings . should_contain_the_same_elements_as expected_problems frames_to_skip=5
test_advanced_problem_handling action error_checker warnings_checker result_checker frames_to_skip=1
## UNSTABLE
Tests how a specific operation behaves depending on the requested
`Problem_Behavior`. A variant that allows more customization over how
expected problems are checked.
Arguments:
- action: The action to execute. It takes a `Problem_Behavior` which
specifies whether it should ignore problems, report them as warnings or
raise a dataflow error on the first encountered problem.
- error_checker: A function which should verify that the returned error is as
expected.
- warnings_checker: A function which should verify that the returned warnings
are as expected.
- result_checker: A function which should verify that the result generated by
the action is correct. It does not return anything, instead it should use
the standard testing approach, like `x.should_equal y`.
test_advanced_problem_handling : (Problem_Behavior -> Any) -> (Any -> Nothing) -> (Vector Any -> Nothing) -> (Any -> Nothing) -> Integer -> Nothing
test_advanced_problem_handling action error_checker warnings_checker result_checker frames_to_skip=0 =
# First, we check the action ignoring any warnings.
result_ignoring = action Problem_Behavior.Ignore
result_checker result_ignoring
get_attached_warnings result_ignoring . should_equal [] frames_to_skip=frames_to_skip+1
# Then, we check the fail-on-first-error mode.
error_result = action Problem_Behavior.Report_Error
error_checker error_result
# Lastly, we check the report warnings mode and ensure that both the result is correct and the warnings are as expected.
result_warning = action Problem_Behavior.Report_Warning
result_checker result_warning
warnings_checker (get_attached_warnings result_warning)
## UNSTABLE
Checks if the provided value does not have any attached problems.
assume_no_problems result =
loc = Meta.get_source_location 1
if result.is_error then
Test.fail "Expected the result to not be an error, but a dataflow error has been matched: "+result.catch.to_display_text+" (at "+loc+")."
warnings = get_attached_warnings result
if warnings.not_empty then
Test.fail "Expected the result to not contain any warnings, but it did: "+warnings.to_text+" (at "+loc+")."
## UNSTABLE
Checks if the provided value has a specific warning attached.
It allows other warnings to be present also.
Arguments:
- expected_warning: The expected warning. It can be either a warning type or
a concrete value.
- result: The value to check.
expect_warning : Any -> Any -> Nothing
expect_warning expected_warning result =
loc = Meta.get_source_location 1
if result.is_error then
Test.fail "Expected a warning "+expected_warning.to_text+", but a dataflow error has been matched: "+result.catch.to_display_text+" (at "+loc+")."
warnings = get_attached_warnings result
found = warnings.find if_missing=Nothing x->
(x == expected_warning) || (x.is_a expected_warning)
found.if_nothing <|
Test.fail "Expected the result to contain a warning: "+expected_warning.to_text+", but it did not. The warnings were "+warnings.short_display_text+' (at '+loc+').'
## UNSTABLE
Checks if the provided value has a specific warning attached and if there are
no other warnings.
As a utility, it also returns the found warning.
Arguments:
- expected_warning: The expected warning. It can be either a warning type or
a concrete value.
- result: The value to check.
expect_only_warning : Any -> Any -> Any
expect_only_warning expected_warning result =
loc = Meta.get_source_location 1
if result.is_error then
Test.fail "Expected only warning "+expected_warning.to_text+", but a dataflow error has been matched: "+result.catch.to_display_text+" (at "+loc+")."
warnings = get_attached_warnings result
is_expected x =
(x == expected_warning) || (x.is_a expected_warning)
found = warnings.find if_missing=Nothing is_expected
if found.is_nothing then
Test.fail "Expected the result to contain a warning: "+expected_warning.to_text+", but it did not. The warnings were "+warnings.short_display_text+' (at '+loc+').'
invalid = warnings.filter x-> is_expected x . not
if invalid.not_empty then
Test.fail "Expected the result to contain only the warning: "+found.to_text+", but it also contained: "+invalid.to_text+' (at '+loc+').'
found
## UNSTABLE
Checks if the provided value does _not_ have a warning of the specified type.
It allows other warnings to be present also.
not_expect_warning : Any -> Any -> Nothing
not_expect_warning expected_warning_type result =
warnings = get_attached_warnings result
found = warnings.find if_missing=Nothing x-> x.is_a expected_warning_type
if found.is_nothing.not then
loc = Meta.get_source_location 3
Test.fail 'The result contained a warning it was not supposed to: '+found.to_text+' (at '+loc+').'

View File

@@ -0,0 +1,12 @@
private
from Standard.Base import all
type Spec
Impl (name : Text) (code : Any -> Any) (pending : (Nothing | Text))
to_text self =
pend = if self.pending.is_nothing then "" else
"pending = " + self.pending
"Spec '" + self.name + "' " + pend

View File

@@ -0,0 +1,39 @@
private
from Standard.Base import all
## Result of one test Spec
type Spec_Result
## Represents a successful behavioral test.
Success
## Represents a failing behavioral test.
Arguments:
- message: The reason why the test failed.
- details: Additional context of the error, for example the stack trace.
Failure message details=Nothing
## Represents a pending behavioral test.
Arguments:
- reason: Text describing why the test is pending.
Pending reason
## Checks if the Spec_Result is pending.
is_pending : Boolean
is_pending self = case self of
Spec_Result.Pending _ -> True
_ -> False
## Checks if the Spec_Result is a failure.
is_fail : Boolean
is_fail self = case self of
Spec_Result.Failure _ _ -> True
_ -> False
## Checks if the Spec_Result is a success.
is_success : Boolean
is_success self = case self of
Spec_Result.Success -> True
_ -> False

View File

@@ -0,0 +1,96 @@
from Standard.Base import all
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Runtime.State
import project.Group.Group
import project.Group.Group_Builder
import project.Helpers
import project.Suite_Config.Suite_Config
import project.Test_Reporter
polyglot java import java.lang.StringBuilder
type Suite_Builder
## PRIVATE
Arguments:
- builder: Vector builder that holds all the groups.
Impl builder
## Add a group to the suite builder.
Arguments:
- name: Name of the group. When collecting multiple groups into a single suite, all the
group names must be unique. Otherwise, a panic is thrown.
- fn: A callback function that takes `Group_Builder` as an argument. You can call the
`specify` method on group builder. See its docs.
- pending: Contains a reason for why the test group should be ignored. If Nothing, the group
is not ignored.
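> Example
Register a group with a single spec (the names are illustrative).
suite_builder.group "Numbers" group_builder->
group_builder.specify "should multiply" <|
(2 * 3) . should_equal 6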
group : Text -> (Group_Builder -> Any) -> Nothing
group self (name:Text) (fn : (Group_Builder -> Any)) (pending : (Text | Nothing) = Nothing) =
group_builder = Group_Builder.Impl
fn group_builder
group = Group.Impl name group_builder.teardown_ref.get group_builder.builder.to_vector pending
self.builder.append group
## A testing suite that holds all the groups with test specs.
Users are expected to build the suite via a `Suite_Builder`, that is, via the `Test.build` method.
type Suite
## PRIVATE
Impl (groups : Vector Group)
## Run only tests that match the provided filters.
A filter can be either Regex, Text, or Nothing. If Nothing, it matches all the names.
If Text, it matches only names that contain that text as a substring.
If a Regex, the `regex.match` method is invoked for every name.
Note that you can specify the regex such that it matches, e.g., exactly two test spec names
via something like `Regex.compile "(name1|name2)"`.
Arguments:
- group_filter: Filter for group names.
- spec_filter: Filter for spec names.
- should_exit: If true, executes `System.exit` at the end.
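> Example
Run only specs whose names contain "bitwise", from groups whose names match a regex (the filters are illustrative).
suite.run_with_filter group_filter=(Regex.compile "Integers|Floats") spec_filter="bitwise"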
run_with_filter : (Regex | Text | Nothing) -> (Regex | Text | Nothing) -> Nothing
run_with_filter self group_filter=Nothing spec_filter=Nothing should_exit=True =
config = Suite_Config.from_environment
filtered_groups = self.groups.filter group->
group_name_matches = case group_filter of
regex : Regex -> (regex.match group.name) != Nothing
text : Text -> group.name.contains text
Nothing -> True
case group_name_matches of
False -> False
True -> group.pending == Nothing
junit_sb_builder = if config.should_output_junit then StringBuilder.new else Nothing
all_results = Test_Reporter.wrap_junit_testsuites config junit_sb_builder <|
filtered_groups.flat_map group->
results = Helpers.run_group_with_filter group spec_filter
Test_Reporter.print_report results config junit_sb_builder
results
succ_tests = all_results.filter (_.is_success) . length
failed_tests = all_results.filter (_.is_fail) . length
skipped_tests = all_results.filter (_.is_pending) . length
case should_exit of
True ->
IO.println <| succ_tests.to_text + " tests succeeded."
IO.println <| failed_tests.to_text + " tests failed."
IO.println <| skipped_tests.to_text + " tests skipped."
exit_code = if failed_tests > 0 then 1 else 0
System.exit exit_code
False ->
failed_tests == 0
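## Returns the names of all groups in this suite.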
group_names self =
self.groups.map (_.name)
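## Prints the structure of the suite - all group names and their spec names.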
print_all self =
IO.println "Test Suite:"
self.groups.each group->
IO.println <| " Group '" + group.name + "':"
group.specs.each spec->
IO.println <| " " + spec.name

View File

@@ -0,0 +1,66 @@
private
from Standard.Base import all
import Standard.Base.Runtime.Source_Location.Source_Location
import Standard.Base.Runtime.Stack_Trace_Element
polyglot java import java.lang.NullPointerException
## PRIVATE
find_project_root : File -> File
find_project_root path =
if path.is_nothing then Nothing else
handler _ = Nothing
Panic.catch NullPointerException handler=handler <|
if path.name == "src" then path.parent else
@Tail_Call find_project_root path.parent
## PRIVATE
find_caller_script : Vector Stack_Trace_Element -> File
find_caller_script stack =
find_main idx =
if stack.at idx . name . split "." . last == "main" then idx else
@Tail_Call find_main (idx + 1)
main_index = find_main 0
find_caller idx =
source = stack.at idx . source_location
case source of
_ : Source_Location -> stack.at idx . source_location . file
_ ->
if (idx + 1 == stack.length) then Nothing else
@Tail_Call find_caller (idx + 1)
find_caller main_index
## Holds configuration for a test `Suite`
type Suite_Config
## PRIVATE
Construct a configuration
Arguments:
- print_only_failures: If true, only failed specs (and groups containing them) are printed.
- output_path: The path to the JUnit XML file to write to. If Nothing, no JUnit XML file
will be written.
- use_ansi_colors: If true, the console output is colored with ANSI escape codes.
Value (print_only_failures : Boolean) (output_path : (File | Nothing)) (use_ansi_colors : Boolean)
## Creates a Suite_Config based on the environment and caller location
from_environment : Suite_Config
from_environment =
print_only_failures = Environment.get "REPORT_ONLY_FAILED" != Nothing
junit_folder = Environment.get "ENSO_TEST_JUNIT_DIR"
use_ansi_colors = Environment.get "ENSO_TEST_ANSI_COLORS" . is_nothing . not
results_path = if junit_folder.is_nothing then Nothing else
caller_script = find_caller_script Runtime.get_stack_trace
project_root = find_project_root caller_script
case project_root.is_error || project_root.is_nothing of
True ->
IO.println "Unable to determine root project path. JUnit output disabled."
Nothing
False ->
(File.new junit_folder) / project_root.name / "JUnit.xml"
Suite_Config.Value print_only_failures results_path use_ansi_colors
## Whether the results should be written to a JUnit XML file.
should_output_junit self =
self.output_path.is_nothing.not

View File

@@ -0,0 +1,150 @@
from Standard.Base import all
from Standard.Base.Runtime import State
import project.Test_Result.Test_Result
import project.Clue.Clue
import project.Group.Group
import project.Suite.Suite
import project.Suite.Suite_Builder
import project.Spec.Spec
import project.Spec_Result.Spec_Result
## Contains only static methods
type Test
## Construct a Test Suite object
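Arguments:
- fn: A callback that takes a `Suite_Builder` and registers groups on it.
> Example
Build and run a minimal suite (the names are illustrative).
suite = Test.build suite_builder->
suite_builder.group "A Group" group_builder->
group_builder.specify "a spec" <|
True . should_be_true
suite.run_with_filter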
build : (Suite_Builder -> Any) -> Suite
build fn =
b = Vector.new_builder
fn (Suite_Builder.Impl b)
groups_vec = b.to_vector
Suite.Impl groups_vec
## Expect a function to fail with the provided panic.
Arguments:
- action: The action to evaluate that is expected to fail with a panic.
- matcher: The expected type of the panic thrown by `action`.
> Example
Expect that a computation should panic as part of a test.
import Standard.Examples
from Standard.Test import Test
example_expect_panic_with =
Test.expect_panic_with Examples.throw_panic Examples.My_Error
expect_panic_with : Any -> Any -> Test_Result
expect_panic_with ~action matcher =
res = Panic.recover Any action
case res of
_ ->
loc = Meta.get_source_location 2
return_suffix = if res.is_nothing then "" else "and returned ["+res.to_text+"]"
Test.fail ("Expected a " + matcher.to_text + " to be thrown, but the action succeeded " + return_suffix + " (at "+loc+").")
err = res.catch
if err.is_a matcher then Nothing else
Test.fail ("Expected a " + matcher.to_text + ", but " + err.to_text + " was thrown instead.")
## Expect a function to fail with the provided panic.
An alternative API to `expect_panic_with` where the order of arguments is
more natural - as it allows blocks without reordering the arguments.
Arguments:
- matcher: The expected type of the panic thrown by `action`.
- action: The action to evaluate that is expected to fail with a panic.
> Example
Expect that a computation should panic as part of a test.
import Standard.Examples
from Standard.Test import Test
example_expect_panic =
Test.expect_panic Examples.My_Error <|
IO.println 'hello'
Examples.throw_panic
IO.println 'this is not reached'
expect_panic : Any -> Any -> Test_Result
expect_panic matcher ~action = Test.expect_panic_with action matcher
## Checks that the provided action returns without any errors or warnings.
If you just want to check for errors, usage of the `.should_succeed`
extension function is preferred.
assert_no_problems value frames_to_skip=0 =
value.catch Any _->
Test.fail_match_on_unexpected_error value 2+frames_to_skip
warnings = Warning.get_all value . map .value
if warnings.not_empty then
loc = Meta.get_source_location 2+frames_to_skip
msg = "The action returned unexpected warnings: " + warnings.to_text + " (at " + loc + ")."
Test.fail msg
## Fail a test with the given message.
Arguments:
- message: The message printed when failing the test.
> Example
Failing a test manually.
from Standard.Test import Test
example_fail = Test.fail "Something went wrong."
fail : Text -> Nothing|Text -> Spec_Result
fail message details=Nothing =
failure = Spec_Result.Failure (Test.enrich_message_with_clue message) details
Panic.throw failure
## PRIVATE
enrich_message_with_clue : Text -> Text
enrich_message_with_clue message =
case State.get Clue of
Clue.Value add_clue -> add_clue message
_ -> message
## PRIVATE
Reports an unexpected dataflow error has occurred.
fail_match_on_unexpected_error : Error -> Integer -> Nothing
fail_match_on_unexpected_error error frames_to_skip =
payload = error.catch
loc = Meta.get_source_location 1+frames_to_skip
msg = "An unexpected dataflow error (" + payload.to_text + ") has been matched (at " + loc + ")."
Test.fail msg+'\n'+error.get_stack_trace_text
## Executes the block of code passed as behavior and adds a clue modifier which
changes how assertion failures are reported.
Nesting with_clue invocations results in clue aggregation.
Arguments:
- clue: either a text which gets prepended to the failure or a function which transforms the failure message
- behavior: the behavior to test
> Example
Add a clue to a test
from Standard.Test_New import all
main =
suite = Test.build suite_builder->
suite_builder.group "Tests" group_builder->
group_builder.specify "some property" <|
xs = Vector.new 100 (n -> n)
xs.each x->
Test.with_clue ("["+x.to_text+"] ") <|
x . should_equal 0
suite.run_with_filter
with_clue : Text|(Text -> Text) -> Any -> Any
with_clue ~clue ~behavior =
add_clue x = case clue of
_ : Text -> clue + x
_ : Function -> clue x
prev_clue = State.get Clue
next_clue = case prev_clue of
Clue.Value prev_add_clue -> (x -> prev_add_clue (add_clue x))
_ -> add_clue
State.put Clue (Clue.Value next_clue)
result = behavior
State.put Clue prev_clue
result

View File

@@ -0,0 +1,22 @@
from Standard.Base import all
polyglot java import org.enso.base.Environment_Utils
## ADVANCED
UNSTABLE
Runs a given action with an environment variable modified to a given value.
The environment variable is restored to its original value after the action.
The environment variable override is only visible to the Enso
`Environment.get` method; the environment as seen from a direct
`System.getenv` Java call remains unchanged.
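> Example
Temporarily override a variable for the duration of an action (`MY_VAR` is an illustrative name).
unsafe_with_environment_override "MY_VAR" "overridden" <|
Environment.get "MY_VAR" . should_equal "overridden"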
unsafe_with_environment_override : Text -> Text -> Any -> Any
unsafe_with_environment_override key value ~action =
## This has to be done in Enso, not in Java, due to the bug: https://github.com/enso-org/enso/issues/7117
If done in Java, Enso test functions do not work correctly, because they cannot access State.
old_value = Environment_Utils.getOverride key
restore_previous =
if old_value.is_nothing then Environment_Utils.removeOverride key else Environment_Utils.setOverride key old_value
Panic.with_finalizer restore_previous <|
Environment_Utils.setOverride key value
action

View File

@@ -0,0 +1,148 @@
private
from Standard.Base import all
from Standard.Base.Runtime import assert
import Standard.Base.Runtime.Context
import project.Suite_Config.Suite_Config
import project.Spec_Result.Spec_Result
import project.Test.Test
import project.Test_Result.Test_Result
polyglot java import java.lang.StringBuilder
## PRIVATE
Wraps the action in the JUnit XML header and footer and, if JUnit output is enabled, writes the collected XML to the configured file.
wrap_junit_testsuites : Suite_Config -> (StringBuilder|Nothing) -> Any -> Nothing
wrap_junit_testsuites config builder ~action =
if config.should_output_junit then
builder.append '<?xml version="1.0" encoding="UTF-8"?>\n'
builder.append '<testsuites>\n'
result = action
if config.should_output_junit then
builder.append '</testsuites>\n'
Context.Output.with_enabled <|
config.output_path.parent.create_directory
builder.toString.write config.output_path
result
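## PRIVATE
Wraps the text in ANSI escape codes that render it bold red in supporting terminals.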
red text =
'\u001b[31;1m' + text + '\u001b[0m'
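## PRIVATE
Wraps the text in ANSI escape codes that render it bold green in supporting terminals.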
green text =
'\u001b[32;1m' + text + '\u001b[0m'
maybe_red_text (text : Text) (config : Suite_Config) =
if config.use_ansi_colors then (red text) else text
maybe_green_text (text : Text) (config : Suite_Config) =
if config.use_ansi_colors then (green text) else text
## Print result for a single Spec run
print_single_result : Test_Result -> Suite_Config -> Nothing
print_single_result (test_result : Test_Result) (config : Suite_Config) =
times_suffix =
times = test_result.time_taken.total_milliseconds.to_text + "ms"
"[" + times + "]"
case test_result.spec_result of
Spec_Result.Success ->
if config.print_only_failures.not then
txt = " - " + test_result.spec_name + " " + times_suffix
IO.println (maybe_green_text txt config)
Spec_Result.Failure msg details ->
txt = " - [FAILED] " + test_result.spec_name + " " + times_suffix
IO.println (maybe_red_text txt config)
IO.println (" Reason: " + msg)
if details.is_nothing.not then
IO.println details
Spec_Result.Pending reason ->
if config.print_only_failures.not then
IO.println (" - [PENDING] " + test_result.spec_name)
IO.println (" Reason: " + reason)
## Prints all the results, optionally writing them to a JUnit XML output.
Arguments:
- test_results: Vector of `Test_Result`. Can be empty. Can contain results from multiple
groups.
- builder: StringBuilder or Nothing. If a StringBuilder, a JUnit XML report is appended to
that StringBuilder.
print_report : Vector Test_Result -> Suite_Config -> (StringBuilder|Nothing) -> Nothing
print_report (test_results : Vector Test_Result) (config : Suite_Config) (builder : (StringBuilder | Nothing)) =
distinct_group_names = test_results.map (_.group_name) . distinct
results_per_group = distinct_group_names.fold Map.empty acc-> group_name->
group_results = test_results.filter res->
res.group_name == group_name
assert (group_results.length > 0)
acc.insert group_name group_results
results_per_group.each_with_key group_name-> group_results->
print_group_report group_name group_results config builder
## Prints a report for test results from a single group.
Arguments:
- test_results: Test results from a single group.
print_group_report : Text -> Vector Test_Result -> Suite_Config -> (StringBuilder|Nothing) -> Nothing
print_group_report group_name test_results config builder =
distinct_groups = test_results.distinct (res-> res.group_name)
assert (distinct_groups.length == 1)
total_time = test_results.fold Duration.zero acc-> res->
acc + res.time_taken
if config.should_output_junit then
assert builder.is_nothing.not "Builder must be specified when JUnit output is enabled"
builder.append (' <testsuite name="' + (escape_xml group_name) + '" timestamp="' + (Date_Time.now.format "yyyy-MM-dd'T'HH:mm:ss") + '"')
builder.append (' tests="' + test_results.length.to_text + '"')
builder.append (' disabled="' + test_results.filter _.is_pending . length . to_text + '"')
builder.append (' errors="' + test_results.filter _.is_fail . length . to_text + '"')
builder.append (' time="' + total_time.total_seconds.to_text + '"')
builder.append ('>\n')
test_results.each result->
builder.append (' <testcase name="' + (escape_xml result.spec_name) + '" time="' + ((result.time_taken.total_milliseconds / 1000.0).to_text) + '">')
case result.spec_result of
Spec_Result.Success -> Nothing
Spec_Result.Failure msg details ->
escaped_message = escape_xml msg . replace '\n' '&#10;'
builder.append ('\n <failure message="' + escaped_message + '">\n')
# We always print the message again as content - otherwise the GitHub action may fail to parse it.
builder.append (escape_xml msg)
if details.is_nothing.not then
## If there are additional details, we print them as well.
builder.append '\n\n'
builder.append (escape_xml details)
builder.append '\n </failure>\n'
Spec_Result.Pending msg -> builder.append ('\n <skipped message="' + (escape_xml msg) + '"/>\n ')
builder.append ' </testcase>\n'
builder.append ' </testsuite>\n'
should_print_behavior = config.print_only_failures.not || test_results.any (r -> r.is_fail)
if should_print_behavior then
tests_succeeded = test_results.fold 0 acc-> res->
if res.is_success then acc + 1 else acc
tests_failed = test_results.fold 0 acc-> res->
if res.is_fail then acc + 1 else acc
some_test_failed = tests_failed > 0
tests_executed = tests_succeeded + tests_failed
group_description =
counts = tests_succeeded.to_text + "/" + tests_executed.to_text
times = total_time.total_milliseconds.to_text + "ms"
group_name + ": " + "[" + counts + ", " + times + "]"
IO.println <| case some_test_failed of
True -> maybe_red_text ("[FAILED] " + group_description) config
False -> maybe_green_text group_description config
test_results.each result->
print_single_result result config
## PRIVATE
Escape Text for XML
escape_xml : Text -> Text
escape_xml input =
input.replace '&' '&amp;' . replace '"' '&quot;' . replace "'" '&apos;' . replace '<' '&lt;' . replace '>' '&gt;'

View File

@@ -0,0 +1,21 @@
from Standard.Base import all
import project.Spec_Result.Spec_Result
## A wrapper for `Spec_Result` that also carries the group name and the spec name.
type Test_Result
Impl (group_name : Text) (spec_name : Text) (spec_result : Spec_Result) (time_taken : Duration)
to_text self =
"'" + self.group_name + "' '" + self.spec_name + "': " + self.spec_result.to_text
is_pending self =
self.spec_result.is_pending
is_success self =
self.spec_result.is_success
is_fail self =
self.spec_result.is_fail

View File

@@ -10,6 +10,7 @@ object Editions {
val standardLibraries: Seq[String] = Seq(
"Standard.Base",
"Standard.Test",
+"Standard.Test_New",
"Standard.Table",
"Standard.Database",
"Standard.AWS",

View File

@@ -6,8 +6,7 @@ import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
from Standard.Base.Data.Numbers import Number_Parse_Error
-from Standard.Test import Test, Test_Suite
-import Standard.Test.Extensions
+from Standard.Test_New import all
import project.Data.Round_Spec
@@ -27,7 +26,7 @@ type Complex
Complex.from (that:Number) = Complex.new that
-spec =
+add_specs suite_builder =
eps = 0.000001
almost_max_long = 9223372036854775806
almost_max_long_times_three = 27670116110564327418
@@ -36,61 +35,60 @@ spec =
hundred_factorial = 93326215443944152681699238856266700490715968264381621468592963895217599993229915608941463976156518286253697920827223758251185210916864000000000000000000000000
very_negative = -99223372036854775808
-Round_Spec.spec "Number " .round
+suite_builder.group "Integers" group_builder->
+Round_Spec.add_specs group_builder (.round)
-Test.group "Integers" <|
-Test.specify "should be of unbound size when multiplied" <|
+group_builder.specify "should be of unbound size when multiplied" <|
1.up_to 101 . fold 1 (*) . should_equal hundred_factorial
-Test.specify "should be of unbound size when added" <|
+group_builder.specify "should be of unbound size when added" <|
(almost_max_long + almost_max_long + almost_max_long).should_equal almost_max_long_times_three
-Test.specify "should be of unbound size when subtracted" <|
+group_builder.specify "should be of unbound size when subtracted" <|
(0 - almost_max_long - almost_max_long - almost_max_long).should_equal almost_max_long_times_three.negate
-Test.specify "should be of unbound size when dividing" <|
+group_builder.specify "should be of unbound size when dividing" <|
expected = 3372816184472482867110284450043137767873196479305249187406461598235841786750685581361224832688174410089430537516012695688121622150430744676
((1.up_to 101 . fold 1 (*)).div 3*almost_max_long).should_equal expected
-Test.specify "should be of unbound size when taking remainder" <|
+group_builder.specify "should be of unbound size when taking remainder" <|
expected = 3191479909175673432
((1.up_to 101 . fold 1 (*)) % 3*almost_max_long).should_equal expected
-Test.specify "should allow defining extension methods through the Integer type for any number size" <|
+group_builder.specify "should allow defining extension methods through the Integer type for any number size" <|
876543.is_even.should_be_false
(1.up_to 101 . fold 1 (*)).is_even.should_be_true
-Test.specify "should handle the negation edge cases" <|
+group_builder.specify "should handle the negation edge cases" <|
x = 9223372036854775808
y = -x
z = -9223372036854775808
y.should_equal z
-Test.specify "should handle equality between small and big integers" <|
+group_builder.specify "should handle equality between small and big integers" <|
(1 == hundred_factorial).should_be_false
(hundred_factorial == 1).should_be_false
-Test.specify "should properly handle going to big numbers and back" <|
+group_builder.specify "should properly handle going to big numbers and back" <|
((almost_max_long * 3) / 3) . should_equal almost_max_long
-Test.specify "should use floating point arithmetic for division" <|
+group_builder.specify "should use floating point arithmetic for division" <|
(3 / 4) . should_equal 0.75 epsilon=eps
(almost_max_long * 2 / almost_max_long_times_three) . should_equal 0.6666666 epsilon=eps
-Test.specify "should support integer division" <|
+group_builder.specify "should support integer division" <|
(10.div 3) . should_equal 3
(10.div 0).should_fail_with Arithmetic_Error
-Test.specify "should support integral binary literals" <|
+group_builder.specify "should support integral binary literals" <|
lit = 2_01101101
lit . should_equal 109
-Test.specify "should support integral hexadecimal literals" <|
+group_builder.specify "should support integral hexadecimal literals" <|
lit = 16_6D
lit . should_equal 109
-Test.specify "should support bitwise and" <|
+group_builder.specify "should support bitwise and" <|
left = 2_01101101
right = 2_11000100
big_left = 16_17ffffffffffffffa
@ -100,7 +98,7 @@ spec =
big_left.bit_and right . should_equal 2_11000000
big_left.bit_and big_right . should_equal 16_17ffffffffffffff8
Test.specify "should support bitwise or" <|
group_builder.specify "should support bitwise or" <|
left = 2_01101101
right = 2_11000100
big_left = 16_17ffffffffffffffa
@ -110,7 +108,7 @@ spec =
big_left.bit_or right . should_equal 16_17ffffffffffffffe
big_left.bit_or big_right . should_equal 16_17ffffffffffffffe
Test.specify "should support bitwise exclusive or" <|
group_builder.specify "should support bitwise exclusive or" <|
left = 2_01101101
right = 2_11000100
big_left = 16_17ffffffffffffffa
@ -120,7 +118,7 @@ spec =
big_left.bit_xor right . should_equal 16_17fffffffffffff3e
big_left.bit_xor big_right . should_equal 2_00000110
Test.specify "should support bitwise negation" <|
group_builder.specify "should support bitwise negation" <|
bits = 2_01101101
big_bits = 16_17ffffffffffffffa
bits.bit_not . should_equal -2_01101110
@ -128,7 +126,7 @@ spec =
big_bits.bit_not . should_equal -16_17ffffffffffffffb
big_bits.bit_not.bit_not . should_equal big_bits
Test.specify "should support left bit shifts" <|
group_builder.specify "should support left bit shifts" <|
positive_bits = 2_01101101
negative_bits = -2_01101101
positive_big_bits = almost_max_long_times_three
@ -162,7 +160,7 @@ spec =
(negative_big_bits.bit_shift_l positive_big_bits).should_fail_with Arithmetic_Error
negative_big_bits.bit_shift_l negative_big_bits . should_equal -1
Test.specify "should support right bit shifts, preserving sign" <|
group_builder.specify "should support right bit shifts, preserving sign" <|
positive_bits = 2_01101101
negative_bits = -2_01101101
positive_big_bits = almost_max_long_times_three
@ -196,7 +194,7 @@ spec =
(negative_big_bits.bit_shift_r negative_big_bits).should_fail_with Arithmetic_Error
negative_big_bits.bit_shift_r positive_big_bits . should_equal -1
Test.specify "should be able to parse" <|
group_builder.specify "should be able to parse" <|
Integer.parse "1245623" . should_equal 1245623
Integer.parse "012345" . should_equal 12345
Integer.parse "-1234567" . should_equal -1234567
@ -206,17 +204,17 @@ spec =
Integer.parse "123A" . should_fail_with Number_Parse_Error
Integer.parse "aaaa" . should_fail_with Number_Parse_Error
Test.specify "should parse hundred factorial" <|
group_builder.specify "should parse hundred factorial" <|
txt = hundred_factorial.to_text
number = Integer.parse txt
number . should_equal hundred_factorial
Test.specify "should fail on too huge decimal" <|
group_builder.specify "should fail on too huge decimal" <|
txt = hundred_factorial.to_text + ".345"
number = Integer.parse txt
number . should_fail_with Number_Parse_Error
Test.specify "should be able to parse alternate bases" <|
group_builder.specify "should be able to parse alternate bases" <|
Integer.parse "1245623" 8 . should_equal 347027
Integer.parse "-1245623" 8 . should_equal -347027
Integer.parse "0001245623" 8 . should_equal 347027
@ -235,22 +233,22 @@ spec =
Integer.parse "-101021010" 2 . should_fail_with Number_Parse_Error
Integer.parse "123" 128 . should_fail_with Number_Parse_Error
Test.specify "should be able to invoke methods on Integer via static method call" <|
group_builder.specify "should be able to invoke methods on Integer via static method call" <|
Integer.+ 1 2 . should_equal 3
Integer.+ 1 2.5 . should_equal 3.5
Test.expect_panic_with (Integer.+ 1.5 1) Type_Error
Test.expect_panic_with (Integer.+ 1.5 2.5) Type_Error
Test.expect_panic_with (Integer.+ 1 "hello") Type_Error
Test.group "Floats" <|
suite_builder.group "Floats" group_builder->
Test.specify "should exist and expose basic arithmetic operations" <|
group_builder.specify "should exist and expose basic arithmetic operations" <|
((1.5 + 1.5)*1.3 / 2 - 3) . should_equal -1.05 epsilon=eps
Test.specify "should allow defining extension methods through the Float type" <|
group_builder.specify "should allow defining extension methods through the Float type" <|
32.5.get_fun_factor.should_equal "Wow, 32.5 is such a fun number!"
Test.specify "should be able to be parsed" <|
group_builder.specify "should be able to be parsed" <|
Float.parse "32.5" . should_equal 32.5
Float.parse "0122.5" . should_equal 122.5
Float.parse "-98.5" . should_equal -98.5
@ -258,7 +256,7 @@ spec =
Float.parse "000000.0001" . should_equal 0.0001
Float.parse "aaaa" . should_fail_with Number_Parse_Error
Test.specify "parse with locale" <|
group_builder.specify "parse with locale" <|
l = Locale.new "cs"
Float.parse "32,5" l . should_equal 32.5
Float.parse "0122,5" l . should_equal 122.5
@ -267,21 +265,21 @@ spec =
Float.parse "000000,0001" l . should_equal 0.0001
Float.parse "aaaa" l . should_fail_with Number_Parse_Error
Test.specify "decimal should parse hundred factorial well" <|
group_builder.specify "decimal should parse hundred factorial well" <|
txt = hundred_factorial.to_text + ".345"
decimal = Float.parse txt
is_huge = decimal > (hundred_factorial / 5)
is_huge . should_equal True
Test.group "Numbers" <|
suite_builder.group "Numbers" group_builder->
Test.specify "should define addition" <|
group_builder.specify "should define addition" <|
2+3 . should_equal 5
Test.specify "should define multiplication" <|
group_builder.specify "should define multiplication" <|
2*3 . should_equal 6
Test.specify "should define modulo for integers and decimals" <|
group_builder.specify "should define modulo for integers and decimals" <|
5%3 . should_equal 2
5%3 . should_be_a Integer
@ -318,7 +316,7 @@ spec =
hundred_factorial%hundred_factorial . should_equal 0
10%hundred_factorial . should_equal 10
Test.specify "should support less than operator" <|
group_builder.specify "should support less than operator" <|
(1 < 2).should_be_true
(1 < 1).should_be_false
(1 < 0).should_be_false
@ -342,7 +340,7 @@ spec =
(very_negative < hundred_factorial).should_be_true
(very_negative < Nothing).should_fail_with Incomparable_Values
Test.specify "should support less than or equal to operator" <|
group_builder.specify "should support less than or equal to operator" <|
(1 <= 2).should_be_true
(1 <= 1).should_be_true
(1 <= 0).should_be_false
@ -366,7 +364,7 @@ spec =
(very_negative <= hundred_factorial).should_be_true
(very_negative <= Nothing).should_fail_with Incomparable_Values
Test.specify "should support greater than operator" <|
group_builder.specify "should support greater than operator" <|
(1 > 2).should_be_false
(1 > 1).should_be_false
(1 > 0).should_be_true
@ -390,7 +388,7 @@ spec =
(very_negative > hundred_factorial).should_be_false
(very_negative > Nothing).should_fail_with Incomparable_Values
Test.specify "should support greater than or equal to operator" <|
group_builder.specify "should support greater than or equal to operator" <|
(1 >= 2).should_be_false
(1 >= 1).should_be_true
(1 >= 0).should_be_true
@ -414,7 +412,7 @@ spec =
(very_negative >= hundred_factorial).should_be_false
(very_negative >= Nothing).should_fail_with Incomparable_Values
Test.specify "should be ordered by the default comparator" <|
group_builder.specify "should be ordered by the default comparator" <|
Ordering.compare 1 2 . should_equal Ordering.Less
Ordering.compare 1 1 . should_equal Ordering.Equal
Ordering.compare 1 0 . should_equal Ordering.Greater
@ -438,7 +436,7 @@ spec =
Ordering.compare very_negative hundred_factorial . should_equal Ordering.Less
Ordering.compare very_negative Nothing . should_fail_with Incomparable_Values
Test.specify "should expose exponentiation operations" <|
group_builder.specify "should expose exponentiation operations" <|
(3.14 ^ 2.71).should_equal 22.216689546 epsilon=eps
(3.14 ^ 14).should_equal 9057640.36635 epsilon=eps
a = almost_max_long
@ -447,26 +445,26 @@ spec =
(2 ^ 0.5).should_equal 1.41421356237 epsilon=eps
(a^2)^0.5 . should_equal a epsilon=eps
Test.specify "should expose more involved mathematical functions" <|
group_builder.specify "should expose more involved mathematical functions" <|
Math.pi.sin.should_equal 0 epsilon=eps
(Math.pi / 4).sin.should_equal (2 ^ 0.5)/2 epsilon=eps
(Math.pi / 6).cos.should_equal (3.sqrt / 2) epsilon=eps
(17 ^ 0.13).log base=17 . should_equal 0.13 epsilon=eps
0.exp.should_equal 1
Test.specify "should allow calculating the floor value" <|
group_builder.specify "should allow calculating the floor value" <|
1.2314.floor . should_equal 1
1.floor . should_equal 1
almost_max_long_times_three_decimal.floor.to_float . should_equal almost_max_long_times_three.to_float
almost_max_long_times_three.floor . should_equal almost_max_long_times_three
Test.specify "should allow calculating the ceil value" <|
group_builder.specify "should allow calculating the ceil value" <|
1.2314.ceil . should_equal 2
1.ceil . should_equal 1
almost_max_long_times_three_decimal.ceil.to_float . should_equal almost_max_long_times_three_plus_1.to_float
almost_max_long_times_three_plus_1.ceil . should_equal almost_max_long_times_three_plus_1
Test.specify "should expose a NaN value" <|
group_builder.specify "should expose a NaN value" <|
Number.nan.is_nan . should_be_true
0.is_nan . should_be_false
Number.positive_infinity.is_nan . should_be_false
@ -481,7 +479,7 @@ spec =
Number.nan==0 . should_be_false
Number.nan!=Number.nan . should_be_true
Test.specify "should support inexact equality comparisons" <|
group_builder.specify "should support inexact equality comparisons" <|
1.0001 . equals 1.0002 epsilon=0.01 . should_be_true
1.0001 . equals 1.0002 epsilon=0.0000001 . should_be_false
@ -499,14 +497,14 @@ spec =
Number.nan . equals Number.nan . should_fail_with Incomparable_Values
Number.nan . equals 0 . should_fail_with Incomparable_Values
Test.group "rounding" <|
Test.specify "Returns the correct type" <|
suite_builder.group "rounding" group_builder->
group_builder.specify "Returns the correct type" <|
231 . round 1 . should_be_a Integer
231 . round 0 . should_be_a Integer
231 . round . should_be_a Integer
231 . round -1 . should_be_a Integer
Test.specify "Input out of range" <|
group_builder.specify "Input out of range" <|
100000000000000 . round -2 . should_fail_with Illegal_Argument
-100000000000000 . round -2 . should_fail_with Illegal_Argument
100000000000000 . round . should_fail_with Illegal_Argument
@ -518,17 +516,17 @@ spec =
99999999999999 . round -2 . should_equal 100000000000000
-99999999999999 . round -2 . should_equal -100000000000000
Test.specify "Reject bigints before reaching the Java" <|
group_builder.specify "Reject bigints before reaching the Java" <|
922337203685477580700000 . round . should_fail_with Illegal_Argument
-922337203685477580700000 . round . should_fail_with Illegal_Argument
Test.specify "Can handle small numbers computed from bigints" <|
group_builder.specify "Can handle small numbers computed from bigints" <|
(922337203685477580712345 - 922337203685477580700000) . round . should_equal 12345
((99999999999998 * 1000).div 1000) . round . should_equal 99999999999998
Test.group "Float.truncate"
suite_builder.group "Float.truncate" group_builder->
Test.specify "Correctly converts to Integer" <|
group_builder.specify "Correctly converts to Integer" <|
0.1.truncate . should_equal 0
0.9.truncate . should_equal 0
3.1.truncate . should_equal 3
@ -538,24 +536,24 @@ spec =
-3.1.truncate . should_equal -3
-3.9.truncate . should_equal -3
Test.group "Integer.truncate"
suite_builder.group "Integer.truncate" group_builder->
Test.specify "Returns its argument" <|
group_builder.specify "Returns its argument" <|
0.truncate . should_equal 0
3.truncate . should_equal 3
-3.truncate . should_equal -3
Test.group "Number Conversions" <|
Test.specify "Returns its argument" <|
suite_builder.group "Number Conversions" group_builder->
group_builder.specify "Returns its argument" <|
v1 = (Complex.new 1 2) + (Complex.new 3)
v2 = (Complex.new 1 2) + 3
v1 . should_equal v2
Test.group "BigInts" <|
suite_builder.group "BigInts" group_builder->
expected_value = 2002115494039257055317447151023464523838443110452722331466645440244415760562579268801894716412
bigint_spec name create mul =
Test.specify name+" 6 * 7" <|
group_builder.specify name+" 6 * 7" <|
six = create 6
seven = create 7
both = six*seven
@ -565,7 +563,7 @@ spec =
seven.is_a Integer . should_be_true
both.is_a Integer . should_be_true
Test.specify name+" power of 3 with running_fold" <|
group_builder.specify name+" power of 3 with running_fold" <|
three = create 3
two_hundred = 1.up_to 200 . running_fold three a->_->
@ -575,7 +573,7 @@ spec =
sum.is_a Integer . should_be_true
sum.div two_hundred.length . should_equal expected_value
Test.specify name+" power of 3 with map" <|
group_builder.specify name+" power of 3 with map" <|
three = create 3
two_hundred = 1.up_to 200 . map i->
@ -588,7 +586,7 @@ spec =
v -> Test.fail "Expecting Integer, but got: "+(Meta.type_of v).to_text
sum.div two_hundred.length . should_equal expected_value
Test.specify name+" BigInteger to BigDecimal test" <|
group_builder.specify name+" BigInteger to BigDecimal test" <|
h = 2^70
bd1 = BigDecimal.new h 0
bd2 = BigDecimal.new h
@ -606,7 +604,7 @@ spec =
bigint_spec "Java" to_java_bigint java_bigint_mul
bigint_spec "JavaScript" to_js_bigint js_bigint_mul
Test.specify "Matrix of values" <|
group_builder.specify "Matrix of values" <|
x = to_java_bigint 10
y = 10
z = 10.0
@ -616,7 +614,10 @@ spec =
[x, y, z, w].each b->
a+b . should_equal 20
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
foreign js to_js_bigint n = """
return BigInt(n)
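The hunks above all follow the same mechanical recipe; a condensed before/after sketch of the migration, assembled from this diff with a placeholder group and spec name:

# Old API (Standard.Test):
spec =
    Test.group "My group" <|
        Test.specify "does a thing" <|
            (1 + 1) . should_equal 2
main = Test_Suite.run_main spec

# New builder API (Standard.Test_New):
add_specs suite_builder =
    suite_builder.group "My group" group_builder->
        group_builder.specify "does a thing" <|
            (1 + 1) . should_equal 2
main =
    suite = Test.build suite_builder->
        add_specs suite_builder
    suite.run_with_filter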

View File

@ -2,15 +2,13 @@ from Standard.Base import all
import Standard.Base.Errors.Common.Type_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Test.Extensions
from Standard.Test import Test, Test_Suite
from Standard.Test_New import all
polyglot java import java.math.BigInteger
## PRIVATE
spec prefix round_fun =
Test.group prefix+"Rounding numeric tests" <|
Test.specify "Can round positive decimals correctly" <|
add_specs group_builder round_fun =
group_builder.specify "Can round positive decimals correctly" <|
round_fun 3.0 . should_equal 3
round_fun 3.00001 . should_equal 3
round_fun 3.3 . should_equal 3
@ -19,7 +17,7 @@ spec prefix round_fun =
round_fun 3.50001 . should_equal 4
round_fun 3.99999 . should_equal 4
Test.specify "Can round negative decimals correctly" <|
group_builder.specify "Can round negative decimals correctly" <|
round_fun -3.0 . should_equal -3
round_fun -3.00001 . should_equal -3
round_fun -3.3 . should_equal -3
@ -28,18 +26,18 @@ spec prefix round_fun =
round_fun -3.50001 . should_equal -4
round_fun -3.99999 . should_equal -4
Test.specify "Explicit and implicit 0 decimal places work the same" <|
group_builder.specify "Explicit and implicit 0 decimal places work the same" <|
round_fun 3.00001 0 . should_equal 3
round_fun 3.3 0 . should_equal 3
round_fun 3.00001 . should_equal 3
round_fun 3.3 . should_equal 3
Test.specify "Can round zero and small decimals correctly" <|
group_builder.specify "Can round zero and small decimals correctly" <|
round_fun 0.0 . should_equal 0
round_fun 0.00001 . should_equal 0
round_fun -0.00001 . should_equal 0
Test.specify "Can round positive decimals to a specified number of decimal places" <|
group_builder.specify "Can round positive decimals to a specified number of decimal places" <|
round_fun 3.0001 2 . should_equal 3.0
round_fun 3.1414 2 . should_equal 3.14
round_fun 3.1415 2 . should_equal 3.14
@ -52,7 +50,7 @@ spec prefix round_fun =
round_fun 3.1416 3 . should_equal 3.142
round_fun 3.9999 3 . should_equal 4.0
Test.specify "Can round negative decimals to a specified number of decimal places" <|
group_builder.specify "Can round negative decimals to a specified number of decimal places" <|
round_fun -3.0001 2 . should_equal -3.0
round_fun -3.1414 2 . should_equal -3.14
round_fun -3.1415 2 . should_equal -3.14
@ -65,7 +63,7 @@ spec prefix round_fun =
round_fun -3.1416 3 . should_equal -3.142
round_fun -3.9999 3 . should_equal -4.0
Test.specify "Can round positive decimals to a specified negative number of decimal places" <|
group_builder.specify "Can round positive decimals to a specified negative number of decimal places" <|
round_fun 1234.0 -1 . should_equal 1230
round_fun 1234.0 -2 . should_equal 1200
round_fun 1234.0 -3 . should_equal 1000
@ -80,7 +78,7 @@ spec prefix round_fun =
round_fun 1495.0 -2 . should_equal 1500
round_fun 1494.0 -2 . should_equal 1500
Test.specify "Can round negative decimals to a specified negative number of decimal places" <|
group_builder.specify "Can round negative decimals to a specified negative number of decimal places" <|
round_fun -1234.0 -1 . should_equal -1230
round_fun -1234.0 -2 . should_equal -1200
round_fun -1234.0 -3 . should_equal -1000
@ -95,7 +93,7 @@ spec prefix round_fun =
round_fun -1495.0 -2 . should_equal -1500
round_fun -1494.0 -2 . should_equal -1500
Test.specify "Banker's rounding handles half-way values correctly" <|
group_builder.specify "Banker's rounding handles half-way values correctly" <|
round_fun -3.5 use_bankers=True . should_equal -4
round_fun -2.5 use_bankers=True . should_equal -2
round_fun -1.5 use_bankers=True . should_equal -2
@ -115,7 +113,7 @@ spec prefix round_fun =
round_fun -12350.0 -2 use_bankers=True . should_equal -12400
round_fun -12250.0 -2 use_bankers=True . should_equal -12200
Test.specify "Banker's rounding handles non-half-way values just like normal rounding" <|
group_builder.specify "Banker's rounding handles non-half-way values just like normal rounding" <|
round_fun 3.0 use_bankers=True . should_equal 3
round_fun 3.00001 use_bankers=True . should_equal 3
round_fun 3.3 use_bankers=True . should_equal 3
@ -130,7 +128,7 @@ spec prefix round_fun =
round_fun -3.50001 . should_equal -4
round_fun -3.99999 . should_equal -4
Test.specify "Can round correctly near the precision limit" <|
group_builder.specify "Can round correctly near the precision limit" <|
round_fun 1.22222222225 10 . should_equal 1.2222222223
round_fun 1.222222222225 11 . should_equal 1.22222222223
round_fun 1.2222222222225 12 . should_equal 1.222222222223
@ -155,7 +153,7 @@ spec prefix round_fun =
round_fun -1.22222222222235 13 . should_equal -1.2222222222224
round_fun -1.222222222222235 14 . should_equal -1.22222222222224
Test.specify "Can round correctly near the precision limit, using banker's rounding" <|
group_builder.specify "Can round correctly near the precision limit, using banker's rounding" <|
round_fun 1.22222222225 10 use_bankers=True . should_equal 1.2222222222
round_fun 1.222222222225 11 use_bankers=True . should_equal 1.22222222222
round_fun 1.2222222222225 12 use_bankers=True . should_equal 1.222222222222
@ -180,15 +178,15 @@ spec prefix round_fun =
round_fun -1.22222222222235 13 use_bankers=True . should_equal -1.2222222222224
round_fun -1.222222222222235 14 use_bankers=True . should_equal -1.22222222222224
Test.specify "Decimal places out of range" <|
group_builder.specify "Decimal places out of range" <|
round_fun 3.1 16 . should_fail_with Illegal_Argument
round_fun 3.1 -16 . should_fail_with Illegal_Argument
Test.specify "Floating point imperfect representation counter-examples" <|
group_builder.specify "Floating point imperfect representation counter-examples" <|
round_fun 1.225 2 use_bankers=True . should_equal 1.22 # Actual result 1.23
round_fun 37.785 2 . should_equal 37.79
Test.specify "Can round small integers to a specified number of decimal places correctly (value is unchanged)"
group_builder.specify "Can round small integers to a specified number of decimal places correctly (value is unchanged)"
round_fun 0 . should_equal 0
round_fun 3 . should_equal 3
round_fun -3 . should_equal -3
@ -197,7 +195,7 @@ spec prefix round_fun =
round_fun 3 1 . should_equal 3
round_fun -3 1 . should_equal -3
Test.specify "Can round integers to a specified number of negative places correctly"
group_builder.specify "Can round integers to a specified number of negative places correctly"
round_fun 0 -1 . should_equal 0
round_fun 4 -1 . should_equal 0
round_fun 5 -1 . should_equal 10
@ -225,7 +223,7 @@ spec prefix round_fun =
round_fun 3098 -3 . should_equal 3000
round_fun 3101 -3 . should_equal 3000
Test.specify "Can round negative integers to a specified number of negative places correctly"
group_builder.specify "Can round negative integers to a specified number of negative places correctly"
round_fun -4 -1 . should_equal 0
round_fun -5 -1 . should_equal -10
round_fun -6 -1 . should_equal -10
@ -252,7 +250,7 @@ spec prefix round_fun =
round_fun -3098 -3 . should_equal -3000
round_fun -3101 -3 . should_equal -3000
Test.specify "Can round negative integers to a specified number of negative places with banker's rounding correctly" <|
group_builder.specify "Can round negative integers to a specified number of negative places with banker's rounding correctly" <|
round_fun 12300 -2 use_bankers=True . should_equal 12300
round_fun 12301 -2 use_bankers=True . should_equal 12300
round_fun 12330 -2 use_bankers=True . should_equal 12300
@ -281,10 +279,8 @@ spec prefix round_fun =
round_fun -12250 -2 use_bankers=True . should_equal -12200
round_fun -12251 -2 use_bankers=True . should_equal -12300
Test.specify "Handles incorrect argument types" <|
group_builder.specify "Handles incorrect argument types" <|
Test.expect_panic_with (round_fun 123 "two") Type_Error
Test.expect_panic_with (round_fun 123 use_bankers="no") Type_Error
Test.expect_panic_with (round_fun 123 use_bankers=0) Type_Error
## PRIVATE
main = Test_Suite.run_main (spec "Number " .round)
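Since `Round_Spec` no longer opens its own group, every caller now supplies one; the call site in `Numbers_Spec` above is the model:

suite_builder.group "Integers" group_builder->
    Round_Spec.add_specs group_builder (.round)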

View File

@ -2,6 +2,7 @@ from Standard.Base import all
from Standard.Test import Test_Suite
import Standard.Test.Extensions
import Standard.Test_New
import project.Semantic.Any_Spec
import project.Semantic.Case_Spec
@ -89,7 +90,16 @@ import project.System.Temporary_File_Spec
import project.Random_Spec
main = Test_Suite.run_main <|
main =
# Round_Spec needs to be migrated to Test_New, as it is also used from Table_Tests, which
# are entirely migrated to Test_New. This is just a temporary workaround until the
# migration is complete.
suite = Test_New.Test.build suite_builder->
Numbers_Spec.add_specs suite_builder
succeeded = suite.run_with_filter should_exit=False
if succeeded.not then System.exit 1
Test_Suite.run_main <|
Any_Spec.spec
Array_Spec.spec
Array_Proxy_Spec.spec
@ -127,7 +137,6 @@ main = Test_Suite.run_main <|
Meta_Location_Spec.spec
Names_Spec.spec
Equals_Spec.spec
Numbers_Spec.spec
Ordering_Spec.spec
Comparator_Spec.spec
Natural_Order_Spec.spec
@ -166,3 +175,4 @@ main = Test_Suite.run_main <|
System_Spec.spec
Random_Spec.spec
XML_Spec.spec

View File

@ -8,23 +8,41 @@ from Standard.Table import Sort_Column
from Standard.Table.Data.Aggregate_Column.Aggregate_Column import Group_By, Sum
from Standard.Table.Errors import Missing_Input_Columns, Duplicate_Output_Column_Names, Floating_Point_Equality
from Standard.Test import Test, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
import project.Database.Helpers.Name_Generator
from project.Common_Table_Operations.Util import run_default_backend
polyglot java import java.lang.Long as Java_Long
main = run_default_backend spec
main = run_default_backend add_specs
spec setup =
type Data
Value ~connection
setup create_connection_fn = Data.Value <|
create_connection_fn Nothing
teardown self =
self.connection.close
add_specs suite_builder setup =
prefix = setup.prefix
table_builder = setup.table_builder
materialize = setup.materialize
create_connection_fn = setup.create_connection_func
Test.group prefix+"Table.add_row_number (common)" <|
Test.specify "should rename existing column upon a name clash" <|
suite_builder.group prefix+"Table.add_row_number (common)" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "should rename existing column upon a name clash" <|
t1 = table_builder [["X", ['a', 'b']], ["Y", ['c', 'd']], ["Z", [40, 20]]]
t2 = t1.add_row_number name="Y" order_by=["X"] |> materialize |> _.order_by "X"
@ -38,13 +56,13 @@ spec setup =
r3 = t1.add_row_number name="X" on_problems=Problem_Behavior.Report_Error
r3.should_fail_with Duplicate_Output_Column_Names
Test.specify "should allow to order the row numbers by some columns" <|
group_builder.specify "should allow to order the row numbers by some columns" <|
t2 = table_builder [["X", ["a", "b", "a", "a"]], ["Y", [1, 2, 3, 4]]]
t3 = t2.add_row_number order_by=["X", (Sort_Column.Name "Y" Sort_Direction.Descending)] |> materialize |> _.order_by "Y"
t3.at "Y" . to_vector . should_equal [1, 2, 3, 4]
t3.at "Row" . to_vector . should_equal [3, 4, 2, 1]
Test.specify "should allow mixing grouping with ordering and custom start and step" <|
group_builder.specify "should allow mixing grouping with ordering and custom start and step" <|
vx = ['a', 'b', 'a', 'a', 'a', 'b', 'c', 'c']
vy = [9, 8, 7, 6, 5, 4, 100, 200]
vr = [1, 2, 3, 4, 5, 6, 7, 8]
@ -56,7 +74,7 @@ spec setup =
t1.at "Y" . to_vector . should_equal vy
t1.at "Row" . to_vector . should_equal [400, 200, 300, 200, 100, 100, 100, 200]
Test.specify "should report floating point equality warning when grouping on float columns" <|
group_builder.specify "should report floating point equality warning when grouping on float columns" <|
t = table_builder [["X", [1.0, 1.5, 1.0, 2.5, 2.5]], ["row_id", [1, 2, 3, 4, 5]]]
t1 = t.add_row_number group_by=["X"] order_by=["row_id"] |> materialize |> _.order_by "row_id"
Problems.expect_warning Floating_Point_Equality t1
@ -75,7 +93,7 @@ spec setup =
Problems.expect_warning Floating_Point_Equality t5
t5.at "Row" . to_vector . should_equal [1, 1, 2, 1, 2, 3, 4]
Test.specify "should fail if columns provided in ordering/grouping do not exist" <|
group_builder.specify "should fail if columns provided in ordering/grouping do not exist" <|
t = table_builder [["X", [20, 30, 10]]]
r1 = t.add_row_number group_by=["X", "Y", "Z"] order_by=["X"]
r1.should_fail_with Missing_Input_Columns
@ -88,7 +106,7 @@ spec setup =
r3 = t.add_row_number group_by=[44] order_by=["X"]
r3.should_fail_with Missing_Input_Columns
Test.specify "will respect the row order of order_by" <|
group_builder.specify "will respect the row order of order_by" <|
t = table_builder [["X", [1, 2, 3, 4]], ["Y", [40, 30, 20, 10]]]
t1 = t.order_by "Y"
@ -98,7 +116,7 @@ spec setup =
t2.at "Y" . to_vector . should_equal [10, 20, 30, 40]
t2.at "Row" . to_vector . should_equal [1, 2, 3, 4]
Test.specify "Should work correctly after aggregation" <|
group_builder.specify "Should work correctly after aggregation" <|
t0 = table_builder [["X", ["a", "b", "a", "c"]], ["Y", [1, 2, 4, 8]]]
t1 = t0.aggregate [Group_By "X", Sum "Y"]
@ -107,8 +125,16 @@ spec setup =
t2.at "Sum Y" . to_vector . should_equal [5.0, 2.0, 8.0]
t2.at "Row" . to_vector . should_equal [1, 2, 3]
if setup.is_database.not then Test.group prefix+"Table.add_row_number (in-memory specific)" <|
Test.specify "should add a row numbering column" <|
if setup.is_database.not then suite_builder.group prefix+"Table.add_row_number (in-memory specific)" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "should add a row numbering column" <|
t = table_builder [["X", ['a', 'b', 'a', 'a', 'c']]]
t1 = t.add_row_number
rows = t1.rows.to_vector . map .to_vector
@ -116,7 +142,7 @@ spec setup =
t1.at "Row" . to_vector . should_equal [1, 2, 3, 4, 5]
t1.at "Row" . value_type . is_integer . should_be_true
Test.specify "should allow customizing the starting index and step" <|
group_builder.specify "should allow customizing the starting index and step" <|
t = table_builder [["X", ['a', 'b', 'a']]]
t1 = t.add_row_number from=10
t1.at "Row" . to_vector . should_equal [10, 11, 12]
@ -133,7 +159,7 @@ spec setup =
t5 = t.add_row_number from=(-1)
t5.at "Row" . to_vector . should_equal [-1, 0, 1]
Test.specify "should allow to assign row numbers separately within each group" <|
group_builder.specify "should allow to assign row numbers separately within each group" <|
t = table_builder [["X", ['a', 'a', 'a', 'a', 'b', 'b']], ["Y", [40, 30, 20, 40, 20, 10]]]
t1 = t.add_row_number group_by=["X"]
t1.at "Row" . to_vector . should_equal [1, 2, 3, 4, 1, 2]
@ -142,7 +168,7 @@ spec setup =
t3 = t2.add_row_number group_by=["X", "Y"]
t3.at "Row" . to_vector . should_equal [1, 2, 1, 2, 1, 1]
Test.specify "should allow to assign row numbers separately within scattered groups, preserving the row layout" <|
group_builder.specify "should allow to assign row numbers separately within scattered groups, preserving the row layout" <|
v = ['a', 'b', 'a', 'b', 'b', 'b', 'c', 'a']
t = table_builder [["X", v]]
t1 = t.add_row_number group_by=["X"]
@ -150,7 +176,7 @@ spec setup =
t1.at "X" . to_vector . should_equal v
t1.at "Row" . to_vector . should_equal [1, 1, 2, 2, 3, 4, 1, 3]
Test.specify "should allow mixing grouping with ordering and custom start and step, preserving the original row layout" <|
group_builder.specify "should allow mixing grouping with ordering and custom start and step, preserving the original row layout" <|
vx = ['a', 'b', 'a', 'a', 'a', 'b', 'c', 'c']
vy = [9, 8, 7, 6, 5, 4, 100, 200]
vr = [1, 2, 3, 4, 5, 6, 7, 8]
@ -163,7 +189,7 @@ spec setup =
t1.at "Y" . to_vector . should_equal vy
t1.at "Row" . to_vector . should_equal [400, 200, 300, 200, 100, 100, 100, 200]
Test.specify "should allow to order the row numbers by some columns, keeping the row ordering intact" <|
group_builder.specify "should allow to order the row numbers by some columns, keeping the row ordering intact" <|
v = [9, 8, 7, 6, 5, 4, 100, 200]
t = table_builder [["X", v]]
t1 = t.add_row_number order_by=["X"]
@ -175,7 +201,7 @@ spec setup =
t3 = t2.add_row_number order_by=["X", (Sort_Column.Name "Y" Sort_Direction.Descending)]
t3.at "Row" . to_vector . should_equal [3, 4, 2, 1]
Test.specify "will fail if the row number exceeds Long range" <|
group_builder.specify "will fail if the row number exceeds Long range" <|
max_long = Java_Long.MAX_VALUE
t1 = table_builder [["X", [1, 2, 3, 4, 5]], ["Y", [1, 2, 2, 2, 2]], ["Z", [1, 5, 3, 4, 2]]]
@ -195,10 +221,18 @@ spec setup =
t1.add_row_number from=(max_long + 10) order_by=["Z"] . should_fail_with Illegal_Argument
t1.add_row_number from=(max_long + 10) group_by=["Y"] order_by=["Z"] . should_fail_with Illegal_Argument
if setup.is_database then Test.group prefix+"Table.add_row_number (Database specific)" <|
Test.specify "will use the primary key by default" <|
if setup.is_database then suite_builder.group prefix+"Table.add_row_number (Database specific)" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "will use the primary key by default" <|
src = table_builder [["X", [500, 400, 30, 1, 2]], ["Y", [10, 20, 30, 40, 50]]]
db_table = src.select_into_database_table setup.connection (Name_Generator.random_name "add-row-number-test-1") temporary=True primary_key=["X"]
db_table = src.select_into_database_table data.connection (Name_Generator.random_name "add-row-number-test-1") temporary=True primary_key=["X"]
t2 = db_table.add_row_number |> materialize |> _.order_by ["Y"]
t2.at "Y" . to_vector . should_equal [10, 20, 30, 40, 50]

View File

@ -6,20 +6,36 @@ from Standard.Table.Errors import Clashing_Column_Name, Duplicate_Output_Column_
from Standard.Database.Errors import Unsupported_Database_Operation
from Standard.Test import Test, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Common_Table_Operations.Util import run_default_backend
from project.Common_Table_Operations.Core_Spec import weird_names
main = run_default_backend spec
type Data
Value ~connection
spec setup =
table_builder = setup.table_builder
setup create_connection_fn =
Data.Value (create_connection_fn Nothing)
teardown self =
self.connection.close
add_specs suite_builder setup =
materialize = setup.materialize
create_connection_fn = setup.create_connection_func
is_case_sensitive = setup.test_selection.supports_case_sensitive_columns
Test.group setup.prefix+"Column Naming edge cases" <|
Test.specify "case insensitive name collisions - set" <|
suite_builder.group setup.prefix+"Column Naming edge cases" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "case insensitive name collisions - set" <|
t1 = table_builder [["X", [1]]]
Problems.assume_no_problems (t1.at "X" . rename "x")
t2 = t1.set "[X] + 100" "x"
@ -51,7 +67,7 @@ spec setup =
t6.at "Right X" . to_vector . should_equal [1, 1, 1, 1]
t6.at "Right x" . to_vector . should_equal [101, 101, 101, 101]
Test.specify "case insensitive name collisions - rename" <|
group_builder.specify "case insensitive name collisions - rename" <|
t1 = table_builder [["X", [1]], ["Y", [2]]]
t2 = t1.rename_columns [["X", "A"], ["Y", "a"]]
case is_case_sensitive of
@ -71,7 +87,7 @@ spec setup =
t3.column_names . should_equal ["X 1", "x"]
Problems.expect_only_warning Duplicate_Output_Column_Names t3
Test.specify "case insensitive name collisions - aggregate" <|
group_builder.specify "case insensitive name collisions - aggregate" <|
t1 = table_builder [["X", [2, 1, 3, 2]]]
t2 = t1.aggregate [Aggregate_Column.Maximum "X" "A", Aggregate_Column.Minimum "X" "a"]
@ -91,7 +107,7 @@ spec setup =
t3.at 0 . to_vector . should_equal [3]
t3.at 1 . to_vector . should_equal [1]
Test.specify "case insensitive name collisions - joins" <|
group_builder.specify "case insensitive name collisions - joins" <|
t1 = table_builder [["X", [1, 2]], ["a", [3, 4]]]
t2 = table_builder [["X", [2, 1]], ["A", [5, 6]]]
@ -118,7 +134,7 @@ spec setup =
t5 = t1.join t2 on="X" join_kind=Join_Kind.Left_Exclusive
t5.column_names . should_equal ["X", "a"]
Test.specify "case insensitive name collisions - cross_tab" <|
group_builder.specify "case insensitive name collisions - cross_tab" <|
t0 = table_builder [["X", ["a", "A", "b"]], ["Y", [4, 5, 6]]]
t1 = t0.cross_tab group_by=[] name_column="X" values=[Aggregate_Column.First "Y"] . sort_columns
case setup.is_database of
@ -136,7 +152,7 @@ spec setup =
# TODO possibly ensure a more detailed error message is included here so that the user knows the column names come from cross_tab
t1.should_fail_with Clashing_Column_Name
Test.specify "case insensitive name collisions - transpose" <|
group_builder.specify "case insensitive name collisions - transpose" <|
t0 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]]
t1 = t0.transpose attribute_column_name="a" value_column_name="A"
case setup.is_database of
@ -152,7 +168,7 @@ spec setup =
t1.column_names . should_equal ["a", "A 1"]
Problems.expect_only_warning Duplicate_Output_Column_Names t1
Test.specify "unicode-normalized-equality vs selecting columns" <|
group_builder.specify "unicode-normalized-equality vs selecting columns" <|
## In Enso, the columns 'ś' and 's\u0301' are the same entity.
But in databases, this is not necessarily the case.
t1 = table_builder [['ś', [1, 2]], ['X', ['a', 'b']]]
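The comment above can be checked directly in-memory; a minimal sketch assuming Enso's normalization-aware `Text` equality:

('ś' == 's\u0301') . should_be_true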

View File

@ -7,14 +7,13 @@ from Standard.Table.Errors import Missing_Input_Columns, Conversion_Failure
from Standard.Database.Errors import Unsupported_Database_Operation
from Standard.Test import Test, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Common_Table_Operations.Util import run_default_backend
polyglot java import java.lang.Long as Java_Long
main = run_default_backend spec
main = run_default_backend add_specs
type My_Type
Value x
@ -22,14 +21,33 @@ type My_Type
to_text : Text
to_text self = "{{{MY Type [x="+self.x.to_text+"] }}}"
spec setup =
type Data
Value ~connection
setup create_connection_fn = Data.Value <|
create_connection_fn Nothing
teardown self =
self.connection.close
add_specs suite_builder setup =
prefix = setup.prefix
table_builder = setup.table_builder
materialize = setup.materialize
supports_dates = setup.test_selection.date_time
create_connection_fn = setup.create_connection_func
supports_conversion_failure_reporting = setup.is_database.not
Test.group prefix+"Table/Column.cast - to text" <|
Test.specify "should allow to cast columns of various basic types to text" <|
suite_builder.group prefix+"Table/Column.cast - to text" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "should allow to cast columns of various basic types to text" <|
t = table_builder [["X", [1, 2, 3000]], ["Y", [True, False, True]], ["Z", [1.5, 0.125, -2.5]], ["W", ["a", "DEF", "a slightly longer text"]]]
t2 = t.cast t.column_names Value_Type.Char
t2.at "X" . value_type . is_text . should_be_true
@ -44,7 +62,7 @@ spec setup =
t2.at "W" . to_vector . should_equal ["a", "DEF", "a slightly longer text"]
if supports_dates then
Test.specify "should allow to cast date/time columns to text" <|
group_builder.specify "should allow to cast date/time columns to text" <|
t = table_builder [["X", [Date.new 2015 1 1, Date.new 2023 12 31]], ["Y", [Time_Of_Day.new 1 2 3, Time_Of_Day.new 23 57 59]], ["Z", [Date_Time.new 2015 1 1 1 2 3, Date_Time.new 2023 11 30 22 45 44]]]
t2 = t.cast t.column_names Value_Type.Char
t2.at "X" . value_type . is_text . should_be_true
@ -61,20 +79,20 @@ spec setup =
vz.second . should_contain "22:45:44"
if setup.is_database.not then
Test.specify "should allow to cast a column of objects to text" <|
group_builder.specify "should allow to cast a column of objects to text" <|
t = table_builder [["X", [My_Type.Value 42, My_Type.Value "X"]]]
c = t.at "X" . cast Value_Type.Char
c.value_type.is_text . should_be_true
c.to_vector . should_equal ["{{{MY Type [x=42] }}}", "{{{MY Type [x=X] }}}"]
Test.specify "should allow to cast an integer column to a decimal type" <|
group_builder.specify "should allow to cast an integer column to a decimal type" <|
t = table_builder [["X", [1, 2, 3]]]
c = t.at "X" . cast Value_Type.Decimal
c.value_type.is_decimal . should_be_true
c.to_vector . should_equal [1, 2, 3]
if setup.test_selection.fixed_length_text_columns then
Test.specify "should allow to cast a text column to fixed-length" <|
group_builder.specify "should allow to cast a text column to fixed-length" <|
t = table_builder [["X", ["a", "DEF", "a slightly longer text"]]]
c = t.at "X" . cast (Value_Type.Char size=3 variable_length=False)
c.value_type . should_equal (Value_Type.Char size=3 variable_length=False)
@ -83,7 +101,7 @@ spec setup =
# No Conversion_Failure warning here, because we started with text, so it was expected we will trim it if needed.
Problems.assume_no_problems c
Test.specify "should allow to cast a text column to variable-length with a max size" <|
group_builder.specify "should allow to cast a text column to variable-length with a max size" <|
t = table_builder [["X", ["a", "DEF", "a slightly longer text"]]]
c = t.at "X" . cast (Value_Type.Char size=3 variable_length=True)
c.value_type . should_equal (Value_Type.Char size=3 variable_length=True)
@ -92,14 +110,14 @@ spec setup =
# No Conversion_Failure warning here, because we started with text, so it was expected we will trim it if needed.
Problems.assume_no_problems c
Test.specify "should allow casting a non-text column to fixed-length text" <|
group_builder.specify "should allow casting a non-text column to fixed-length text" <|
t = table_builder [["X", [1, 22, 333]]]
c = t.at "X" . cast (Value_Type.Char size=3 variable_length=False)
c.value_type . should_equal (Value_Type.Char size=3 variable_length=False)
c.to_vector . should_equal ["1 ", "22 ", "333"]
Problems.assume_no_problems c
Test.specify "should warn when losing data if the fixed-length text length is too short to fit the data" pending=(if supports_conversion_failure_reporting.not then "Conversion_Failure is not supported in Database yet.") <|
group_builder.specify "should warn when losing data if the fixed-length text length is too short to fit the data" pending=(if supports_conversion_failure_reporting.not then "Conversion_Failure is not supported in Database yet.") <|
t = table_builder [["X", [15, 1000000, 123456, 1000, 1000]]]
c1 = t.at "X" . cast (Value_Type.Char size=3 variable_length=False)
c1.value_type . should_equal (Value_Type.Char size=3 variable_length=False)
@ -115,7 +133,7 @@ spec setup =
w2 = Problems.expect_warning Conversion_Failure c2
w2.affected_rows_count . should_equal 4
Test.specify "should not allow 0-length Char type" <|
group_builder.specify "should not allow 0-length Char type" <|
c1 = table_builder [["X", ["a", "", "bcd"]]] . at "X"
r1 = c1.cast (Value_Type.Char size=0 variable_length=False)
r1.should_fail_with Illegal_Argument
@ -124,8 +142,16 @@ spec setup =
r2 = c1.cast (Value_Type.Char size=0 variable_length=True)
r2.should_fail_with Illegal_Argument
Test.group prefix+"Table/Column.cast - numeric" <|
Test.specify "should allow to cast a boolean column to integer" <|
suite_builder.group prefix+"Table/Column.cast - numeric" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "should allow to cast a boolean column to integer" <|
t = table_builder [["X", [True, False, True]]]
c = t.at "X" . cast Value_Type.Integer
vt = c.value_type
@ -133,14 +159,14 @@ spec setup =
vt.is_integer . should_be_true
c.to_vector . should_equal [1, 0, 1]
Test.specify "should allow to cast an integer column to floating point" <|
group_builder.specify "should allow to cast an integer column to floating point" <|
t = table_builder [["X", [1, 2, 3]]]
c = t.at "X" . cast Value_Type.Float
c.value_type.is_floating_point . should_be_true
c.to_vector . should_equal [1.0, 2.0, 3.0]
if setup.test_selection.different_size_integer_types then
Test.specify "should allow to cast an integer column to a smaller bit-width and larger bit-width" <|
group_builder.specify "should allow to cast an integer column to a smaller bit-width and larger bit-width" <|
t = table_builder [["X", [1, 2, 3]]]
c = t.at "X" . cast (Value_Type.Integer Bits.Bits_16)
c.value_type . should_equal (Value_Type.Integer Bits.Bits_16)
@ -166,7 +192,7 @@ spec setup =
c3.to_vector . should_equal [1, 2, 3]
if setup.test_selection.supports_8bit_integer then
Test.specify "should allow to cast an integer column to a byte and back" <|
group_builder.specify "should allow to cast an integer column to a byte and back" <|
t = table_builder [["X", [1, 2, 3]]]
c1 = t.at "X" . cast Value_Type.Byte
c1.value_type . should_equal Value_Type.Byte
@ -194,7 +220,7 @@ spec setup =
c3.value_type . should_equal (Value_Type.Integer Bits.Bits_32)
c3.to_vector . should_equal [1, 2, Nothing, Nothing, 0]
Test.specify "should allow to cast a floating point column to integer" <|
group_builder.specify "should allow to cast a floating point column to integer" <|
t = table_builder [["X", [1.0001, 2.25, 4.0]]]
c = t.at "X" . cast Value_Type.Integer
vt = c.value_type
@ -211,7 +237,7 @@ spec setup =
[[1, 4], [1, 5]] . should_contain v2
if setup.is_database.not then
Test.specify "should report Conversion_Failure if converting a huge float to an integer overflows it" <|
group_builder.specify "should report Conversion_Failure if converting a huge float to an integer overflows it" <|
max_long = Java_Long.MAX_VALUE
too_big_double = (max_long + 1.0) * 1000.0
(too_big_double > max_long) . should_be_true
@ -233,20 +259,28 @@ spec setup =
warning.to_display_text . should_contain too_big_double.to_text
if supports_dates then
Test.group prefix+"Table/Column.cast - date/time" <|
Test.specify "should allow to get the Date part from a Date_Time" <|
suite_builder.group prefix+"Table/Column.cast - date/time" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "should allow to get the Date part from a Date_Time" <|
t = table_builder [["X", [Date_Time.new 2015 1 2 3 4 5, Date_Time.new 2023 12 31 23 56 59]]]
c = t.at "X" . cast Value_Type.Date
c.value_type . should_equal Value_Type.Date
c.to_vector . should_equal [Date.new 2015 1 2, Date.new 2023 12 31]
Test.specify "should allow to get the Time_Of_Day part from a Date_Time" <|
group_builder.specify "should allow to get the Time_Of_Day part from a Date_Time" <|
t = table_builder [["X", [Date_Time.new 2015 1 2 3 4 5, Date_Time.new 2023 12 31 23 56 59]]]
c = t.at "X" . cast Value_Type.Time
c.value_type . should_equal Value_Type.Time
c.to_vector . should_equal [Time_Of_Day.new 3 4 5, Time_Of_Day.new 23 56 59]
Test.specify "should allow to convert a Date into Date_Time" <|
group_builder.specify "should allow to convert a Date into Date_Time" <|
day1 = Date.new 2015 1 2
day2 = Date.new 2023 12 31
t = table_builder [["X", [day1, day2]]]
@ -258,19 +292,27 @@ spec setup =
expected_diff = Duration.between day1.to_date_time day2.to_date_time
diff . should_equal expected_diff
Test.group prefix+"Table/Column.cast - checking invariants" <|
Test.specify "should report an error for unsupported conversions" <|
suite_builder.group prefix+"Table/Column.cast - checking invariants" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "should report an error for unsupported conversions" <|
t = table_builder [["X", [1, 2, 3]]]
r1 = t.at "X" . cast Value_Type.Boolean
r1.should_fail_with Illegal_Argument
Test.specify "should report an error pointing to the Table.parse method where applicable" <|
group_builder.specify "should report an error pointing to the Table.parse method where applicable" <|
t = table_builder [["X", ["1", "2", "3"]]]
r1 = t.at "X" . cast Value_Type.Integer
r1.should_fail_with Illegal_Argument
r1.to_display_text . should_contain "`parse` should be used instead"
Test.specify "should report missing columns" <|
group_builder.specify "should report missing columns" <|
t = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]]
r1 = t.cast ["X", "Z"] Value_Type.Char
@ -287,7 +329,7 @@ spec setup =
t3.at "X" . to_vector . should_equal [1, 2, 3]
t3.at "Y" . to_vector . should_equal [4, 5, 6]
Test.specify "should work if the first row is NULL" <|
group_builder.specify "should work if the first row is NULL" <|
t = table_builder [["X", [Nothing, 1, 2, 3000]], ["Y", [Nothing, True, False, True]]]
c1 = t.at "X" . cast Value_Type.Char
@ -298,7 +340,7 @@ spec setup =
c2.value_type . should_equal Value_Type.Integer
c2.to_vector . should_equal [Nothing, 1, 0, 1]
Test.specify "should not lose the type after further operations were performed on the result" <|
group_builder.specify "should not lose the type after further operations were performed on the result" <|
t = table_builder [["X", [1, 2, 3000]], ["Y", [True, False, True]]]
c1 = t.at "X" . cast Value_Type.Char
c2 = t.at "Y" . cast Value_Type.Integer
@ -313,7 +355,7 @@ spec setup =
vt4.is_integer . should_be_true
c4.to_vector . should_equal [1001, 1000, 1001]
Test.specify "should not lose the type after further operations were performed on the result, even if the first row is NULL" <|
group_builder.specify "should not lose the type after further operations were performed on the result, even if the first row is NULL" <|
t = table_builder [["X", [Nothing, 1, 2, 3000]], ["Y", [Nothing, True, False, True]]]
c1 = t.at "X" . cast Value_Type.Char
c2 = t.at "Y" . cast Value_Type.Integer
@ -328,7 +370,7 @@ spec setup =
vt4.is_integer . should_be_true
c4.to_vector . should_equal [Nothing, 1001, 1000, 1001]
Test.specify 'Table.cast should cast the columns "in-place" and not reorder them' <|
group_builder.specify 'Table.cast should cast the columns "in-place" and not reorder them' <|
t = table_builder [["X", [1, 2, 3000]], ["Y", [4, 5, 6]], ["Z", [7, 8, 9]], ["A", [True, False, True]]]
t2 = t.cast ["Z", "Y"] Value_Type.Char
t2.column_names . should_equal ["X", "Y", "Z", "A"]
@ -344,14 +386,14 @@ spec setup =
t2.at "A" . to_vector . should_equal [True, False, True]
if setup.test_selection.different_size_integer_types then
Test.specify "should preserve the overridden types when materialized (Integer)" <|
group_builder.specify "should preserve the overridden types when materialized (Integer)" <|
t = table_builder [["X", [1, 2, 100]]]
t2 = t . cast "X" (Value_Type.Integer Bits.Bits_16)
t3 = materialize t2
t3.at "X" . value_type . should_equal (Value_Type.Integer Bits.Bits_16)
if setup.test_selection.fixed_length_text_columns then
Test.specify "should preserve the overridden types when materialized (Char)" <|
group_builder.specify "should preserve the overridden types when materialized (Char)" <|
t = table_builder [["Y", ["a", "abcdef", "abc"]]]
t2 = t . cast "Y" (Value_Type.Char size=3 variable_length=False)
t3 = materialize t2
@ -359,7 +401,7 @@ spec setup =
t3.at "Y" . to_vector . should_equal ["a ", "abc", "abc"]
if setup.is_database.not then
Test.specify "should allow converting a Mixed type back to a specific type" <|
group_builder.specify "should allow converting a Mixed type back to a specific type" <|
t1 = table_builder [["A", [1, Nothing, 2]], ["B", [1.5, Nothing, 2.5]], ["C", [Nothing, "x", "y"]], ["D", [Nothing, True, False]]]
m1 = t1.cast t1.column_names Value_Type.Mixed
["A", "B", "C", "D"].each c->
@ -381,7 +423,7 @@ spec setup =
m2.at "F" . cast Value_Type.Time . value_type . should_equal Value_Type.Time
m2.at "G" . cast Value_Type.Date_Time . value_type . should_equal Value_Type.Date_Time
Test.specify "will extract matching values from a mixed column and replace unconvertible ones with Nothing" <|
group_builder.specify "will extract matching values from a mixed column and replace unconvertible ones with Nothing" <|
t0 = table_builder [["X", ["a", "b", "c", My_Type.Value 42]]]
t0.at "X" . value_type . should_equal Value_Type.Mixed
r0 = t0.cast ["X"] Value_Type.Integer
@ -459,7 +501,7 @@ spec setup =
w7.affected_rows_count . should_equal 6+3+1
if setup.is_database.not then
Test.specify "should fail if there is no conversion available for a given type" <|
group_builder.specify "should fail if there is no conversion available for a given type" <|
t = table_builder [["X", [1, 2, 3]]]
# currently unsupported
@ -470,8 +512,16 @@ spec setup =
r3 = t.cast "X" (Value_Type.Unsupported_Data_Type "foobar" "foobar")
r3.should_fail_with Illegal_Argument
Test.group prefix+"Simple variant of Table/Column.parse in all backends" <|
Test.specify "should be able to parse simple integers" <|
suite_builder.group prefix+"Simple variant of Table/Column.parse in all backends" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "should be able to parse simple integers" <|
t = table_builder [["X", ["42", "0", "-1"]]]
c1 = t.at "X" . parse Value_Type.Integer
@ -482,7 +532,7 @@ spec setup =
c2.value_type.is_integer . should_be_true
c2.to_vector . should_equal [42, 0, -1]
Test.specify "should be able to parse simple floats" <|
group_builder.specify "should be able to parse simple floats" <|
t = table_builder [["X", ["42.5", "0.25", "-1.0"]]]
c1 = t.at "X" . parse Value_Type.Float
@ -494,7 +544,7 @@ spec setup =
c2.to_vector . should_equal [42.5, 0.25, -1.0]
if supports_dates then
Test.specify "should be able to parse dates using a default format" <|
group_builder.specify "should be able to parse dates using a default format" <|
t = table_builder [["X", ["2018-01-01", "2023-12-31"]]]
c1 = t.at "X" . parse Value_Type.Date
@ -505,7 +555,7 @@ spec setup =
c2.value_type.should_equal Value_Type.Date
c2.to_vector . should_equal [Date.new 2018 1 1, Date.new 2023 12 31]
if supports_dates.not then
Test.specify "should report that date parsing is unsupported" <|
group_builder.specify "should report that date parsing is unsupported" <|
t = table_builder [["X", ["2018-01-01", "2023-12-31"]]]
r1 = t.at "X" . parse Value_Type.Date
@ -514,7 +564,7 @@ spec setup =
r2 = t.parse ["X"] Value_Type.Date
r2.should_fail_with Unsupported_Database_Operation
Test.specify "should be able to parse booleans with default format" <|
group_builder.specify "should be able to parse booleans with default format" <|
t = table_builder [["X", ["true", "false", "true"]]]
c1 = t.at "X" . parse Value_Type.Boolean
@ -525,7 +575,7 @@ spec setup =
c2.value_type.should_equal Value_Type.Boolean
c2.to_vector . should_equal [True, False, True]
Test.specify "should report missing columns" <|
group_builder.specify "should report missing columns" <|
t = table_builder [["X", ["42", "0", "-1"]]]
t1 = t.parse ["X", "Y"] Value_Type.Integer error_on_missing_columns=False
@ -541,26 +591,58 @@ spec setup =
r3.should_fail_with Missing_Input_Columns
r3.catch.criteria . should_equal ["Y"]
if setup.is_database then Test.group prefix+"Table/Column auto value type" <|
Test.specify "should report unsupported" <|
if setup.is_database then suite_builder.group prefix+"Table/Column auto value type" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "should report unsupported" <|
t = table_builder [["X", [1, 2, 3]]]
t.auto_value_types . should_fail_with Unsupported_Database_Operation
t.at "X" . auto_value_type . should_fail_with Unsupported_Database_Operation
# The in-memory functionality of `expand_column` is tested in test/Table_Tests/src/In_Memory/Table_Conversion_Spec.enso
if setup.is_database then Test.group prefix+"Table.expand_column" <|
Test.specify "should report unsupported" <|
if setup.is_database then suite_builder.group prefix+"Table.expand_column" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "should report unsupported" <|
table = table_builder [["aaa", [1, 2]], ["bbb", [3, 4]], ["ccc", [5, 6]]]
table.expand_column "bbb" . should_fail_with Unsupported_Database_Operation
# The in-memory functionality of `expand_to_rows` is tested in test/Table_Tests/src/In_Memory/Table_Conversion_Spec.enso
if setup.is_database then Test.group prefix+"Table.expand_to_rows" <|
Test.specify "should report unsupported" <|
if setup.is_database then suite_builder.group prefix+"Table.expand_to_rows" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "should report unsupported" <|
table = table_builder [["aaa", [1, 2]], ["bbb", [3, 4]], ["ccc", [5, 6]]]
table.expand_to_rows "bbb" . should_fail_with Unsupported_Database_Operation
if setup.is_database.not then Test.group prefix+"Table/Column auto value type" <|
Test.specify "should allow to narrow down types of a Mixed column" <|
if setup.is_database.not then suite_builder.group prefix+"Table/Column auto value type" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "should allow to narrow down types of a Mixed column" <|
[True, False].each shrink_types->
mixer = My_Type.Value 1
t0 = table_builder [["strs", [mixer, "a", "b"]], ["ints", [mixer, 2, 3]], ["floats", [mixer, 1.5, 2.5]], ["mix", [1, mixer, "a"]], ["dates", [mixer, Date.new 2022, Date.new 2020]], ["datetimes", [mixer, Date_Time.new 2022 12 30 13 45, Date_Time.new 2020]], ["times", [mixer, Time_Of_Day.new 12 30, Time_Of_Day.new 13 45]], ["mixed_time", [Date.new 2022, Time_Of_Day.new 12 30, Date_Time.new 2019]], ["bools", [mixer, True, False]]]
@ -588,7 +670,7 @@ spec setup =
t2.at "mixed_time" . value_type . should_equal Value_Type.Mixed
t2.at "bools" . value_type . should_equal Value_Type.Boolean
Test.specify "will only modify selected columns" <|
group_builder.specify "will only modify selected columns" <|
mixer = My_Type.Value 1
t0 = table_builder [["strs", [mixer, "a", "b"]], ["ints", [mixer, 2, 3]], ["floats", [mixer, 1.5, 2.5]]]
t1 = t0.drop 1
@ -609,7 +691,7 @@ spec setup =
t4.at "ints" . value_type . should_equal Value_Type.Integer
t4.at "floats" . value_type . should_equal Value_Type.Float
Test.specify "will convert a Float column to Integer if all values can be represented as long" <|
group_builder.specify "will convert a Float column to Integer if all values can be represented as long" <|
t1 = table_builder [["X", [1.0, 2.0, 3.0]], ["Y", [1.0, 2.5, 3.0]], ["Z", [1.0, 2.0, (2.0^100)]]]
t1.at "X" . value_type . should_equal Value_Type.Float
t1.at "Y" . value_type . should_equal Value_Type.Float
@ -627,12 +709,12 @@ spec setup =
automatically.
t2.at "Z" . value_type . should_equal Value_Type.Float
Test.specify "will not parse text columns" <|
group_builder.specify "will not parse text columns" <|
t1 = table_builder [["X", ["1", "2", "3"]]]
c2 = t1.at "X" . auto_value_type
c2.value_type . should_equal Value_Type.Char
Test.specify "will 'undo' a cast to Mixed" <|
group_builder.specify "will 'undo' a cast to Mixed" <|
t1 = table_builder [["X", [1, 2, 3]], ["Y", ["a", "b", "c"]]]
t2 = t1.cast ["X", "Y"] Value_Type.Mixed
t2.at "X" . value_type . should_equal Value_Type.Mixed
@ -642,7 +724,7 @@ spec setup =
t3.at "X" . value_type . should_equal Value_Type.Integer
t3.at "Y" . value_type . should_equal Value_Type.Char
Test.specify "will choose Decimal type if all values are integers but cannot fit long" <|
group_builder.specify "will choose Decimal type if all values are integers but cannot fit long" <|
c0 = table_builder [["X", [My_Type.Value 42, 1, 2, (2^100)+1]]] . at "X"
c1 = c0.drop 1
@ -651,7 +733,7 @@ spec setup =
c2.value_type . should_be_a (Value_Type.Decimal ...)
c2.to_vector . should_equal [1, 2, (2^100)+1]
Test.specify "will try to find the smallest integer type to fit the value (if shrink_types=True)" <|
group_builder.specify "will try to find the smallest integer type to fit the value (if shrink_types=True)" <|
[False, True].each is_mixed->
prefix = if is_mixed then "mixed" else 0
t0 = table_builder [["X", [prefix, 1, 2, 3]], ["Y", [prefix, 2^20, 2, 3]], ["Z", [prefix, 2^50, 2, 3]], ["F", [prefix, 1.0, 2.0, 3.0]]]
@ -679,7 +761,7 @@ spec setup =
# Shrinking Floats also finds the smallest type that fits.
t3.at "F" . value_type . should_equal (Value_Type.Integer Bits.Bits_16)
Test.specify "will not return Byte columns by default, but should leave existing Byte columns intact" <|
group_builder.specify "will not return Byte columns by default, but should leave existing Byte columns intact" <|
c1 = table_builder [["X", [1, 2, 3]]] . at "X" . cast Value_Type.Byte
c1.value_type . should_equal Value_Type.Byte
@ -687,7 +769,7 @@ spec setup =
c2 = c1.auto_value_type shrink_types=shrink_types
c2.value_type . should_equal Value_Type.Byte
Test.specify "Decimal (scale=0, i.e. integer) columns should also be shrinked if possible and shrink_types=True" <|
group_builder.specify "Decimal (scale=0, i.e. integer) columns should also be shrinked if possible and shrink_types=True" <|
t0 = table_builder [["X", [2^100, 1, 2, 3]], ["Y", [10, 20, 2^100, 30]], ["Z", [1, 2, 3, 4]]] . cast "Z" (Value_Type.Decimal scale=0)
t1 = t0.drop 1
@ -708,7 +790,7 @@ spec setup =
t3.at "Y" . value_type . should_equal (Value_Type.Decimal scale=0)
t3.at "Z" . value_type . should_equal (Value_Type.Integer Bits.Bits_16)
Test.specify "if all text values have the same length, will change the type to fixed-length string (if shrink_types=True)" <|
group_builder.specify "if all text values have the same length, will change the type to fixed-length string (if shrink_types=True)" <|
[False, True].each is_mixed->
prefix = if is_mixed then 42 else "FOOBARBAZ"
c0 = table_builder [["X", [prefix, "aa", "bb", "cc"]]] . at "X"
@ -735,7 +817,7 @@ spec setup =
c6 = c4.auto_value_type shrink_types=True
c6.value_type . should_equal (Value_Type.Char size=1 variable_length=False)
Test.specify "if all text values are empty string, the type will remain unchanged" <|
group_builder.specify "if all text values are empty string, the type will remain unchanged" <|
c1 = table_builder [["X", ["", ""]]] . at "X"
c2 = c1.cast (Value_Type.Char size=100 variable_length=True)
@ -749,7 +831,7 @@ spec setup =
c2_b = c2.auto_value_type shrink_types=shrink_types
c2_b.value_type . should_equal (Value_Type.Char size=100 variable_length=True)
Test.specify "if all text values fit under 255 characters, will add a 255 length limit (if shrink_types=True)" <|
group_builder.specify "if all text values fit under 255 characters, will add a 255 length limit (if shrink_types=True)" <|
t1 = table_builder [["short_unbounded", ["a", "bb", "ccc"]], ["long_unbounded", ["a"*100, "b"*200, "c"*300]]]
t2 = t1 . set (t1.at "short_unbounded" . cast (Value_Type.Char size=1000)) "short_1000" . set (t1.at "short_unbounded" . cast (Value_Type.Char size=10)) "short_10" . set (t1.at "long_unbounded" . cast (Value_Type.Char size=400)) "long_400" . set (t1.at "short_unbounded" . cast Value_Type.Mixed) "short_mixed"
@ -774,7 +856,7 @@ spec setup =
t4.at "long_unbounded" . value_type . should_equal (Value_Type.Char size=Nothing variable_length=True)
t4.at "long_400" . value_type . should_equal (Value_Type.Char size=400 variable_length=True)
Test.specify "can deal with all-null columns" <|
group_builder.specify "can deal with all-null columns" <|
t0 = table_builder [["mix", [My_Type.Value 1, Nothing, Nothing]], ["int", [42, Nothing, Nothing]], ["str", ["a", Nothing, Nothing]], ["float", [1.5, Nothing, Nothing]], ["decimal", [2^100, 2^10, 2]]]
t1 = t0.drop 1
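
A note on the shape of these changes: every migrated group in this commit follows the same pattern — a lazy `Data` fixture, a `group_builder.teardown` hook, and a local `table_builder` that pins the group's connection. A minimal sketch of that pattern (the group name and spec body are illustrative; the rest mirrors the hunks above):

type Data
    Value ~data

    connection self = self.data.at 0

    setup create_connection_fn = Data.Value <|
        [create_connection_fn Nothing]

    teardown self = self.connection.close

add_specs suite_builder setup =
    create_connection_fn = setup.create_connection_func
    suite_builder.group "Example group" group_builder->
        data = Data.setup create_connection_fn
        group_builder.teardown <|
            data.teardown
        table_builder cols =
            setup.table_builder cols connection=data.connection
        group_builder.specify "an example spec" <|
            t = table_builder [["X", [1, 2, 3]]]
            t.at "X" . to_vector . should_equal [1, 2, 3]

The `~data` field is lazy, so the connection is only opened once the first spec in the group touches `data`; the old `spec setup =` entry point becomes `add_specs suite_builder setup =`, which registers groups instead of running them.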

View File

@ -9,17 +9,22 @@ import Standard.Table.Data.Expression.Expression_Error
from Standard.Database import all
from Standard.Database.Errors import Integrity_Error
from Standard.Test import Test, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Common_Table_Operations.Util import run_default_backend
main = run_default_backend spec
main = run_default_backend add_specs
spec setup =
prefix = setup.prefix
table_builder = setup.table_builder
table_fn =
type Data
Value ~data
connection self = self.data.at 0
table self = self.data.at 1
setup create_connection_fn table_builder = Data.Value <|
connection = create_connection_fn Nothing
table =
col1 = ["foo", [1,2,3]]
col2 = ["bar", [4,5,6]]
col3 = ["Baz", [7,8,9]]
@ -27,107 +32,165 @@ spec setup =
col5 = ["foo 2", [13,14,15]]
col6 = ["ab.+123", [16,17,18]]
col7 = ["abcd123", [19,20,21]]
table_builder [col1, col2, col3, col4, col5, col6, col7]
table = table_fn
table_builder [col1, col2, col3, col4, col5, col6, col7] connection=connection
[connection, table]
Test.group prefix+"Table.at" <|
Test.specify "should allow selecting columns by name" <|
column_1 = table.at "bar"
teardown self =
self.connection.close
type Rows_Data
Value ~data
connection self = self.data.at 0
table self = self.data.at 1
setup create_connection_fn table_builder = Rows_Data.Value <|
connection = create_connection_fn Nothing
table = table_builder [["X", [1, 2, 3, 4]], ["Y", [5, 6, 7, 8]], ["Z", ["A", "B", "C", "D"]]] connection=connection
[connection, table]
teardown self =
self.connection.close
type Read_Data
Value ~data
connection self = self.data.at 0
t_big self = self.data.at 1
t_small self = self.data.at 2
setup create_connection_fn table_builder = Read_Data.Value <|
connection = create_connection_fn Nothing
t_big = table_builder [["X", (0.up_to 1500)]] connection=connection
t_small = table_builder [["X", (0.up_to 10)]] connection=connection
[connection, t_big, t_small]
teardown self =
self.connection.close
add_specs suite_builder setup =
prefix = setup.prefix
create_connection_fn = setup.create_connection_func
suite_builder.group prefix+"Table.at" group_builder->
data = Data.setup create_connection_fn setup.table_builder
group_builder.teardown <|
data.teardown
group_builder.specify "should allow selecting columns by name" <|
column_1 = data.table.at "bar"
column_1.name . should_equal "bar"
column_1.to_vector . should_equal [4, 5, 6]
table.at "nonexistent column name" . should_fail_with No_Such_Column
data.table.at "nonexistent column name" . should_fail_with No_Such_Column
Test.specify "should allow selecting columns by index" <|
column_1 = table.at
group_builder.specify "should allow selecting columns by index" <|
column_1 = data.table.at
column_1.name . should_equal "foo"
column_1.to_vector . should_equal [1, 2, 3]
column_2 = table.at 2
column_2 = data.table.at 2
column_2.name . should_equal "Baz"
column_2.to_vector . should_equal [7, 8, 9]
column_3 = table.at -1
column_3 = data.table.at -1
column_3.name . should_equal "abcd123"
column_3.to_vector . should_equal [19, 20, 21]
column_4 = table.first_column
column_4 = data.table.first_column
column_4.name . should_equal "foo"
column_4.to_vector . should_equal [1, 2, 3]
column_5 = table.second_column
column_5 = data.table.second_column
column_5.name . should_equal "bar"
column_5.to_vector . should_equal [4, 5, 6]
column_6 = table.last_column
column_6 = data.table.last_column
column_6.name . should_equal "abcd123"
column_6.to_vector . should_equal [19, 20, 21]
table.at 100 . should_fail_with Index_Out_Of_Bounds
data.table.at 100 . should_fail_with Index_Out_Of_Bounds
Test.specify "should fail with Type Error is not an Integer or Text" <|
table.at (Pair.new 1 2) . should_fail_with Illegal_Argument
table.at (Pair.new 1 2) . catch . to_display_text . should_equal "Illegal Argument: expected 'selector' to be either a Text or an Integer, but got Pair."
group_builder.specify "should fail with Type Error is not an Integer or Text" <|
data.table.at (Pair.new 1 2) . should_fail_with Illegal_Argument
data.table.at (Pair.new 1 2) . catch . to_display_text . should_equal "Illegal Argument: expected 'selector' to be either a Text or an Integer, but got Pair."
Test.group prefix+"Table.get" <|
Test.specify "should allow selecting columns by name" <|
column_1 = table.get "bar"
suite_builder.group prefix+"Table.get" group_builder->
data = Data.setup create_connection_fn setup.table_builder
group_builder.teardown <|
data.teardown
group_builder.specify "should allow selecting columns by name" <|
column_1 = data.table.get "bar"
column_1.name . should_equal "bar"
column_1.to_vector . should_equal [4, 5, 6]
table.get "nonexistent column name" . should_equal Nothing
table.get "nonexistent column name" column_1 . name . should_equal "bar"
data.table.get "nonexistent column name" . should_equal Nothing
data.table.get "nonexistent column name" column_1 . name . should_equal "bar"
Test.specify "should allow selecting columns by index" <|
column_1 = table.get
group_builder.specify "should allow selecting columns by index" <|
column_1 = data.table.get
column_1.name . should_equal "foo"
column_1.to_vector . should_equal [1, 2, 3]
column_2 = table.get 2
column_2 = data.table.get 2
column_2.name . should_equal "Baz"
column_2.to_vector . should_equal [7, 8, 9]
column_3 = table.get -1
column_3 = data.table.get -1
column_3.name . should_equal "abcd123"
column_3.to_vector . should_equal [19, 20, 21]
table.get 100 . should_equal Nothing
table.get 100 column_1 . name . should_equal "foo"
data.table.get 100 . should_equal Nothing
data.table.get 100 column_1 . name . should_equal "foo"
Test.specify "should fail with Type Error is not an Integer or Text" <|
table.get (Pair.new 1 2) . should_fail_with Illegal_Argument
table.get (Pair.new 1 2) . catch . to_display_text . should_equal "Illegal Argument: expected 'selector' to be either a Text or an Integer, but got Pair."
group_builder.specify "should fail with Type Error is not an Integer or Text" <|
data.table.get (Pair.new 1 2) . should_fail_with Illegal_Argument
data.table.get (Pair.new 1 2) . catch . to_display_text . should_equal "Illegal Argument: expected 'selector' to be either a Text or an Integer, but got Pair."
Test.group prefix+"Table.set" <|
Test.specify "should allow adding a column" <|
bar2 = table.get "bar" . rename "bar2"
t2 = table.set bar2
suite_builder.group prefix+"Table.set" group_builder->
data = Data.setup create_connection_fn setup.table_builder
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "should allow adding a column" <|
bar2 = data.table.get "bar" . rename "bar2"
t2 = data.table.set bar2
t2.column_names . should_equal ["foo", "bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "bar2"]
t2.get "bar2" . to_vector . should_equal [4, 5, 6]
t3 = t2.set bar2 "bar3"
t3.column_names . should_equal ["foo", "bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "bar2", "bar3"]
Test.specify "should not allow illegal column names" <|
table.set (table.get "bar") new_name='a\0b' . should_fail_with Invalid_Column_Names
group_builder.specify "should not allow illegal column names" <|
data.table.set (data.table.get "bar") new_name='a\0b' . should_fail_with Invalid_Column_Names
Test.specify "should allow replacing a column" <|
foo = table.get "bar" . rename "foo"
t2 = table.set foo
group_builder.specify "should allow replacing a column" <|
foo = data.table.get "bar" . rename "foo"
t2 = data.table.set foo
t2.column_names . should_equal ["foo", "bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"]
t2.get "foo" . to_vector . should_equal [4, 5, 6]
t3 = t2.set foo "bar3"
t3.column_names . should_equal ["foo", "bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "bar3"]
Test.specify "should allow adding a column" <|
bar2 = table.get "bar" . rename "bar2"
table.set bar2 set_mode=Set_Mode.Update . should_fail_with Missing_Column
group_builder.specify "should allow adding a column" <|
bar2 = data.table.get "bar" . rename "bar2"
data.table.set bar2 set_mode=Set_Mode.Update . should_fail_with Missing_Column
foo = table.get "bar" . rename "foo"
table.set foo set_mode=Set_Mode.Add . should_fail_with Existing_Column
foo = data.table.get "bar" . rename "foo"
data.table.set foo set_mode=Set_Mode.Add . should_fail_with Existing_Column
Test.specify "should not affect existing columns that depended on the old column being replaced" <|
group_builder.specify "should not affect existing columns that depended on the old column being replaced" <|
t1 = table_builder [["X", [1,2,3]]]
t2 = t1.set (t1.at "X" * 100) new_name="Y"
t3 = t2.set "[X] + 10" new_name="Z"
@ -137,13 +200,13 @@ spec setup =
t4.at "Y" . to_vector . should_equal [100, 200, 300]
t4.at "Z" . to_vector . should_equal [11, 12, 13]
Test.specify "should gracefully handle expression failures" <|
group_builder.specify "should gracefully handle expression failures" <|
t1 = table_builder [["X", [1,2,3]]]
t1.set "[unknown] + 10" new_name="Z" . should_fail_with No_Such_Column
t1.set "[[[[" . should_fail_with Expression_Error
t1.set "[[[[" . catch . should_be_a Expression_Error.Syntax_Error
Test.specify "should forward expression problems" <|
group_builder.specify "should forward expression problems" <|
t1 = table_builder [["X", [1.5, 2.0, 0.0]]]
r1 = t1.set "([X] == 2) || ([X] + 0.5 == 2)" on_problems=Problem_Behavior.Ignore
@ -165,7 +228,7 @@ spec setup =
problems2 = [Arithmetic_Error.Error "Division by zero (at rows [2])."]
Problems.test_problem_handling action2 problems2 tester2
Test.specify "should gracefully handle columns from different backends" <|
group_builder.specify "should gracefully handle columns from different backends" <|
t1 = table_builder [["A", [1, 2, 3]]]
alternative_connection = Database.connect (SQLite In_Memory)
t0 = (Table.new [["X", [3, 2, 1]]]).select_into_database_table alternative_connection "T0" temporary=True
@ -176,11 +239,19 @@ spec setup =
True -> r1.should_fail_with Integrity_Error
False -> r1.should_fail_with Illegal_Argument
Test.group prefix+"Table.column_names" <|
Test.specify "should return the names of all columns" <|
table.column_names . should_equal ["foo", "bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"]
suite_builder.group prefix+"Table.column_names" group_builder->
data = Data.setup create_connection_fn setup.table_builder
Test.specify "should allow weird column names in all backends" <|
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "should return the names of all columns" <|
data.table.column_names . should_equal ["foo", "bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"]
group_builder.specify "should allow weird column names in all backends" <|
columns = weird_names.map_with_index ix-> name->
[name, [100+ix, 2, 3]]
table = table_builder columns
@ -189,14 +260,23 @@ spec setup =
weird_names.map_with_index ix-> name->
table.at name . to_vector . should_equal [100+ix, 2, 3]
Test.group prefix+"Table.column_count" <|
Test.specify "should allow getting the column count" <|
table.column_count . should_equal 7
suite_builder.group prefix+"Table.column_count" group_builder->
data = Data.setup create_connection_fn setup.table_builder
Test.group prefix+"Table.rows" <|
table = table_builder [["X", [1, 2, 3, 4]], ["Y", [5, 6, 7, 8]], ["Z", ["A", "B", "C", "D"]]]
Test.specify "should allow to get a Vector of Table rows" <|
rows = table.rows
group_builder.teardown <|
data.teardown
group_builder.specify "should allow getting the column count" <|
data.table.column_count . should_equal 7
suite_builder.group prefix+"Table.rows" group_builder->
data = Rows_Data.setup create_connection_fn setup.table_builder
group_builder.teardown <|
data.teardown
group_builder.specify "should allow to get a Vector of Table rows" <|
rows = data.table.rows
rows.length . should_equal 4
first_row = rows.first
@ -227,52 +307,54 @@ spec setup =
rows.map .to_vector . should_equal [[1, 5, "A"], [2, 6, "B"], [3, 7, "C"], [4, 8, "D"]]
Test.specify "should let you get the first row" <|
first_row = table.first_row
group_builder.specify "should let you get the first row" <|
first_row = data.table.first_row
first_row . length . should_equal 3
first_row.at "X" . should_equal 1
first_row.at "Y" . should_equal 5
first_row.at "Z" . should_equal "A"
Test.specify "should let you get the second row" <|
second_row = table.second_row
group_builder.specify "should let you get the second row" <|
second_row = data.table.second_row
second_row . length . should_equal 3
second_row.at "X" . should_equal 2
second_row.at "Y" . should_equal 6
second_row.at "Z" . should_equal "B"
Test.specify "should let you get the last row" <|
last_row = table.last_row
group_builder.specify "should let you get the last row" <|
last_row = data.table.last_row
last_row . length . should_equal 3
last_row.at "X" . should_equal 4
last_row.at "Y" . should_equal 8
last_row.at "Z" . should_equal "D"
Test.specify "should fetch rows up to the specified limit" <|
table.rows max_rows=2 . map .to_vector . should_equal [[1, 5, "A"], [2, 6, "B"]]
group_builder.specify "should fetch rows up to the specified limit" <|
data.table.rows max_rows=2 . map .to_vector . should_equal [[1, 5, "A"], [2, 6, "B"]]
Test.specify "should correctly handle errors" <|
table.rows.at 5 . should_fail_with Index_Out_Of_Bounds
err = table.rows.at -6
group_builder.specify "should correctly handle errors" <|
data.table.rows.at 5 . should_fail_with Index_Out_Of_Bounds
err = data.table.rows.at -6
err.should_fail_with Index_Out_Of_Bounds
err.catch . should_equal (Index_Out_Of_Bounds.Error -6 4)
table.rows (max_rows=2) . at 2 . should_fail_with Index_Out_Of_Bounds
table.rows . at 0 . at -4 . should_fail_with Index_Out_Of_Bounds
table.rows . at 0 . at "unknown" . should_fail_with No_Such_Column
data.table.rows (max_rows=2) . at 2 . should_fail_with Index_Out_Of_Bounds
data.table.rows . at 0 . at -4 . should_fail_with Index_Out_Of_Bounds
data.table.rows . at 0 . at "unknown" . should_fail_with No_Such_Column
Test.group prefix+"Table.read" <|
t_big = table_builder [["X", (0.up_to 1500)]]
t_small = table_builder [["X", (0.up_to 10)]]
suite_builder.group prefix+"Table.read" group_builder->
data = Read_Data.setup create_connection_fn setup.table_builder
group_builder.teardown <|
data.teardown
has_default_row_limit = setup.is_database
Test.specify "should have a row limit by default and warn about it" <|
t_big.row_count . should_equal 1500
t_small.row_count . should_equal 10
group_builder.specify "should have a row limit by default and warn about it" <|
data.t_big.row_count . should_equal 1500
data.t_small.row_count . should_equal 10
t1 = t_big.read
t1 = data.t_big.read
case has_default_row_limit of
True ->
t1.row_count . should_equal 1000
@ -282,46 +364,46 @@ spec setup =
t1.row_count . should_equal 1500
Problems.assume_no_problems t1
t2 = t_small.read
t2 = data.t_small.read
t2.row_count . should_equal 10
Problems.assume_no_problems t2
Test.specify "should allow to set the row limit" <|
t1 = t_big.read max_rows=23
group_builder.specify "should allow to set the row limit" <|
t1 = data.t_big.read max_rows=23
t1.row_count . should_equal 23
w1 = Problems.expect_only_warning Not_All_Rows_Downloaded t1
w1.max_rows . should_equal 23
w1.to_display_text . should_contain "some rows have been dropped"
t2 = t_big.read max_rows=1500
t2 = data.t_big.read max_rows=1500
t2.row_count . should_equal 1500
Problems.assume_no_problems t2
t3 = t_small.read max_rows=1
t3 = data.t_small.read max_rows=1
t3.row_count . should_equal 1
w3 = Problems.expect_only_warning Not_All_Rows_Downloaded t3
w3.max_rows . should_equal 1
Test.specify "should allow to have no row limit" <|
t1 = t_big.read max_rows=Nothing
group_builder.specify "should allow to have no row limit" <|
t1 = data.t_big.read max_rows=Nothing
t1.row_count . should_equal 1500
Problems.assume_no_problems t1
Test.specify "should allow to turn off the warning" <|
t1 = t_big.read warn_if_more_rows=False
group_builder.specify "should allow to turn off the warning" <|
t1 = data.t_big.read warn_if_more_rows=False
t1.row_count . should_equal (if has_default_row_limit then 1000 else 1500)
Problems.assume_no_problems t1
t2 = t_big.read max_rows=123 warn_if_more_rows=False
t2 = data.t_big.read max_rows=123 warn_if_more_rows=False
t2.row_count . should_equal 123
Problems.assume_no_problems t2
t3 = t_big.read max_rows=12300 warn_if_more_rows=False
t3 = data.t_big.read max_rows=12300 warn_if_more_rows=False
t3.row_count . should_equal 1500
Problems.assume_no_problems t3
Test.specify "should also work as Column.read" <|
c1 = t_big.at "X"
group_builder.specify "should also work as Column.read" <|
c1 = data.t_big.at "X"
c1.length . should_equal 1500
r2 = c1.read
@ -349,25 +431,24 @@ spec setup =
r5.length . should_equal 3
Problems.assume_no_problems r5
if setup.is_database then Test.specify "should allow similar API on Connection.read" <|
connection = setup.connection
connection.query t_big.name . row_count . should_equal 1500
if setup.is_database then group_builder.specify "should allow similar API on Connection.read" <|
data.connection.query data.t_big.name . row_count . should_equal 1500
t1 = connection.read t_big.name
t1 = data.connection.read data.t_big.name
t1.row_count . should_equal 1000
w1 = Problems.expect_only_warning Not_All_Rows_Downloaded t1
w1.max_rows . should_equal 1000
t2 = connection.read t_big.name limit=42
t2 = data.connection.read data.t_big.name limit=42
t2.row_count . should_equal 42
w2 = Problems.expect_only_warning Not_All_Rows_Downloaded t2
w2.max_rows . should_equal 42
t3 = connection.read t_big.name limit=Nothing
t3 = data.connection.read data.t_big.name limit=Nothing
t3.row_count . should_equal 1500
Problems.assume_no_problems t3
t4 = connection.read t_big.name warn_if_more_rows=False
t4 = data.connection.read data.t_big.name warn_if_more_rows=False
t4.row_count . should_equal 1000
Problems.assume_no_problems t4
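
Where the old code shared one table across groups, each group now owns a fixture type shaped for its needs (`Data`, `Rows_Data`, `Read_Data` above). A sketch of a group consuming the `Read_Data` fixture, with an illustrative spec body:

suite_builder.group prefix+"Table.read" group_builder->
    data = Read_Data.setup create_connection_fn setup.table_builder
    group_builder.teardown <|
        data.teardown
    group_builder.specify "builds its tables on first use" <|
        data.t_big.row_count . should_equal 1500

Because the payload behind `Value ~data` is only forced on first access, a group that is filtered out of a run never opens its connection or uploads its tables.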

View File

@ -7,46 +7,68 @@ from Standard.Table.Errors import all
from Standard.Database.Errors import SQL_Error
from Standard.Test import Test, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Common_Table_Operations.Util import run_default_backend
main = run_default_backend spec
main = run_default_backend add_specs
spec setup =
type Data
Value ~data
connection self = self.data.at 0
table self = self.data.at 1
table2 self = self.data.at 2
setup create_connection_fn table_builder = Data.Value <|
connection = create_connection_fn Nothing
table = table_builder [["Key", ["x", "x", "x", "x", "y", "y", "y", "z", "z"]], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9]]] connection=connection
table2 = table_builder [["Group", ["A","B","A","B","A","B","A","B","A"]], ["Key", ["x", "x", "x", "x", "y", "y", "y", "z", "z"]], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9]]] connection=connection
[connection, table, table2]
teardown self =
self.connection.close
add_specs suite_builder setup =
prefix = setup.prefix
table_builder = setup.table_builder
create_connection_fn = setup.create_connection_func
db_todo = if setup.is_database.not then Nothing else "Table.cross_tab is not implemented yet in Database."
Test.group prefix+"Table.cross_tab" pending=db_todo <|
table = table_builder [["Key", ["x", "x", "x", "x", "y", "y", "y", "z", "z"]], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9]]]
table2 = table_builder [["Group", ["A","B","A","B","A","B","A","B","A"]], ["Key", ["x", "x", "x", "x", "y", "y", "y", "z", "z"]], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9]]]
suite_builder.group prefix+"Table.cross_tab" pending=db_todo group_builder->
data = Data.setup create_connection_fn setup.table_builder
Test.specify "should cross_tab counts by default using first column as names" <|
t1 = table.cross_tab [] "Key"
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "should cross_tab counts by default using first column as names" <|
t1 = data.table.cross_tab [] "Key"
t1.column_names . should_equal ["x", "y", "z"]
t1.row_count . should_equal 1
t1.at "x" . to_vector . should_equal [4]
t1.at "y" . to_vector . should_equal [3]
t1.at "z" . to_vector . should_equal [2]
Test.specify "should allow a different aggregate" <|
t1 = table.cross_tab [] "Key" values=[Sum "Value"]
group_builder.specify "should allow a different aggregate" <|
t1 = data.table.cross_tab [] "Key" values=[Sum "Value"]
t1.column_names . should_equal ["x", "y", "z"]
t1.row_count . should_equal 1
t1.at "x" . to_vector . should_equal [10]
t1.at "y" . to_vector . should_equal [18]
t1.at "z" . to_vector . should_equal [17]
Test.specify "should allow a custom expression for the aggregate" <|
t1 = table.cross_tab [] "Key" values=[Sum "[Value]*[Value]"]
group_builder.specify "should allow a custom expression for the aggregate" <|
t1 = data.table.cross_tab [] "Key" values=[Sum "[Value]*[Value]"]
t1.column_names . should_equal ["x", "y", "z"]
t1.row_count . should_equal 1
t1.at "x" . to_vector . should_equal [30]
t1.at "y" . to_vector . should_equal [110]
t1.at "z" . to_vector . should_equal [145]
Test.specify "should allow a chosen column" <|
group_builder.specify "should allow a chosen column" <|
t = table_builder [["Group", ["A","B","A","B","A","B","A","B","A"]], ["Species", ["x", "x", "x", "x", "y", "y", "y", "z", "z"]], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9]]]
t1 = t.cross_tab [] "Species"
t1.column_names . should_equal ["x", "y", "z"]
@ -62,8 +84,8 @@ spec setup =
t2.at "y" . to_vector . should_equal [3]
t2.at "z" . to_vector . should_equal [2]
Test.specify "should allow a grouping" <|
t1 = table2.cross_tab ["Group"] "Key"
group_builder.specify "should allow a grouping" <|
t1 = data.table2.cross_tab ["Group"] "Key"
t1.column_names . should_equal ["Group", "x", "y", "z"]
t1.row_count . should_equal 2
t1.at "Group" . to_vector . should_equal ["A", "B"]
@ -71,8 +93,8 @@ spec setup =
t1.at "y" . to_vector . should_equal [2, 1]
t1.at "z" . to_vector . should_equal [1, 1]
Test.specify "should allow a grouping by Aggregate_Column" <|
t1 = table2.cross_tab [Group_By "Group"] "Key"
group_builder.specify "should allow a grouping by Aggregate_Column" <|
t1 = data.table2.cross_tab [Group_By "Group"] "Key"
t1.column_names . should_equal ["Group", "x", "y", "z"]
t1.row_count . should_equal 2
t1.at "Group" . to_vector . should_equal ["A", "B"]
@ -80,9 +102,9 @@ spec setup =
t1.at "y" . to_vector . should_equal [2, 1]
t1.at "z" . to_vector . should_equal [1, 1]
table2.cross_tab [Sum "Group"] "Key" . should_fail_with Illegal_Argument
data.table2.cross_tab [Sum "Group"] "Key" . should_fail_with Illegal_Argument
Test.specify "should allow a grouping by Aggregate_Colum, with some empty bins" <|
group_builder.specify "should allow a grouping by Aggregate_Colum, with some empty bins" <|
table3 = table_builder [["Group", ["B","A","B","A","A"]], ["Key", ["x", "y", "y", "y", "z"]], ["Value", [4, 5, 6, 7, 8]]]
t1 = table3.cross_tab [Group_By "Group"] "Key"
t1.column_names . should_equal ["Group", "x", "y", "z"]
@ -92,8 +114,8 @@ spec setup =
t1.at "y" . to_vector . should_equal [2, 1]
t1.at "z" . to_vector . should_equal [1, 0]
Test.specify "should allow a grouping by text" <|
t1 = table2.cross_tab "Group" "Key"
group_builder.specify "should allow a grouping by text" <|
t1 = data.table2.cross_tab "Group" "Key"
t1.column_names . should_equal ["Group", "x", "y", "z"]
t1.row_count . should_equal 2
t1.at "Group" . to_vector . should_equal ["A", "B"]
@ -101,11 +123,11 @@ spec setup =
t1.at "y" . to_vector . should_equal [2, 1]
t1.at "z" . to_vector . should_equal [1, 1]
t2 = table2.cross_tab ["Group", "Group"] "Key"
t2 = data.table2.cross_tab ["Group", "Group"] "Key"
t2.column_names . should_equal ["Group", "x", "y", "z"]
Test.specify "should allow multiple values aggregates" <|
t1 = table.cross_tab [] "Key" values=[Count, Sum "Value"]
group_builder.specify "should allow multiple values aggregates" <|
t1 = data.table.cross_tab [] "Key" values=[Count, Sum "Value"]
t1.column_names . should_equal ["x Count", "x Sum", "y Count", "y Sum", "z Count", "z Sum"]
t1.row_count . should_equal 1
t1.at "x Count" . to_vector . should_equal [4]
@ -115,63 +137,63 @@ spec setup =
t1.at "z Count" . to_vector . should_equal [2]
t1.at "z Sum" . to_vector . should_equal [17]
Test.specify "should fail if name_column is not found" <|
err1 = table.cross_tab [] "Name"
group_builder.specify "should fail if name_column is not found" <|
err1 = data.table.cross_tab [] "Name"
err1.should_fail_with Missing_Input_Columns
err1.catch.criteria . should_equal ["Name"]
err2 = table.cross_tab [] 42
err2 = data.table.cross_tab [] 42
err2.should_fail_with Missing_Input_Columns
err2.catch.criteria . should_equal [42]
Test.specify "should fail if group-by contains missing columns" <|
err1 = table2.cross_tab ["Group", "Nonexistent Group", "OTHER"] "Key"
group_builder.specify "should fail if group-by contains missing columns" <|
err1 = data.table2.cross_tab ["Group", "Nonexistent Group", "OTHER"] "Key"
err1.should_fail_with Missing_Input_Columns
err1.catch.criteria . should_equal ["Nonexistent Group", "OTHER"]
err2 = table2.cross_tab [0, 42] "Key"
err2 = data.table2.cross_tab [0, 42] "Key"
err2.should_fail_with Missing_Input_Columns
err2.catch.criteria . should_equal [42]
Test.specify "should fail if aggregate values contain missing columns" <|
err1 = table.cross_tab [] "Key" values=[Count, Sum "Nonexistent Value", Sum "Value", Sum "OTHER"]
group_builder.specify "should fail if aggregate values contain missing columns" <|
err1 = data.table.cross_tab [] "Key" values=[Count, Sum "Nonexistent Value", Sum "Value", Sum "OTHER"]
err1.should_fail_with Invalid_Aggregate_Column
err1.catch.name . should_equal "Nonexistent Value"
err2 = table.cross_tab [] "Key" values=[Count, Sum "Nonexistent Value", Sum "Value", Sum 42]
err2 = data.table.cross_tab [] "Key" values=[Count, Sum "Nonexistent Value", Sum "Value", Sum 42]
err2.should_fail_with Missing_Input_Columns
err2.catch.criteria . should_equal [42]
Test.specify "should fail if aggregate values contain invalid expressions" <|
err1 = table.cross_tab [] "Key" values=[Sum "[MISSING]*10"]
group_builder.specify "should fail if aggregate values contain invalid expressions" <|
err1 = data.table.cross_tab [] "Key" values=[Sum "[MISSING]*10"]
err1.should_fail_with Invalid_Aggregate_Column
err1.catch.name . should_equal "[MISSING]*10"
err1.catch.expression_error . should_equal (No_Such_Column.Error "MISSING")
err2 = table.cross_tab [] "Key" values=[Sum "[[["]
err2 = data.table.cross_tab [] "Key" values=[Sum "[[["]
err2.should_fail_with Invalid_Aggregate_Column
err2.catch.name . should_equal "[[["
err2.catch.expression_error . should_be_a Expression_Error.Syntax_Error
Test.specify "should not allow Group_By for values" <|
err1 = table.cross_tab [] "Key" values=[Count, Group_By "Value"] on_problems=Problem_Behavior.Ignore
group_builder.specify "should not allow Group_By for values" <|
err1 = data.table.cross_tab [] "Key" values=[Count, Group_By "Value"] on_problems=Problem_Behavior.Ignore
err1.should_fail_with Illegal_Argument
Test.specify "should gracefully handle duplicate aggregate names" <|
action = table.cross_tab [] "Key" values=[Count new_name="Agg1", Sum "Value" new_name="Agg1"] on_problems=_
group_builder.specify "should gracefully handle duplicate aggregate names" <|
action = data.table.cross_tab [] "Key" values=[Count new_name="Agg1", Sum "Value" new_name="Agg1"] on_problems=_
tester table =
table.column_names . should_equal ["x Agg1", "x Agg1 1", "y Agg1", "y Agg1 1", "z Agg1", "z Agg1 1"]
problems = [Duplicate_Output_Column_Names.Error ["x Agg1", "y Agg1", "z Agg1"]]
Problems.test_problem_handling action problems tester
table3 = table2.rename_columns (Map.from_vector [["Group", "x"]])
table3 = data.table2.rename_columns (Map.from_vector [["Group", "x"]])
action3 = table3.cross_tab ["x"] "Key" on_problems=_
tester3 table =
table.column_names . should_equal ["x", "x 1", "y", "z"]
problems3 = [Duplicate_Output_Column_Names.Error ["x"]]
Problems.test_problem_handling action3 problems3 tester3
Test.specify "should allow non-Text columns to be used as name" <|
group_builder.specify "should allow non-Text columns to be used as name" <|
table = table_builder [["Key", [1, 1, 1, 2, 2, 1, 3, 3, 1]], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9]]]
t1 = table.cross_tab [] "Key"
t1.column_names . should_equal ["1", "2", "3"]
@ -180,7 +202,7 @@ spec setup =
t1.at "2" . to_vector . should_equal [2]
t1.at "3" . to_vector . should_equal [2]
Test.specify "should correctly handle uncommon characters in fields becoming column names" <|
group_builder.specify "should correctly handle uncommon characters in fields becoming column names" <|
table = table_builder [["Key", ["💡🎉🌻", "ąęź", "ąęź", '\n\n', "😊", "😊", "🌻", "😊", "🌻", " "]], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]]]
t1 = table.cross_tab [] "Key" . sort_columns
t1.column_names . should_equal ['\n\n', ' ', 'ąęź', '🌻', '💡🎉🌻', '😊']
@ -192,7 +214,7 @@ spec setup =
t1.at '\n\n' . to_vector . should_equal [1]
t1.at " " . to_vector . should_equal [1]
Test.specify "should report Floating_Point_Equality if the group or name column is floating point" <|
group_builder.specify "should report Floating_Point_Equality if the group or name column is floating point" <|
t = table_builder [["X", [1.5, 2.5, 2.5, 1.5]], ["Y", [1, 2, 3, 4]], ["Z", ["a", "b", "b", "b"]]]
t1 = t.cross_tab [] "X"
Problems.expect_warning Floating_Point_Equality t1
@ -209,7 +231,7 @@ spec setup =
t2.at "a" . to_vector . should_equal [1, 0]
t2.at "b" . to_vector . should_equal [1, 2]
Test.specify "should check types of aggregates" <|
group_builder.specify "should check types of aggregates" <|
t = table_builder [["Key", ["a", "a", "b", "b"]], ["ints", [1, 2, 3, 4]], ["texts", ["a", "b", "c", "d"]]]
[Problem_Behavior.Report_Error, Problem_Behavior.Report_Warning, Problem_Behavior.Ignore].each pb-> Test.with_clue "Problem_Behavior="+pb.to_text+" " <|
@ -220,7 +242,7 @@ spec setup =
t3 = t.cross_tab [] "Key" values=[Concatenate "ints"] on_problems=pb
t3.should_fail_with Invalid_Value_Type
Test.specify "should return predictable types" <|
group_builder.specify "should return predictable types" <|
table = table_builder [["Text", ["a", "b"]], ["Int", [1, 2]], ["Float", [1.1, 2.2]]]
t1 = table.cross_tab ["Int"] "Text"
t1.column_names . should_equal ["Int", "a", "b"]
@ -235,18 +257,18 @@ spec setup =
t2.at "2 Average Float" . value_type . is_floating_point . should_be_true
t2.at "2 Concatenate Text" . value_type . is_text . should_be_true
Test.specify "should fail gracefully if an effective column name would contain invalid characters" <|
group_builder.specify "should fail gracefully if an effective column name would contain invalid characters" <|
table = table_builder [["Key", ['x', 'x', 'y\0', '\0', 'y\0', 'z', 'z', 'z', 'z']], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9]]]
r1 = table.cross_tab [] "Key"
r1.should_fail_with Invalid_Column_Names
r1.catch.to_display_text . should_contain "cannot contain the NUL character"
r2 = table2.cross_tab [] "Key" values=[Average "Value" new_name='x\0']
r2 = data.table2.cross_tab [] "Key" values=[Average "Value" new_name='x\0']
r2.should_fail_with Invalid_Column_Names
r2.catch.to_display_text . should_contain "cannot contain the NUL character"
Test.specify "should fail gracefully if an effective column name would be empty or null" <|
group_builder.specify "should fail gracefully if an effective column name would be empty or null" <|
table = table_builder [["Key", [" ", "x", "x", "x", "", "", "", "y", "y"]], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9]]]
r1 = table.cross_tab [] "Key"
r1.should_fail_with Invalid_Column_Names
@ -257,7 +279,7 @@ spec setup =
r2 . should_fail_with Invalid_Column_Names
r2.catch.to_display_text . should_contain "cannot be Nothing"
Test.specify "should fail gracefully if producing too many columns in a table" <|
group_builder.specify "should fail gracefully if producing too many columns in a table" <|
table = table_builder [["Key", 0.up_to 25000 . to_vector]]
r1 = table.cross_tab [] "Key"
r1 . should_fail_with Column_Count_Exceeded
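
The `pending` argument carries over from `Test.group` to `suite_builder.group` unchanged: a non-`Nothing` value marks the whole group as pending instead of executing it, which is how `db_todo` skips `cross_tab` on Database backends above. A sketch, assuming the builder keeps the old `pending` semantics:

db_todo = if setup.is_database.not then Nothing else "Table.cross_tab is not implemented yet in Database."
suite_builder.group prefix+"Table.cross_tab" pending=db_todo group_builder->
    group_builder.specify "is reported as pending, not run, on Database backends" <|
        # Never reached while the group is pending. `Test.fail` is assumed to be
        # re-exported by Test_New, like `Test.with_clue` used elsewhere in this commit.
        Test.fail "Should not be reached."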

View File

@ -6,28 +6,54 @@ from Standard.Table.Errors import Inexact_Type_Coercion, Invalid_Value_Type
from Standard.Database.Errors import Unsupported_Database_Operation
from Standard.Test import Test, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Common_Table_Operations.Util import all
main = run_default_backend spec
main = run_default_backend add_specs
spec setup =
type Data
Value ~data
connection self = self.data.at 0
dates self = self.data.at 1
times self = self.data.at 2
datetimes self = self.data.at 3
setup create_connection_fn table_builder = Data.Value <|
connection = create_connection_fn Nothing
dates = table_builder [["A", [Date.new 2020 12 31, Date.new 2024 2 29, Date.new 1990 1 1, Nothing]], ["X", [2020, 29, 1, 100]]] connection=connection
times = table_builder [["A", [Time_Of_Day.new 23 59 59 millisecond=567 nanosecond=123, Time_Of_Day.new 2 30 44 nanosecond=1002000, Time_Of_Day.new 0 0 0, Nothing]], ["X", [2020, 29, 1, 100]]] connection=connection
datetimes = table_builder [["A", [Date_Time.new 2020 12 31 23 59 59 millisecond=567 nanosecond=123, Date_Time.new 2024 2 29 2 30 44 nanosecond=1002000, Date_Time.new 1990 1 1 0 0 0, Nothing]], ["X", [2020, 29, 1, 100]]] connection=connection
[connection, dates, times, datetimes]
teardown self =
self.connection.close
add_specs suite_builder setup =
prefix = setup.prefix
table_builder = setup.table_builder
create_connection_fn = setup.create_connection_func
pending_datetime = if setup.test_selection.date_time.not then "Date/Time operations are not supported by this backend."
Test.group prefix+"Date-Time support" pending=pending_datetime <|
Test.specify "should allow to create Table with Date columns and round-trip them back to Enso" <|
suite_builder.group prefix+"Date-Time support" pending=pending_datetime group_builder->
data = Data.setup create_connection_fn setup.table_builder
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "should allow to create Table with Date columns and round-trip them back to Enso" <|
d = Date.new 2020 10 24
table = table_builder [["A", [d]], ["X", [123]]]
table.at "A" . value_type . should_equal Value_Type.Date
table.at "A" . to_vector . should_equal [d]
table.at "X" . to_vector . should_equal [123]
Test.specify "should allow to create Table with Time Of Day columns and round-trip them back to Enso" <|
group_builder.specify "should allow to create Table with Time Of Day columns and round-trip them back to Enso" <|
# We do not check nanoseconds, as nano support may be optional.
t = Time_Of_Day.new hour=12 minute=3 second=5 millisecond=6 microsecond=7
table = table_builder [["B", [t]], ["X", [123]]]
@ -35,7 +61,7 @@ spec setup =
table.at "B" . to_vector . should_equal [t]
table.at "X" . to_vector . should_equal [123]
Test.specify "should allow to create Table with Date Time columns and round-trip them back to Enso, preserving the exact instant in time (even if losing the timezone)" <|
group_builder.specify "should allow to create Table with Date Time columns and round-trip them back to Enso, preserving the exact instant in time (even if losing the timezone)" <|
dt1 = Date_Time.new 2020 10 24 1 2 3 millisecond=4 microsecond=5
offset_zone = Time_Zone.new hours=(-11) minutes=(-30)
dt2 = Date_Time.new 2021 11 23 1 2 3 zone=offset_zone
@ -49,12 +75,17 @@ spec setup =
table.at "C" . to_vector . should_equal_tz_agnostic dates
table.at "X" . to_vector . should_equal xs
Test.group prefix+"Date-Time operations" pending=pending_datetime <|
dates = table_builder [["A", [Date.new 2020 12 31, Date.new 2024 2 29, Date.new 1990 1 1, Nothing]], ["X", [2020, 29, 1, 100]]]
times = table_builder [["A", [Time_Of_Day.new 23 59 59 millisecond=567 nanosecond=123, Time_Of_Day.new 2 30 44 nanosecond=1002000, Time_Of_Day.new 0 0 0, Nothing]], ["X", [2020, 29, 1, 100]]]
datetimes = table_builder [["A", [Date_Time.new 2020 12 31 23 59 59 millisecond=567 nanosecond=123, Date_Time.new 2024 2 29 2 30 44 nanosecond=1002000, Date_Time.new 1990 1 1 0 0 0, Nothing]], ["X", [2020, 29, 1, 100]]]
Test.specify "should allow to get the year/month/day of a Date" <|
t = dates
suite_builder.group prefix+"Date-Time operations" pending=pending_datetime group_builder->
data = Data.setup create_connection_fn setup.table_builder
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "should allow to get the year/month/day of a Date" <|
t = data.dates
a = t.at "A"
a.year . to_vector . should_equal [2020, 2024, 1990, Nothing]
a.month . to_vector . should_equal [12, 2, 1, Nothing]
@ -68,8 +99,8 @@ spec setup =
((a.month) == (t.at "X")).to_vector . should_equal [False, False, True, Nothing]
((a.day) == (t.at "X")).to_vector . should_equal [False, True, True, Nothing]
Test.specify "should allow to get the year/month/day of a Date_Time" <|
t = datetimes
group_builder.specify "should allow to get the year/month/day of a Date_Time" <|
t = data.datetimes
a = t.at "A"
a.year . to_vector . should_equal [2020, 2024, 1990, Nothing]
a.month . to_vector . should_equal [12, 2, 1, Nothing]
@ -82,15 +113,15 @@ spec setup =
((a.month) == (t.at "X")).to_vector . should_equal [False, False, True, Nothing]
((a.day) == (t.at "X")).to_vector . should_equal [False, True, True, Nothing]
Test.specify "should allow to evaluate expressions with year/month/day" <|
group_builder.specify "should allow to evaluate expressions with year/month/day" <|
t = table_builder [["A", [Date.new 2020 12 31, Date.new 2024 2 29, Date.new 1990 1 1, Nothing]], ["X", [0, 2, 1, 100]], ["B", [Date_Time.new 2020 10 31 23 59 59, Date_Time.new 2024 4 29 2 30 44, Date_Time.new 1990 10 1 0 0 0, Nothing]]]
c = t.evaluate_expression "year([A]) + [X] + day([A]) * month([B])"
Test.with_clue "The column "+c.name+" value type ("+c.value_type.to_display_text+") should be an integer: " <|
c.value_type.is_integer.should_be_true
c.to_vector . should_equal [(2020 + 0 + 31 * 10), (2024 + 2 + 29 * 4), (1990 + 1 + 1 * 10), Nothing]
Test.specify "should allow to get hour/minute/second of a Time_Of_Day" <|
a = times.at "A"
group_builder.specify "should allow to get hour/minute/second of a Time_Of_Day" <|
a = data.times.at "A"
a.hour . to_vector . should_equal [23, 2, 0, Nothing]
a.minute . to_vector . should_equal [59, 30, 0, Nothing]
a.second . to_vector . should_equal [59, 44, 0, Nothing]
@ -103,8 +134,8 @@ spec setup =
Test.with_clue "The column "+c.name+" value type ("+c.value_type.to_display_text+") should be an integer: " <|
c.value_type.is_integer.should_be_true
Test.specify "should allow to get hour/minute/second of a Date_Time" <|
a = datetimes.at "A"
group_builder.specify "should allow to get hour/minute/second of a Date_Time" <|
a = data.datetimes.at "A"
a.hour . to_vector . should_equal [23, 2, 0, Nothing]
a.minute . to_vector . should_equal [59, 30, 0, Nothing]
a.second . to_vector . should_equal [59, 44, 0, Nothing]
@ -117,8 +148,8 @@ spec setup =
Test.with_clue "The column "+c.name+" value type ("+c.value_type.to_display_text+") should be an integer: " <|
c.value_type.is_integer.should_be_true
Test.specify "should allow to get millisecond/nanosecond of Time_Of_Day through date_part" <|
a = times.at "A"
group_builder.specify "should allow to get millisecond/nanosecond of Time_Of_Day through date_part" <|
a = data.times.at "A"
a.date_part Time_Period.Second . to_vector . should_equal [59, 44, 0, Nothing]
a.date_part Time_Period.Millisecond . to_vector . should_equal [567, 1, 0, Nothing]
a.date_part Time_Period.Microsecond . to_vector . should_equal [0, 2, 0, Nothing]
@ -132,8 +163,8 @@ spec setup =
Test.with_clue "The column "+c.name+" value type ("+c.value_type.to_display_text+") should be an integer: " <|
c.value_type.is_integer.should_be_true
Test.specify "should allow to get week/quarter of Date through date_part" <|
a = dates.at "A"
group_builder.specify "should allow to get week/quarter of Date through date_part" <|
a = data.dates.at "A"
a.date_part Date_Period.Quarter . to_vector . should_equal [4, 1, 1, Nothing]
a.date_part Date_Period.Week . to_vector . should_equal [53, 9, 1, Nothing]
@ -141,8 +172,8 @@ spec setup =
Test.with_clue "The column "+c.name+" value type ("+c.value_type.to_display_text+") should be an integer: " <|
c.value_type.is_integer.should_be_true
Test.specify "should allow to get various date_part of Date_Time" <|
a = datetimes.at "A"
group_builder.specify "should allow to get various date_part of Date_Time" <|
a = data.datetimes.at "A"
a.date_part Date_Period.Quarter . to_vector . should_equal [4, 1, 1, Nothing]
a.date_part Date_Period.Week . to_vector . should_equal [53, 9, 1, Nothing]
a.date_part Time_Period.Millisecond . to_vector . should_equal [567, 1, 0, Nothing]
@ -158,7 +189,7 @@ spec setup =
c.value_type.is_integer.should_be_true
Test.specify "should allow to compare dates" <|
group_builder.specify "should allow to compare dates" <|
t = table_builder [["X", [Date.new 2021 12 3]], ["Y", [Date.new 2021 12 5]]]
[(<), (<=), (>), (>=), (==), (!=)].each op->
@ -166,7 +197,7 @@ spec setup =
op (t.at "X") (t.at "Y") . to_vector . should_succeed
op (t.at "X") (Date.new 2021 12 4) . to_vector . should_succeed
Test.specify "should allow to compare date-times" <|
group_builder.specify "should allow to compare date-times" <|
t = table_builder [["X", [Date_Time.new 2021 12 3 12 30 0]], ["Y", [Date_Time.new 2021 12 5 12 30 0]]]
[(<), (<=), (>), (>=), (==), (!=)].each op->
@ -174,7 +205,7 @@ spec setup =
op (t.at "X") (t.at "Y") . to_vector . should_succeed
op (t.at "X") (Date_Time.new 2021 12 4 12 30 0) . to_vector . should_succeed
Test.specify "should allow to compare time-of-day" <|
group_builder.specify "should allow to compare time-of-day" <|
t = table_builder [["X", [Time_Of_Day.new 12 30 0]], ["Y", [Time_Of_Day.new 12 30 1]]]
[(<), (<=), (>), (>=), (==), (!=)].each op->
@ -182,7 +213,7 @@ spec setup =
op (t.at "X") (t.at "Y") . to_vector . should_succeed
op (t.at "X") (Time_Of_Day.new 12 30 0) . to_vector . should_succeed
Test.specify "should not allow to mix types in ordering comparisons" <|
group_builder.specify "should not allow to mix types in ordering comparisons" <|
t = table_builder [["X", [Date.new 2021 12 3]], ["Y", [Date_Time.new 2021 12 5 12 30 0]], ["Z", [Time_Of_Day.new 12 30 0]]]
[(<), (<=), (>), (>=)].each op->
@ -190,13 +221,13 @@ spec setup =
op (t.at "X") (t.at "Z") . should_fail_with Invalid_Value_Type
if setup.test_selection.supports_time_duration then
Test.specify "should allow to subtract two Dates" <|
group_builder.specify "should allow to subtract two Dates" <|
t = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date.new 2021 12 5]]]
((t.at "Y") - (t.at "X")) . to_vector . should_equal [Period.new months=1 days=2]
((t.at "Y") - (Date.new 2020 12 5)) . to_vector . should_equal [Period.new years=1]
Test.specify "should allow to subtract two Date_Times" <|
group_builder.specify "should allow to subtract two Date_Times" <|
dx = Date_Time.new 2021 11 30 10 15 0
t = table_builder [["X", [dx]], ["Y", [Date_Time.new 2021 12 5 12 30 20]]]
@ -205,14 +236,14 @@ spec setup =
((t.at "Y") - (t.at "X")) . to_vector . should_equal [diff]
((t.at "Y") - dx) . to_vector . should_equal [diff]
Test.specify "should allow to subtract two Time_Of_Days" <|
group_builder.specify "should allow to subtract two Time_Of_Days" <|
t = table_builder [["X", [Time_Of_Day.new 10 15 0, Time_Of_Day.new 1 0 0]], ["Y", [Time_Of_Day.new 12 30 20, Time_Of_Day.new 0 0 0]]]
((t.at "Y") - (t.at "X")) . to_vector . should_equal [Duration.new hours=2 minutes=15 seconds=20, Duration.new hours=(-1) minutes=0 seconds=0]
((t.at "Y") - (Time_Of_Day.new 0 0 0)) . to_vector . should_equal [Duration.new hours=12 minutes=30 seconds=20, Duration.zero]
if setup.test_selection.supports_time_duration.not then
Test.specify "should report unsupported operation for subtracting date/time" <|
group_builder.specify "should report unsupported operation for subtracting date/time" <|
t1 = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date.new 2021 12 5]]]
t2 = table_builder [["X", [Date_Time.new 2021 11 3 10 15 0]], ["Y", [Date_Time.new 2021 12 5 12 30 20]]]
t3 = table_builder [["X", [Time_Of_Day.new 10 15 0, Time_Of_Day.new 1 0 0]], ["Y", [Time_Of_Day.new 12 30 20, Time_Of_Day.new 0 0 0]]]
@ -224,7 +255,7 @@ spec setup =
((t3.at "Y") - (t3.at "X")) . should_fail_with Unsupported_Database_Operation
((t3.at "Y") - (Time_Of_Day.new 0 0 0)) . should_fail_with Unsupported_Database_Operation
Test.specify "should report an Invalid_Value_Type error when subtracting mixed date/time types" <|
group_builder.specify "should report an Invalid_Value_Type error when subtracting mixed date/time types" <|
t = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date_Time.new 2021 12 5 12 30 0]], ["Z", [Time_Of_Day.new 12 30 0]]]
((t.at "Y") - (t.at "X")) . should_fail_with Invalid_Value_Type
@ -234,7 +265,7 @@ spec setup =
((t.at "Z") - (t.at "Y")) . should_fail_with Invalid_Value_Type
((t.at "Z") - (Date.new 2021 11 3)) . should_fail_with Invalid_Value_Type
Test.specify "should allow computing a SQL-like difference" <|
group_builder.specify "should allow computing a SQL-like difference" <|
t1 = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date.new 2021 12 5]]]
(t1.at "X").date_diff (t1.at "Y") Date_Period.Day . to_vector . should_equal [32]
@ -334,7 +365,7 @@ spec setup =
(t3.at "X").date_diff (t3.at "Y") Time_Period.Nanosecond . should_fail_with Unsupported_Database_Operation
(t3.at "X").date_diff (Time_Of_Day.new 10 15 12 34 56 78) Time_Period.Nanosecond . should_fail_with Unsupported_Database_Operation
Test.specify "date_diff should return integers" <|
group_builder.specify "date_diff should return integers" <|
t = table_builder [["X", [Date.new 2021 01 31]], ["Y", [Time_Of_Day.new 12 30 20]], ["Z", [Date_Time.new 2021 12 5 12 30 20]]]
time_periods = [Time_Period.Hour, Time_Period.Minute, Time_Period.Second]
@ -349,7 +380,7 @@ spec setup =
(date_periods+time_periods).each p->
(t.at "Z").date_diff (Date_Time.new 2021 12 05 01 02) p . value_type . is_integer . should_be_true
Test.specify "should not allow mixing types in date_diff" <|
group_builder.specify "should not allow mixing types in date_diff" <|
t = table_builder [["X", [Date.new 2021 01 31]], ["Y", [Time_Of_Day.new 12 30 20]], ["Z", [Date_Time.new 2021 12 5 12 30 20]]]
(t.at "X").date_diff (t.at "Y") Date_Period.Day . should_fail_with Invalid_Value_Type
(t.at "Z").date_diff (t.at "X") Date_Period.Day . should_fail_with Invalid_Value_Type
@ -361,7 +392,7 @@ spec setup =
(t.at "Y").date_diff (Date.new 2021 12 5) Date_Period.Day . should_fail_with Invalid_Value_Type
(t.at "Z").date_diff (Time_Of_Day.new 12 30 20) Time_Period.Hour . should_fail_with Invalid_Value_Type
Test.specify "should allow an SQL-like shift" <|
group_builder.specify "should allow an SQL-like shift" <|
t1 = table_builder [["X", [Date.new 2021 01 31, Date.new 2021 01 01, Date.new 2021 12 31]], ["Y", [5, -1, 0]]]
(t1.at "X").date_add (t1.at "Y") Date_Period.Day . to_vector . should_equal [Date.new 2021 02 05, Date.new 2020 12 31, Date.new 2021 12 31]
(t1.at "X").date_add -1 Date_Period.Day . to_vector . should_equal [Date.new 2021 01 30, Date.new 2020 12 31, Date.new 2021 12 30]
@ -427,11 +458,11 @@ spec setup =
# and defaults to Hour for time-of-day
(t3.at "X").date_add (t3.at "Y") . to_vector . should_equal [Time_Of_Day.new 17 30 0, Time_Of_Day.new 22 45 0, Time_Of_Day.new 1 30 0]
Test.specify "should check shift_amount type in date_add" <|
group_builder.specify "should check shift_amount type in date_add" <|
t = table_builder [["X", [Date.new 2021 01 31]]]
t.at "X" . date_add "text" Date_Period.Day . should_fail_with Invalid_Value_Type
Test.specify "date_diff and date_add should work correctly with DST" pending="May be failing on some Database configurations. ToDo: investigate - https://github.com/enso-org/enso/issues/7326" <|
group_builder.specify "date_diff and date_add should work correctly with DST" pending="May be failing on some Database configurations. ToDo: investigate - https://github.com/enso-org/enso/issues/7326" <|
zone = Time_Zone.parse "Europe/Warsaw"
dt1 = Date_Time.new 2023 03 26 00 30 00 zone=zone
t = table_builder [["X", [dt1]]]
@ -469,13 +500,21 @@ spec setup =
(t2.at "X").date_diff dt4 Time_Period.Hour . to_vector . should_equal [23]
if setup.test_selection.date_time.not then
Test.group prefix+"partial Date-Time support" <|
Test.specify "will fail when uploading a Table containing Dates" <|
suite_builder.group prefix+"partial Date-Time support" group_builder->
data = Data.setup create_connection_fn setup.table_builder
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "will fail when uploading a Table containing Dates" <|
d = Date.new 2020 10 24
table = table_builder [["A", [d]], ["X", [123]]]
table.should_fail_with Unsupported_Database_Operation
Test.specify "should report a type error when date operations are invoked on a non-date column" <|
group_builder.specify "should report a type error when date operations are invoked on a non-date column" <|
t = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]], ["C", [True, False, True]]]
r1 = t.at "A" . year
r1.should_fail_with Invalid_Value_Type

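The hunks above establish the pattern this PR repeats across the test files: each group owns its database connection through a small Data type, registers a teardown that closes that connection once the group finishes, and shadows `table_builder` with a local wrapper that pins the group's connection. A condensed sketch of the shape, assembled from the pieces visible in this diff (the group name, column values, and spec body are illustrative only, and `from Standard.Test_New import all` is assumed at the top of the file):

    type Data
        Value ~connection

        setup create_connection_fn =
            Data.Value (create_connection_fn Nothing)

        teardown self = self.connection.close

    add_specs suite_builder setup =
        create_connection_fn = setup.create_connection_func
        suite_builder.group setup.prefix+"Example group" group_builder->
            data = Data.setup create_connection_fn
            group_builder.teardown <|
                data.teardown
            table_builder cols =
                setup.table_builder cols connection=data.connection
            group_builder.specify "an illustrative spec" <|
                t = table_builder [["X", [1, 2, 3]]]
                t.at "X" . to_vector . should_equal [1, 2, 3]

Because every spec in the group goes through the same `data`, the connection is shared within the group but no longer leaks across groups, which is what makes the per-group teardown possible.
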
View File

@ -6,19 +6,37 @@ import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
from Standard.Table import all
from Standard.Table.Errors import all
from Standard.Test import Test, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Common_Table_Operations.Util import all
main = run_default_backend spec
main = run_default_backend add_specs
spec setup =
type Data
Value ~connection
setup create_connection_fn =
Data.Value (create_connection_fn Nothing)
teardown self =
self.connection.close
add_specs suite_builder setup =
prefix = setup.prefix
table_builder = setup.table_builder
create_connection_fn = setup.create_connection_func
pending_datetime = if setup.test_selection.date_time.not then "Date/Time operations are not supported by this backend."
Test.group prefix+"Table.set with Column_Operation" <|
Test.specify "arithmetics" <|
suite_builder.group prefix+"Table.set with Column_Operation" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "arithmetics" <|
t = table_builder [["A", [1, 2]], ["B", [10, 40]]]
t.set (Column_Operation.Add (Column_Ref.Name "A") (Column_Ref.Name "B")) "C" . at "C" . to_vector . should_equal [11, 42]
t.set (Column_Operation.Add 100 (Column_Ref.Name "B")) "C" . at "C" . to_vector . should_equal [110, 140]
@ -53,7 +71,7 @@ spec setup =
Test.expect_panic Type_Error <| t.set (Column_Operation.Subtract "x" "y")
t.set (Column_Operation.Add 42 "y") . should_fail_with Illegal_Argument
Test.specify "rounding" <|
group_builder.specify "rounding" <|
t = table_builder [["A", [1.13333, 122.74463, 32.52424, -12.7]]]
t.set (Column_Operation.Round (Column_Ref.Name "A")) "Z" . at "Z" . to_vector . should_equal [1, 123, 33, -13]
t.set (Column_Operation.Round (Column_Ref.Name "A") precision=1) "Z" . at "Z" . to_vector . should_equal [1.1, 122.7, 32.5, -12.7]
@ -66,7 +84,7 @@ spec setup =
Test.expect_panic Type_Error <| t.set (Column_Operation.Round "1.23")
Test.expect_panic Type_Error <| t.set (Column_Operation.Truncate "1.23")
Test.specify "date/time" pending=pending_datetime <|
group_builder.specify "date/time" pending=pending_datetime <|
t = table_builder [["A", [Date_Time.new 2023 1 12 12 45, Date_Time.new 2020 5 12 1 45]], ["B", [Date_Time.new 2023 1 15 18 45, Date_Time.new 2020 6 12 22 20]], ["x", [1, 3]]]
# TODO ticket for truncate for DB
@ -98,7 +116,7 @@ spec setup =
t.set (Column_Operation.Date_Part (Column_Ref.Name "x") Date_Period.Year) . should_fail_with Invalid_Value_Type
Test.expect_panic Type_Error <| t2.set (Column_Operation.Date_Diff 42 "x" Date_Period.Year)
Test.specify "boolean" <|
group_builder.specify "boolean" <|
t = table_builder [["A", [True, False]], ["T", [True, True]]]
t.set (Column_Operation.And (Column_Ref.Name "A") (Column_Ref.Name "T")) "Z" . at "Z" . to_vector . should_equal [True, False]
@ -115,7 +133,7 @@ spec setup =
Test.expect_panic_with (t.set (Column_Operation.And 42 True)) Type_Error
Test.expect_panic_with (t.set (Column_Operation.Or (Column_Ref.Name "A") "x")) Type_Error
Test.specify "if" <|
group_builder.specify "if" <|
t = table_builder [["A", [1, 100]], ["B", [10, 40]], ["C", [23, 55]]]
t.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Greater than=(Column_Ref.Name "B"))) "Z" . at "Z" . to_vector . should_equal [False, True]
t.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Greater than=20) "T" "F") "Z" . at "Z" . to_vector . should_equal ["F", "T"]
@ -144,7 +162,7 @@ spec setup =
t2.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Not_In (Column_Ref.Name "B")) "TT" "FF") . should_fail_with Illegal_Argument
t2.set (Column_Operation.If (Column_Ref.Name "A") (Filter_Condition.Not_In [Column_Ref.Name "B", "X"]) "TT" "FF") . should_fail_with Illegal_Argument
Test.specify "text" <|
group_builder.specify "text" <|
t = table_builder [["A", [" a ", "b"]], ["B", ["c", " d "]]]
t.set (Column_Operation.Trim (Column_Ref.Name "A")) "Z" . at "Z" . to_vector . should_equal ["a", "b"]
@ -161,7 +179,7 @@ spec setup =
t2 = table_builder [["A", [42]]]
t2.set (Column_Operation.Trim (Column_Ref.Name "A")) . should_fail_with Invalid_Value_Type
Test.specify "min/max" <|
group_builder.specify "min/max" <|
t = table_builder [["A", [1, 20]], ["B", [10, 2]]]
t.set (Column_Operation.Min (Column_Ref.Name "A") (Column_Ref.Name "B")) "Z" . at "Z" . to_vector . should_equal [1, 2]
@ -189,18 +207,26 @@ spec setup =
t3.set (Column_Operation.Min (Column_Ref.Name "A") (Date.new 2003)) "Z" . at "Z" . to_vector . should_equal [Date.new 2002 12 10, Date.new 2003 01 01]
t3.set (Column_Operation.Max (Column_Ref.Name "A") (Date.new 2003)) "Z" . at "Z" . to_vector . should_equal [Date.new 2003 01 01, Date.new 2005 01 01]
Test.specify "allows also indexing columns numerically" <|
group_builder.specify "allows also indexing columns numerically" <|
t = table_builder [["X", [1, 2]], ["Y", [3, 4]]]
t.set (Column_Operation.Add (Column_Ref.Index 0) (Column_Ref.Index 1)) "Z" . at "Z" . to_vector . should_equal [4, 6]
Test.specify "will forward column resolution errors" <|
group_builder.specify "will forward column resolution errors" <|
t = table_builder [["X", [1, 2]], ["Y", [3, 4]]]
t.set (Column_Operation.Add (Column_Ref.Name "X") (Column_Ref.Name "Z")) . should_fail_with No_Such_Column
t.set (Column_Operation.Not (Column_Ref.Name "zzz")) . should_fail_with No_Such_Column
t.set (Column_Operation.Not (Column_Ref.Index 42)) . should_fail_with Index_Out_Of_Bounds
Test.group prefix+"Unique derived column names" <|
Test.specify "Should not disambiguate two derived columns that would otherwise have had the same name, with Set_Mode.Add_Or_Update" <|
suite_builder.group prefix+"Unique derived column names" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "Should not disambiguate two derived columns that would otherwise have had the same name, with Set_Mode.Add_Or_Update" <|
t = table_builder [["X", [1, 2, 3]]]
column_op = Column_Operation.Power 2 (Column_Ref.Name "X")
t2 = t.set column_op . set column_op
@ -208,7 +234,7 @@ spec setup =
t2.at "X" . to_vector . should_equal [1, 2, 3]
t2.at "[2] ^ [X]" . to_vector . should_equal [2, 4, 8]
Test.specify "Should disambiguate two derived columns that would otherwise have had the same name, with Set_Mode.Add" <|
group_builder.specify "Should disambiguate two derived columns that would otherwise have had the same name, with Set_Mode.Add" <|
t = table_builder [["X", [1, 2, 3]]]
column_op = Column_Operation.Power 2 (Column_Ref.Name "X")
t2 = t.set column_op set_mode=Set_Mode.Add . set column_op set_mode=Set_Mode.Add
@ -217,7 +243,7 @@ spec setup =
t2.at "[2] ^ [X]" . to_vector . should_equal [2, 4, 8]
t2.at "[2] ^ [X] 1" . to_vector . should_equal [2, 4, 8]
Test.specify "Should disambiguate two derived columns that would otherwise have had the same name, within the same expression" <|
group_builder.specify "Should disambiguate two derived columns that would otherwise have had the same name, within the same expression" <|
t = table_builder [["X", [1, 2, 3]]]
expression = "2 + (2 * 2) + (2 ^ [X])"
t2 = t.set expression
@ -225,7 +251,7 @@ spec setup =
t2.at "X" . to_vector . should_equal [1, 2, 3]
t2.at expression . to_vector . should_equal [8, 10, 14]
Test.specify "Should use .pretty to distinguish string constants from regular column names" <|
group_builder.specify "Should use .pretty to distinguish string constants from regular column names" <|
t = table_builder [["X", ["a", "b", "c"]]]
expression = '"foo" + [X] + "bar"'
t2 = t.set expression
@ -233,7 +259,7 @@ spec setup =
t2.at "X" . to_vector . should_equal ["a", "b", "c"]
t2.at expression . to_vector . should_equal ["fooabar", "foobbar", "foocbar"]
Test.specify "Should disambiguate between a column reference and a literal string" <|
group_builder.specify "Should disambiguate between a column reference and a literal string" <|
t = table_builder [["X", ["a", "b", "c"]]]
t2 = t.set (Column_Operation.Add "prefix" (Column_Ref.Name "X"))
t3 = t2.set (Column_Operation.Add "prefix" "X")
@ -242,13 +268,13 @@ spec setup =
t3.at "['prefix'] + [X]" . to_vector . should_equal ["prefixa", "prefixb", "prefixc"]
t3.at "['prefix'] + 'X'" . to_vector . should_equal ["prefixX", "prefixX", "prefixX"]
Test.specify "Should not disambiguate if set_mode is Update" <|
group_builder.specify "Should not disambiguate if set_mode is Update" <|
t = table_builder [["X", [1, 2, 3]]]
t2 = t.set (Column_Operation.Add (Column_Ref.Name "X") 1) set_mode=Set_Mode.Update
t2.column_names . should_equal ["X"]
t2.at "X" . to_vector . should_equal [2, 3, 4]
Test.specify "Should not disambiguate if set_mode is Add_Or_Update" <|
group_builder.specify "Should not disambiguate if set_mode is Add_Or_Update" <|
t = table_builder [["X", [1, 2, 3]], ["[X] + 1", [10, 20, 30]]]
# set_mode=Set_Mode.Add_Or_Update is the default
t2 = t.set (Column_Operation.Add (Column_Ref.Name "X") 1)
@ -256,7 +282,7 @@ spec setup =
t2.at "X" . to_vector . should_equal [1, 2, 3]
t2.at "[X] + 1" . to_vector . should_equal [2, 3, 4]
Test.specify "Should not disambiguate if the new name is explicitly set" <|
group_builder.specify "Should not disambiguate if the new name is explicitly set" <|
t = table_builder [["X", [1, 2, 3]]]
t2 = t.set (Column_Operation.Add (Column_Ref.Name "X") 1) new_name="X"
t2.column_names . should_equal ["X"]

View File

@ -3,18 +3,36 @@ from Standard.Base import all
from Standard.Table import Sort_Column
from Standard.Table.Errors import all
from Standard.Test import Test, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Common_Table_Operations.Util import run_default_backend
main = run_default_backend spec
main = run_default_backend add_specs
spec setup =
table_builder = setup.table_builder
type Data
Value ~connection
setup create_connection_fn = Data.Value <|
create_connection_fn Nothing
teardown self =
self.connection.close
add_specs suite_builder setup =
materialize = setup.materialize
Test.group setup.prefix+"Table.distinct" <|
Test.specify "should group by all columns by default" <|
create_connection_fn = setup.create_connection_func
suite_builder.group setup.prefix+"Table.distinct" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "should group by all columns by default" <|
a = ["A", ["a", "b", "a", "b", "a", "b"]]
b = ["B", [2, 1, 2, 2, 2, 1]]
t = table_builder [a, b]
@ -22,7 +40,7 @@ spec setup =
r.at "A" . to_vector . should_equal ["a", "b", "b"]
r.at "B" . to_vector . should_equal [2, 1, 2]
Test.specify "should allow to select distinct rows based on a subset of columns, returning any row from each group" <|
group_builder.specify "should allow to select distinct rows based on a subset of columns, returning any row from each group" <|
a = ["A", ["a", "a", "a", "a", "a", "a"]]
b = ["B", [1, 1, 2, 2, 1, 2]]
c = ["C", [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]]
@ -41,7 +59,7 @@ spec setup =
[0.3, 0.4, 0.6].contains (cv.at 1) . should_be_true
if setup.test_selection.distinct_returns_first_row_from_group_if_ordered then
Test.specify "should allow to select distinct rows based on a subset of columns, returning any first from each group if the table was ordered" <|
group_builder.specify "should allow to select distinct rows based on a subset of columns, returning any first from each group if the table was ordered" <|
a = ["A", ["a", "a", "a", "a", "a", "a"]]
b = ["B", [1, 1, 2, 2, 1, 2]]
c = ["C", [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]]
@ -52,7 +70,7 @@ spec setup =
r2.at "B" . to_vector . should_equal [1, 2]
r2.at "C" . to_vector . should_equal [0.5, 0.6]
Test.specify "should allow to control case-sensitivity of keys" <|
group_builder.specify "should allow to control case-sensitivity of keys" <|
x = ["X", ['A', 'a', 'enso', 'Enso', 'A']]
t1 = table_builder [x]
d1 = t1.distinct ["X"] on_problems=Report_Error |> materialize |> _.order_by ["X"]
@ -64,7 +82,7 @@ spec setup =
v.filter (_.equals_ignore_case "enso") . length . should_equal 1
v.filter (_.equals_ignore_case "a") . length . should_equal 1
Test.specify "should report a warning if the key contains floating point values" <|
group_builder.specify "should report a warning if the key contains floating point values" <|
t1 = table_builder [["X", [3.0, 1.0, 2.0, 2.0, 1.0]]]
action1 = t1.distinct on_problems=_
tester1 table =
@ -74,7 +92,7 @@ spec setup =
problems1 = [Floating_Point_Equality.Error "X"]
Problems.test_problem_handling action1 problems1 tester1
Test.specify "should handle nulls correctly" <|
group_builder.specify "should handle nulls correctly" <|
a = ["A", ["a", Nothing, "b", "a", "b", Nothing, "a", "b"]]
b = ["B", [1, 2, 3, 4, 5, 6, 7, 8]]
t = table_builder [a, b]
@ -92,7 +110,7 @@ spec setup =
va.at 2 . should_equal "b"
[3, 5, 8].contains (vb.at 2) . should_be_true
Test.specify "should report missing input columns" <|
group_builder.specify "should report missing input columns" <|
t1 = table_builder [["X", [1, 2, 3, 2, 2]]]
[Problem_Behavior.Ignore, Problem_Behavior.Report_Warning, Problem_Behavior.Report_Error].each pb->
t2 = t1.distinct ["Y", "Z"] on_problems=pb

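One detail of these Data types is easy to miss: the field is declared as `~connection`, so the constructor argument is suspended and `create_connection_fn` only runs when `data.connection` is first read, not when the group is registered with the suite builder. A minimal, self-contained illustration of that deferral (the printed markers are purely for demonstration, assuming standard Enso suspended-argument semantics):

    from Standard.Base import all

    type Data
        Value ~connection

    main =
        d = Data.Value <|
            IO.println "opening connection"
            "the-connection"
        IO.println "group registered"
        # Reading the field is what forces the suspended computation:
        IO.println d.connection

Here "group registered" prints before "opening connection", so building the suite stays cheap: a connection is only opened once a spec (or the teardown) actually touches it.
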
View File

@ -9,15 +9,15 @@ import Standard.Table.Data.Expression.Expression_Error
from Standard.Database.Errors import SQL_Error
from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Util import all
from project.Common_Table_Operations.Util import run_default_backend
main = run_default_backend (spec detailed=True)
main = run_default_backend (add_specs detailed=True)
spec detailed setup =
add_specs suite_builder detailed setup =
prefix = setup.prefix
table_builder = setup.table_builder
@ -50,32 +50,32 @@ spec detailed setup =
new_column.name . should_equal expression
specify_test label action expression_test=tester pending=Nothing = case pending of
specify_test label group_builder action expression_test=tester pending=Nothing = case pending of
Nothing ->
case detailed of
True ->
specify_tester expression value =
Test.specify (label + ": " + expression) <|
group_builder.specify (label + ": " + expression) <|
expression_test expression value
action specify_tester
False ->
Test.specify label (action expression_test)
_ -> Test.specify label Nothing pending
group_builder.specify label (action expression_test)
_ -> group_builder.specify label Nothing pending
Test.group prefix+"Expression Integer literals" <|
specify_test "should be able to add an integer column" expression_test->
suite_builder.group prefix+"Expression Integer literals" group_builder->
specify_test "should be able to add an integer column" group_builder expression_test->
expression_test "1" 1
expression_test "-3" -3
expression_test "1_000" 1000
Test.group prefix+"Expression Float literals" <|
specify_test "should be able to add an decimal column" expression_test->
suite_builder.group prefix+"Expression Float literals" group_builder->
specify_test "should be able to add an decimal column" group_builder expression_test->
expression_test "1.23" 1.23
expression_test "-3.1415" -3.1415
expression_test "1_000.456" 1000.456
Test.group prefix+"Expression Boolean literals" <|
specify_test "should be able to add a boolean column" expression_test->
suite_builder.group prefix+"Expression Boolean literals" group_builder->
specify_test "should be able to add a boolean column" group_builder expression_test->
expression_test "True" True
expression_test "true" True
expression_test "TRUE" True
@ -85,8 +85,8 @@ spec detailed setup =
expression_test "FALSE" False
expression_test "FaLsE" False
Test.group prefix+"Expression Text literals" <|
specify_test "should be able to add a text column" expression_test->
suite_builder.group prefix+"Expression Text literals" group_builder->
specify_test "should be able to add a text column" group_builder expression_test->
expression_test "'Hello World'" 'Hello World'
expression_test "'Hello \'World\''" "Hello 'World'"
expression_test '"Hello World"' 'Hello World'
@ -94,12 +94,12 @@ spec detailed setup =
expression_test '"Hello \\""World"""' 'Hello \\"World"'
expression_test "'Alpha\r\n\gBeta'" 'Alpha\r\n\\gBeta'
Test.group prefix+"Expression Text literals" <|
specify_test "should be able to get a Column" expression_test->
suite_builder.group prefix+"Expression Text literals" group_builder->
specify_test "should be able to get a Column" group_builder expression_test->
expression_test "[A]" (column_a.at 1)
expression_test "[Bad]] Name]" (column_odd.at 1)
Test.specify "should sanitize names" <|
group_builder.specify "should sanitize names" <|
t = table_builder [["X", ['\0', 'x', '']]]
c = t.evaluate_expression '[X] == \'\0\'' . catch SQL_Error
# We ignore the SQL error - some backends just do not support `\0` values. This is not the main point of this test.
@ -109,14 +109,14 @@ spec detailed setup =
c.name.should_equal "[X] == '\0'"
c.to_vector.should_equal [True, False, False]
Test.group prefix+"Expression Nothing literals" <|
specify_test "should be able to add an nothing column" expression_test->
suite_builder.group prefix+"Expression Nothing literals" group_builder->
specify_test "should be able to add an nothing column" group_builder expression_test->
expression_test "null" Nothing
expression_test "nUlL" Nothing
expression_test "Nothing" Nothing
expression_test "NOTHING" Nothing
Test.specify "should allow to create a NULL column" <|
group_builder.specify "should allow to create a NULL column" <|
t = table_builder [["X", [1, 2, 3]]]
c = t.evaluate_expression "null"
c.name . should_equal "null"
@ -132,7 +132,7 @@ spec detailed setup =
introduce a Value_Type.Null and make it accepted by all
`Value_Type.expect_*` checks.
See: https://github.com/enso-org/enso/issues/6281
Test.specify "a null column may fail typechecks that expect a concrete type" <|
group_builder.specify "a null column may fail typechecks that expect a concrete type" <|
t = table_builder [["X", [1, 2, 3]]]
c = t.evaluate_expression "null"
(c + c) . should_fail_with Illegal_Argument
@ -147,13 +147,13 @@ spec detailed setup =
t.evaluate_expression "[X] + Nothing" . to_vector . should_equal [Nothing, Nothing, Nothing]
t.evaluate_expression "Nothing + [X]" . should_fail_with Illegal_Argument
Test.group prefix+"Expression Date and Time literals" <|
specify_test "should be able to add a date or time column" pending=pending_datetime expression_test->
suite_builder.group prefix+"Expression Date and Time literals" group_builder->
specify_test "should be able to add a date or time column" group_builder pending=pending_datetime expression_test->
expression_test "#2020-12-23#" (Date.new 2020 12 23)
expression_test "#12:34#" (Time_Of_Day.new 12 34)
expression_test "#12:34:56#" (Time_Of_Day.new 12 34 56)
Test.specify "should be able to add a date-time column" pending=pending_datetime <|
group_builder.specify "should be able to add a date-time column" pending=pending_datetime <|
## We cannot just test equality as the Database may change the timezone,
so all we can do is check that the values are accepted and can be compared
with other values in the database.
@ -161,18 +161,18 @@ spec detailed setup =
c = t.evaluate_expression "#2020-12-23 12:34:56# == [X]"
c.to_vector . should_equal [True, False]
specify_test "should be able to add a time column with sub-millisecond accuracy" pending=pending_datetime expression_test->
specify_test "should be able to add a time column with sub-millisecond accuracy" group_builder pending=pending_datetime expression_test->
expression_test "#12:34:56.789#" (Time_Of_Day.new 12 34 56 789)
expression_test "#12:34:56.000123#" (Time_Of_Day.new 12 34 56 millisecond=0 microsecond=123 nanosecond=0)
if setup.is_database.not then
specify_test "should be able to add a date time column with timezone" expression_test->
specify_test "should be able to add a date time column with timezone" group_builder expression_test->
expression_test "#2020-12-23 12:34:56Z[UTC]#" (Date_Time.new 2020 12 23 12 34 56 zone=Time_Zone.utc)
expression_test "#2020-12-23 12:34:56+02:30[UTC]#" (Date_Time.new 2020 12 23 10 04 56 zone=Time_Zone.utc)
expression_test "#2020-12-23 12:34:56.157+01[UTC]#" (Date_Time.new 2020 12 23 11 34 56 157 zone=Time_Zone.utc)
expression_test "#2020-12-23T12:34[Europe/Warsaw]#" (Date_Time.new 2020 12 23 12 34 zone=Time_Zone.parse("Europe/Warsaw"))
Test.specify "should correctly handle timezones" pending=pending_datetime <|
group_builder.specify "should correctly handle timezones" pending=pending_datetime <|
## We cannot just test equality as the Database may change the timezone,
so all we can do is check that the values are accepted and can be compared
with other values in the database.
@ -182,8 +182,8 @@ spec detailed setup =
c2 = t.evaluate_expression "#2010-01-02 12:34:56[Europe/Warsaw]# == [X]"
c2.to_vector . should_equal [False, True]
Test.group prefix+"Expression Arithmetic" <|
specify_test "should be able to do basic arithmetic" expression_test->
suite_builder.group prefix+"Expression Arithmetic" group_builder->
specify_test "should be able to do basic arithmetic" group_builder expression_test->
expression_test "1+1" 2
expression_test "23-15" 8
expression_test "2.5*4.2" 10.5
@ -195,13 +195,13 @@ spec detailed setup =
expression_test "floor(5.3)" 5
expression_test "round(5.5)" 6
specify_test "should be able to do basic arithmetic with order" expression_test->
specify_test "should be able to do basic arithmetic with order" group_builder expression_test->
expression_test "1+1*2+2" 5
expression_test "23-15/3+6" 24
expression_test "52.92/4.2^2" 3
expression_test "(1+1)*2+2" 6
specify_test "should be able to do basic arithmetic with whitespace" expression_test->
specify_test "should be able to do basic arithmetic with whitespace" group_builder expression_test->
expression_test "1 + 1" 2
expression_test " 23 -15 " 8
expression_test "2.5* 4.2" 10.5
@ -211,8 +211,8 @@ spec detailed setup =
expression_test "1+1 * 2" 3
expression_test "1 + 1*2" 3
Test.group prefix+"Expression Column Arithmetic" <|
specify_test "should be able to perform arithmetic on columns" expression_test->
suite_builder.group prefix+"Expression Column Arithmetic" group_builder->
specify_test "should be able to perform arithmetic on columns" group_builder expression_test->
expression_test "[A] + 2" [3, 4, 5, 6, 7]
expression_test "[B] - 2" [-1, -0.5, 0.5, 2, 4]
expression_test "[A] * 4" [4, 8, 12, 16, 20]
@ -230,7 +230,7 @@ spec detailed setup =
expression_test "[A] ^ [B] * [A]" [1, 5.65685424949238, 46.7653718043597, 1024, 78125]
expression_test "[A] % [B] % [A]" [0, 0.5, 0.5, 0, 0]
specify_test "should be able to perform arithmetic on columns with order" expression_test->
specify_test "should be able to perform arithmetic on columns with order" group_builder expression_test->
expression_test "([A] + [B]) * 3" [6, 10.5, 16.5, 24, 33]
expression_test "[A] * (4 + [B])" [5, 11, 19.5, 32, 50]
expression_test "[A] * [B] + [A]" [2, 5, 10.5, 20, 35]
@ -242,8 +242,8 @@ spec detailed setup =
expression_test "([A] - [B]) ^ [A]" [0, 0.25, 0.125, 0, -1]
expression_test "[A] ^ ([B] - [A])" [1, 0.707106781186547, 0.577350269189626, 1, 5]
Test.group prefix+"Expression Comparison Operators" <|
specify_test "should be able to compare equality" expression_test->
suite_builder.group prefix+"Expression Comparison Operators" group_builder->
specify_test "should be able to compare equality" group_builder expression_test->
expression_test "2 = 1 + 1" True
expression_test "2 == 1 + 1" True
expression_test "[A] = 2" [False, True, False, False, False]
@ -253,7 +253,7 @@ spec detailed setup =
expression_test "[A] != 2" [True, False, True, True, True]
expression_test "[A] <> 2" [True, False, True, True, True]
specify_test "should be able to compare ordering" expression_test->
specify_test "should be able to compare ordering" group_builder expression_test->
expression_test "1 > 2" False
expression_test "1 < 2" True
expression_test "[A] > 2" [False, False, True, True, True]
@ -261,7 +261,7 @@ spec detailed setup =
expression_test "[A] < 2" [True, False, False, False, False]
expression_test "[A] <= 2" [True, True, False, False, False]
specify_test "should be able to use between" expression_test->
specify_test "should be able to use between" group_builder expression_test->
expression_test "1 + 1 BETWEEN 1 AND 3" True
expression_test "1 + 1 between 2 AND 3" True
expression_test "1 + 1 bETWEEN 1 AND 2" True
@ -269,7 +269,7 @@ spec detailed setup =
expression_test "1 + 1 NOT BETWEEN 1 AND 3" False
expression_test "[A] not between 2 AND 3" [True, False, False, True, True]
specify_test "should be able to use in" expression_test->
specify_test "should be able to use in" group_builder expression_test->
expression_test "1 + 1 IN (2, 4, 6)" True
expression_test "[A] IN (2, 4, 6)" [False, True, False, True, False]
expression_test "1 + 1 NOT IN (2, 4, 6)" False
@ -277,7 +277,7 @@ spec detailed setup =
expression_test "[A] IN (3)" [False, False, True, False, False]
expression_test "[A] NOT IN (3)" [True, True, False, True, True]
specify_test "should be able to check null" expression_test->
specify_test "should be able to check null" group_builder expression_test->
expression_test "1 IS NULL" False
expression_test "1 IS NoTHing" False
expression_test "Nothing IS NULL" True
@ -288,20 +288,20 @@ spec detailed setup =
expression_test "[A] IS NOT NULL" [True, True, True, True, True]
expression_test "[C] IS NOT NULL" [True, True, True, True, False]
specify_test "should be able to check empty" expression_test->
specify_test "should be able to check empty" group_builder expression_test->
expression_test "'Hello World' IS EMPTY" False
expression_test "'' IS EMPTY" True
expression_test "'Hello World' IS NOT EMPTY" True
expression_test "'' IS NOT EMPTY" False
Test.group prefix+"Expression Text Operators" <|
specify_test "should be able to concatenate text" expression_test->
suite_builder.group prefix+"Expression Text Operators" group_builder->
specify_test "should be able to concatenate text" group_builder expression_test->
expression_test "'Hello ' + 'World'" "Hello World"
expression_test "[C] + ' World'" ["Hello World", "World World", "Hello World! World", " World", Nothing]
expression_test "'Hello ' + [C]" ["Hello Hello", "Hello World", "Hello Hello World!", "Hello ", Nothing]
expression_test "[C] + [C]" ["HelloHello", "WorldWorld", "Hello World!Hello World!", "", Nothing]
specify_test "should be able to use like" expression_test->
specify_test "should be able to use like" group_builder expression_test->
expression_test "'Hello World' LIKE 'Hello%'" True
expression_test "'Hello' LIKE 'H_llo'" True
expression_test "'Hello' LIKE 'H_l%'" True
@ -311,63 +311,63 @@ spec detailed setup =
expression_test "[C] LIKE 'Hello%'" [True, False, True, False, Nothing]
expression_test "[C] NOT LIKE 'Hello%'" [False, True, False, True, Nothing]
Test.group prefix+"Expression Boolean Operators" <|
specify_test "should be able to AND booleans" expression_test->
suite_builder.group prefix+"Expression Boolean Operators" group_builder->
specify_test "should be able to AND booleans" group_builder expression_test->
expression_test "True && TRUE" True
expression_test "True && Nothing" Nothing
expression_test "True AND False" False
expression_test "True && [Bad]] Name]" [True, False, True, False, True]
expression_test "False AND [Bad]] Name]" False
specify_test "should be able to OR booleans" expression_test->
specify_test "should be able to OR booleans" group_builder expression_test->
expression_test "True || TRUE" True
expression_test "True OR False" True
expression_test "False OR False" False
expression_test "True OR [Bad]] Name]" True
expression_test "False || [Bad]] Name]" [True, False, True, False, True]
specify_test "should be able to NOT booleans" expression_test->
specify_test "should be able to NOT booleans" group_builder expression_test->
expression_test "!TRUE" False
expression_test "Not False" True
expression_test "NOT [Bad]] Name]" [False, True, False, True, False]
specify_test "should be able to use IF" expression_test->
specify_test "should be able to use IF" group_builder expression_test->
expression_test "IF True THEN 1 ELSE 0" 1
expression_test "IF False THEN 'A' ELSE 'B' END" 'B'
expression_test "IF [Bad]] Name] THEN [A] ELSE [B] ENDIF" [1, 1.5, 3, 4, 5]
Test.group prefix+"Function invocation" <|
specify_test "should be able to call a function with arguments" expression_test->
suite_builder.group prefix+"Function invocation" group_builder->
specify_test "should be able to call a function with arguments" group_builder expression_test->
expression_test "Not(True)" False
expression_test "not(False)" True
expression_test "iif(True, 1, 3)" 1
expression_test "iif([Bad]] Name], 2, 3)" [2, 3, 2, 3, 2]
specify_test "should be able to call a variable args function" expression_test->
specify_test "should be able to call a variable args function" group_builder expression_test->
expression_test "min(10, 3, 8)" 3
expression_test "max([A], [B], 3)" [3, 3, 3, 4, 6]
Test.group prefix+"Expression Errors should be handled" <|
suite_builder.group prefix+"Expression Errors should be handled" group_builder->
error_tester expression fail_ctor =
test_table.set expression new_name="NEW_COL" . should_fail_with Expression_Error
test_table.set expression new_name="NEW_COL" . catch . should_be_a fail_ctor
specify_test "should fail with Syntax_Error if badly formed" expression_test=error_tester expression_test->
specify_test "should fail with Syntax_Error if badly formed" group_builder expression_test=error_tester expression_test->
expression_test "IIF [A] THEN 1 ELSE 2" Expression_Error.Syntax_Error
expression_test "A + B" Expression_Error.Syntax_Error
expression_test "#2022-31-21#" Expression_Error.Syntax_Error
specify_test "should fail with Unsupported_Operation if not sufficient arguments" expression_test=error_tester expression_test->
specify_test "should fail with Unsupported_Operation if not sufficient arguments" group_builder expression_test=error_tester expression_test->
expression_test "unknown([C])" Expression_Error.Unsupported_Operation
specify_test "should fail with Argument_Mismatch if not sufficient arguments" expression_test=error_tester expression_test->
specify_test "should fail with Argument_Mismatch if not sufficient arguments" group_builder expression_test=error_tester expression_test->
expression_test "starts_with([C])" Expression_Error.Argument_Mismatch
specify_test "should fail with Argument_Mismatch if too many arguments" expression_test=error_tester expression_test->
specify_test "should fail with Argument_Mismatch if too many arguments" group_builder expression_test=error_tester expression_test->
expression_test "is_empty([C], 'Hello')" Expression_Error.Argument_Mismatch
Test.group prefix+"Expression Warnings should be reported" <|
Test.specify "should report floating point equality" <|
suite_builder.group prefix+"Expression Warnings should be reported" group_builder->
group_builder.specify "should report floating point equality" <|
t1 = table_builder [["X", [1.5, 2.0, 0.0]]]
r1 = t1.evaluate_expression "([X] == 2) || ([X] + 0.5 == 2)" on_problems=Problem_Behavior.Ignore
@ -387,7 +387,7 @@ spec detailed setup =
Problems.expect_warning Floating_Point_Equality r4
db_pending = if setup.is_database then "Arithmetic error handling is currently not implemented for the Database backend."
Test.specify "should report arithmetic errors" pending=db_pending <|
group_builder.specify "should report arithmetic errors" pending=db_pending <|
t1 = table_builder [["X", [1.5, 2.0, 0.0]]]
action1 = t1.evaluate_expression "3 / [X]" on_problems=_
@ -396,7 +396,7 @@ spec detailed setup =
problems1 = [Arithmetic_Error.Error "Division by zero (at rows [2])."]
Problems.test_problem_handling action1 problems1 tester1
Test.specify "already existing warnings should not be escalated to errors in error handling mode (1)" pending=db_pending <|
group_builder.specify "already existing warnings should not be escalated to errors in error handling mode (1)" pending=db_pending <|
t1 = table_builder [["X", [1.5, 2.0, 0.0]]]
c1 = t1.evaluate_expression "3 / [X]" on_problems=Problem_Behavior.Report_Warning
Problems.expect_warning Arithmetic_Error c1
@ -420,7 +420,7 @@ spec detailed setup =
# Should still keep the inherited warning from "Y".
Problems.expect_warning Arithmetic_Error t4
Test.specify "already existing warnings should not be escalated to errors in error handling mode (2)" <|
group_builder.specify "already existing warnings should not be escalated to errors in error handling mode (2)" <|
t1 = table_builder [["X", [1.5, 2.0, 0.0]]]
c1 = Warning.attach (Illegal_State.Error "FOO") (t1.evaluate_expression "[X] + 3.0")
Problems.expect_warning Illegal_State c1

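Two mechanics from the file above are worth spelling out. First, `add_specs` now takes `suite_builder` as its leading parameter, so entry points partially apply the remaining options, as in `main = run_default_backend (add_specs detailed=True)`. Second, helpers that used to call `Test.specify` directly now thread the `group_builder` of whichever group invokes them. A stripped-down rendering of the `specify_test` helper from this diff (the `detailed=True` branch is elided, and `tester` stands for the default expression checker defined earlier in the file):

    specify_test label group_builder action expression_test=tester pending=Nothing =
        case pending of
            Nothing -> group_builder.specify label (action expression_test)
            _ -> group_builder.specify label Nothing pending

    suite_builder.group prefix+"Expression Integer literals" group_builder->
        specify_test "should be able to add an integer column" group_builder expression_test->
            expression_test "1" 1

Passing `group_builder` explicitly is what lets a single helper register specs into many different groups; the old global `Test.specify` did this implicitly via the ambient suite state.
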
View File

@ -11,24 +11,43 @@ import Standard.Table.Data.Expression.Expression_Error
from Standard.Database.Errors import SQL_Error
from Standard.Test import Test, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Common_Table_Operations.Util import run_default_backend
main = run_default_backend spec
main = run_default_backend add_specs
type Data
Value ~connection
setup create_connection_fn =
Data.Value (create_connection_fn Nothing)
teardown self =
self.connection.close
## Currently these tests rely on filtering preserving the insertion ordering
within tables. This is not necessarily guaranteed by an RDBMS, so we may adapt
this in the future. For now we implicitly assume the ordering is preserved,
as that seems to be the case.
spec setup =
add_specs suite_builder setup =
prefix = setup.prefix
table_builder = setup.table_builder
test_selection = setup.test_selection
create_connection_fn = setup.create_connection_func
Test.group prefix+"Table.filter" <|
Test.specify "by integer comparisons" <|
suite_builder.group prefix+"Table.filter" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "by integer comparisons" <|
t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [100, 3, Nothing, 4, 12]], ["Y", [100, 4, 2, Nothing, 11]]]
t1 = t.filter "X" (Filter_Condition.Less than=10)
t1.at "ix" . to_vector . should_equal [2, 4]
@ -71,7 +90,7 @@ spec setup =
if test_selection.is_nan_and_nothing_distinct then
t.filter "X" (Filter_Condition.Is_Nan) . at "ix" . to_vector . should_equal []
Test.specify "by float operations" <|
group_builder.specify "by float operations" <|
t = table_builder [["ix", [1, 2, 3, 4, 5, 6]], ["X", [100.0, 2.5, Nothing, Number.nan, Number.positive_infinity, Number.negative_infinity]]]
t.filter "X" (Filter_Condition.Less than=10.0) . at "X" . to_vector . should_equal [2.5, Number.negative_infinity]
@ -90,7 +109,7 @@ spec setup =
if test_selection.is_nan_and_nothing_distinct then
t.filter "X" Filter_Condition.Is_Nan . at "ix" . to_vector . should_equal [4]
Test.specify "Not_Equal test cases" pending="Specification needs clarifying, see: https://github.com/enso-org/enso/issues/5241#issuecomment-1480167927" <|
group_builder.specify "Not_Equal test cases" pending="Specification needs clarifying, see: https://github.com/enso-org/enso/issues/5241#issuecomment-1480167927" <|
t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [100, 3, Nothing, 4, 12]], ["Y", [100, 4, 2, Nothing, 11]]]
t3 = t.filter "X" (Filter_Condition.Not_Equal to=100)
t3 . at "X" . to_vector . should_equal [3, Nothing, 4, 12]
@ -98,7 +117,7 @@ spec setup =
t.filter "X" (Filter_Condition.Not_Equal to=(t.at "Y")) . at "X" . to_vector . should_equal [3, Nothing, 4, 12]
t.filter "X" (Filter_Condition.Not_Equal to=(Column_Ref.Name "Y")) . at "X" . to_vector . should_equal [3, Nothing, 4, 12]
Test.specify "by text comparisons" <|
group_builder.specify "by text comparisons" <|
t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", ["abb", "baca", "b", Nothing, "c"]], ["Y", ["a", "b", "b", "c", "c"]]]
t1 = t.filter "X" (Filter_Condition.Less than="c")
t1.at "ix" . to_vector . should_equal [1, 2, 3]
@ -132,7 +151,7 @@ spec setup =
t2.filter "X" (Filter_Condition.Equal_Ignore_Case to="a") . at "X" . to_vector . should_equal ["A", "a"]
t2.filter "X" (Filter_Condition.Equal_Ignore_Case to=(Column_Ref.Name "Y")) . at "X" . to_vector . should_equal ["A", "b"]
Test.specify "by text search (contains, starts_with, ends_with, not_contains)" <|
group_builder.specify "by text search (contains, starts_with, ends_with, not_contains)" <|
t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", ["abb", "bacb", "banana", Nothing, "nana"]], ["Y", ["a", "B", "d", "c", "a"]], ["Z", ["aaaaa", "bbbbb", "[ab]", "[ab]aaaa", "[ab]ccc"]]]
t.filter "X" (Filter_Condition.Starts_With "ba") . at "X" . to_vector . should_equal ["bacb", "banana"]
@ -166,7 +185,7 @@ spec setup =
t.filter "X" (Filter_Condition.Contains (Column_Ref.Name "Y") Case_Sensitivity.Insensitive) . at "X" . to_vector . should_equal ["abb", "bacb", "nana"]
t.filter "X" (Filter_Condition.Not_Contains (Column_Ref.Name "Y") Case_Sensitivity.Insensitive) . at "X" . to_vector . should_equal ["banana"]
Test.specify "by text search (like, not_like)" <|
group_builder.specify "by text search (like, not_like)" <|
t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", ["abb", "bacb", "banana", Nothing, "nana"]], ["Y", ["a", "B", "d", "c", "a"]], ["Z", ["aaaaa", "bbbbb", "[ab]", "[ab]aaaa", "[ab]ccc"]]]
t.filter "X" (Filter_Condition.Like "%an%") . at "X" . to_vector . should_equal ["banana", "nana"]
@ -178,7 +197,7 @@ spec setup =
t.filter "X" (Filter_Condition.Not_Like "%b") . at "X" . to_vector . should_equal ["banana", "nana"]
t.filter "Z" (Filter_Condition.Not_Like "[ab]%") . at "Z" . to_vector . should_equal ["aaaaa", "bbbbb"]
Test.specify "text operations should also match newlines" <|
group_builder.specify "text operations should also match newlines" <|
t = table_builder [["X", ['a\n\n\n', 'a\n', 'a\n\n\nb', 'a\nb', 'caa\nbb']]]
t.filter "X" (Filter_Condition.Like 'a_') . at "X" . to_vector . should_equal ['a\n']
t.filter "X" (Filter_Condition.Like 'a%') . at "X" . to_vector . should_equal ['a\n\n\n', 'a\n', 'a\n\n\nb', 'a\nb']
@ -193,7 +212,7 @@ spec setup =
if test_selection.supports_unicode_normalization then
t = table_builder [["X", ['śnieg', 's\u0301nieg', 'X', Nothing, 'połać', 'połac\u0301']]]
Test.specify "text operations should support Unicode normalization" <|
group_builder.specify "text operations should support Unicode normalization" <|
t.filter "X" (Filter_Condition.Starts_With 'ś') . at "X" . to_vector . should_equal ['śnieg', 's\u0301nieg']
t.filter "X" (Filter_Condition.Contains 'ś') . at "X" . to_vector . should_equal ['śnieg', 's\u0301nieg']
t.filter "X" (Filter_Condition.Ends_With 'ś') . at "X" . to_vector . should_equal []
@ -204,15 +223,15 @@ spec setup =
t.filter "X" (Filter_Condition.Like 'ś%') . at "X" . to_vector . should_equal ['śnieg']
# This test is split off just to mark it as pending; once resolved, it can be merged with the one above.
Test.specify "text operations should support Unicode normalization (like)" pending='There is a bug with Java Regex in Unicode normalized mode (CANON_EQ) with quoting.\nhttps://bugs.java.com/bugdatabase/view_bug.do?bug_id=8032926' <|
group_builder.specify "text operations should support Unicode normalization (like)" pending='There is a bug with Java Regex in Unicode normalized mode (CANON_EQ) with quoting.\nhttps://bugs.java.com/bugdatabase/view_bug.do?bug_id=8032926' <|
t.filter "X" (Filter_Condition.Like 'ś%') . at "X" . to_vector . should_equal ['śnieg', 's\u0301nieg']
Test.specify "by empty text" <|
group_builder.specify "by empty text" <|
t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", ["abb", "", " ", Nothing, "nana"]]]
t.filter "X" Filter_Condition.Is_Empty . at "X" . to_vector . should_equal ["", Nothing]
t.filter "X" Filter_Condition.Not_Empty . at "X" . to_vector . should_equal ["abb", " ", "nana"]
Test.specify "should check types for text operations" <|
group_builder.specify "should check types for text operations" <|
t = table_builder [["ix", [1, 2, 3, 4]], ["X", [Nothing, "A", "", " "]]]
check_problem result =
result.should_fail_with Invalid_Value_Type
@ -255,7 +274,7 @@ spec setup =
check_scalar_type_error_handling (t.filter "X" (Filter_Condition.Not_Like 42))
check_scalar_type_error_handling (t.filter "X" (Filter_Condition.Not_Contains 42))
Test.specify "by nulls" <|
group_builder.specify "by nulls" <|
t = table_builder [["ix", [1, 2, 3, 4]], ["X", [Nothing, 1, Nothing, 4]]]
t1 = t.filter "X" Filter_Condition.Is_Nothing
t1.at "ix" . to_vector . should_equal [1, 3]
@ -265,7 +284,7 @@ spec setup =
t2.at "ix" . to_vector . should_equal [2, 4]
t2.at "X" . to_vector . should_equal [1, 4]
Test.specify "by an Is_In check" <|
group_builder.specify "by an Is_In check" <|
t = table_builder [["ix", [1, 2, 3, Nothing, 5, 6]], ["X", ["a", "b", "ccc", "X", "f", "2"]]]
t1 = table_builder [["txt", ["X", "a", "c", Nothing]], ["int", [Nothing, 2, 5, 4]], ["bool", [True, Nothing, Nothing, True]]]
t2 = table_builder [["txt", ["X", "a", "c", "q"]], ["int", [123, 2, 5, 4]], ["bool", [True, True, True, True]]]
@ -310,7 +329,7 @@ spec setup =
t3.filter "B" (Filter_Condition.Is_In [False]) . at "B" . to_vector . should_equal [False, False, False]
t3.filter "C" (Filter_Condition.Is_In [False, False]) . at "C" . to_vector . should_equal [False]
Test.specify "does not allow Column_Ref in Is_In/Not_In because that would be confusing" <|
group_builder.specify "does not allow Column_Ref in Is_In/Not_In because that would be confusing" <|
## Is In and Not In check if a value is contained anywhere in a provided collection (e.g. column),
NOT on a row-by-row basis like all other operations. Column_Ref is used with row-by-row ops,
so this would only cause confusion. Very rarely someone wants to filter a column by Is_In
@ -322,12 +341,12 @@ spec setup =
# If the user really wants this, they pass it as a raw column:
t.filter "A" (Filter_Condition.Is_In (t.at "B")) . at "A" . to_vector . should_equal [2, 3]
Test.specify "by a boolean mask" <|
group_builder.specify "by a boolean mask" <|
t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]]
t.filter "b" . at "ix" . to_vector . should_equal [1, 4, 5]
t.filter "b" Filter_Condition.Is_False . at "ix" . to_vector . should_equal [2]
Test.specify "should correctly mask all kinds of columns" <|
group_builder.specify "should correctly mask all kinds of columns" <|
t = table_builder [["ints", [1, 2, 3, Nothing, 4]], ["floats", [4.0, Nothing, 3.0, 2.0, 1.0]], ["bools", [False, False, True, Nothing, False]], ["strings", ["a", Nothing, "b", "c", "d"]], ["mask", [False, True, True, True, Nothing]]]
t2 = t.filter "mask"
t2.at "ints" . to_vector . should_equal [2, 3, Nothing]
@ -336,7 +355,7 @@ spec setup =
t2.at "strings" . to_vector . should_equal [Nothing, "b", "c"]
t2.at "mask" . to_vector . should_equal [True, True, True]
Test.specify "should check types of boolean operations" <|
group_builder.specify "should check types of boolean operations" <|
t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]]
check_problem result =
result.should_fail_with Invalid_Value_Type
@ -345,7 +364,7 @@ spec setup =
check_problem <| t.filter "ix" Filter_Condition.Is_True
check_problem <| t.filter "ix" Filter_Condition.Is_False
Test.specify "should check types of numeric operations" <|
group_builder.specify "should check types of numeric operations" <|
t = table_builder [["a", ["a", "b"]]]
check_problem result =
result.should_fail_with Invalid_Value_Type
@ -357,32 +376,32 @@ spec setup =
if test_selection.is_nan_and_nothing_distinct then
check_problem <| t.filter "a" Filter_Condition.Is_Nan
Test.specify "by a custom expression built from table's columns" <|
group_builder.specify "by a custom expression built from table's columns" <|
t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [10, 20, 13, 4, 5]], ["Y", [0, -100, 8, 2, 5]]]
t.filter (t.at "X" + t.at "Y" > 9) . at "ix" . to_vector . should_equal [1, 3, 5]
Test.specify "should handle selection errors: unknown column name" <|
group_builder.specify "should handle selection errors: unknown column name" <|
t = table_builder [["X", [10, 20, 13, 4, 5]]]
t.filter "unknown column" . should_fail_with No_Such_Column
t.filter "unknown column" . catch . should_equal (No_Such_Column.Error "unknown column")
Test.specify "should handle selection errors: out of bounds index" <|
group_builder.specify "should handle selection errors: out of bounds index" <|
t = table_builder [["X", [10, 20, 13, 4, 5]]]
t.filter 4 . should_fail_with Index_Out_Of_Bounds
t.filter 4 . catch . should_equal (Index_Out_Of_Bounds.Error 4 1)
Test.specify "should handle illegal arguments" <|
group_builder.specify "should handle illegal arguments" <|
t = table_builder [["X", [10, 20, 13, 4, 5]]]
Test.expect_panic_with (t.filter "X" "NOT A CONDITION") Type_Error
Test.specify "should nicely handle Filter_Condition with unapplied arguments" <|
group_builder.specify "should nicely handle Filter_Condition with unapplied arguments" <|
t = table_builder [["X", [10, 20, 13, 4, 5]]]
t.filter "X" (Filter_Condition.Equal) . should_fail_with Illegal_Argument
t.filter "X" (Filter_Condition.Starts_With) . should_fail_with Illegal_Argument
t.filter "X" (Filter_Condition.Between) . should_fail_with Illegal_Argument
t.filter "X" (Filter_Condition.Between 1) . should_fail_with Illegal_Argument
Test.specify "should report issues: floating point equality" <|
group_builder.specify "should report issues: floating point equality" <|
t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [10.0, 2.0001, 2.0, 4.5, 2.0]]]
r1 = t.filter "X" (Filter_Condition.Equal 2) on_problems=Problem_Behavior.Ignore
@ -400,7 +419,7 @@ spec setup =
r4.at "ix" . to_vector . should_equal [1, 2, 4]
Problems.expect_warning Floating_Point_Equality r4
Test.specify "already existing warnings should not be escalated to errors in error handling mode" <|
group_builder.specify "already existing warnings should not be escalated to errors in error handling mode" <|
t1 = table_builder [["X", [1.5, 2.0, 0.0]], ["ix", [1, 2, 3]]]
c1 = Warning.attach (Illegal_State.Error "FOO") (t1.evaluate_expression "3.0 + [X]")
Problems.expect_warning Illegal_State c1
@ -434,18 +453,26 @@ spec setup =
err2 = t2.filter "Y" (Filter_Condition.Not_Equal 5) on_problems=Problem_Behavior.Report_Error
err2.should_fail_with Floating_Point_Equality
Test.group prefix+"Table.filter_by_expression" <|
Test.specify "by a boolean column" <|
suite_builder.group prefix+"Table.filter_by_expression" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "by a boolean column" <|
t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]]
t.filter_by_expression "[b]" . at "ix" . to_vector . should_equal [1, 4, 5]
t.filter_by_expression "![b]" . at "ix" . to_vector . should_equal [2]
Test.specify "by an integer comparison" <|
group_builder.specify "by an integer comparison" <|
t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]]
t.filter_by_expression "[ix]==3" . at "ix" . to_vector . should_equal [3]
t.filter_by_expression "[ix]>2" . at "ix" . to_vector . should_equal [3, 4, 5]
Test.specify "fail gracefully" <|
group_builder.specify "fail gracefully" <|
t = table_builder [["ix", [1, 2, 3, 4, 5]], ["b", [True, False, Nothing, True, True]]]
t.filter_by_expression "[ix" . should_fail_with Expression_Error
t.filter_by_expression "[ix" . catch . should_be_a Expression_Error.Syntax_Error
@ -468,7 +495,7 @@ spec setup =
t.filter_by_expression "is_empty('', 42)" . should_fail_with Expression_Error
t.filter_by_expression "is_empty('', 42)" . catch . should_be_a Expression_Error.Argument_Mismatch
Test.specify "should report issues: floating point equality" <|
group_builder.specify "should report issues: floating point equality" <|
t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [10.0, 2.0001, 2.0, 4.5, -2.0]]]
r1 = t.filter_by_expression "[X] * [X] == 4.0" on_problems=Problem_Behavior.Ignore
Problems.assume_no_problems r1
@ -487,7 +514,7 @@ spec setup =
Problems.expect_warning Floating_Point_Equality r4
db_pending = if setup.is_database then "Arithmetic error handling is currently not implemented for the Database backend."
Test.specify "should report issues: arithmetic error" pending=db_pending <|
group_builder.specify "should report issues: arithmetic error" pending=db_pending <|
t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [2.0, 2.0, 0.0, 1.0, 2.0]]]
action = t.filter_by_expression "8.0 / [X] <= 4.0" on_problems=_
tester table =

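The file above also shows the two skip mechanisms, both carried over unchanged by the migration: a `pending=` argument on `specify` marks a spec as reported-but-not-run, while plain conditional registration never adds the spec (or a whole group) to the builder on backends that lack the feature. Schematically (the condition name is taken from this diff; the bodies are placeholders and `from Standard.Test_New import all` is assumed):

    suite_builder.group prefix+"Skipping examples" group_builder->
        # Reported as pending, with the reason shown in the output; never executed.
        group_builder.specify "a pending spec" pending="Tracked in an upstream issue." <|
            1.should_equal 1

        # Not registered at all on backends without the capability.
        if test_selection.is_nan_and_nothing_distinct then
            group_builder.specify "a conditionally registered spec" <|
                1.should_equal 1

The practical difference: a pending spec still appears in the report as skipped, whereas an unregistered one is invisible on backends where it does not apply.
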
View File

@ -4,19 +4,38 @@ from Standard.Base import all
from Standard.Table import all hiding Table
from Standard.Table.Data.Aggregate_Column.Aggregate_Column import Group_By, Count, Sum
from Standard.Test import Test, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Common_Table_Operations.Util import run_default_backend
from project.Common_Table_Operations.Core_Spec import weird_names
main = run_default_backend spec
main = run_default_backend add_specs
spec setup =
table_builder = setup.table_builder
type Data
Value ~connection
setup create_connection_fn =
Data.Value (create_connection_fn Nothing)
teardown self =
self.connection.close
add_specs suite_builder setup =
create_connection_fn = setup.create_connection_func
materialize = setup.materialize
Test.group setup.prefix+" Interactions Between various operations" <|
Test.specify "using weird column names with operations and filtering" <|
suite_builder.group setup.prefix+" Interactions Between various operations" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "using weird column names with operations and filtering" <|
columns = weird_names.map_with_index ix-> name->
[name, [100+ix, 200+ix, 300+ix]]
table = table_builder columns
@ -31,7 +50,7 @@ spec setup =
weird_names.map_with_index ix-> name->
result.at name . to_vector . should_equal [200+ix]
Test.specify "aggregates and joins" <|
group_builder.specify "aggregates and joins" <|
t1 = table_builder [["Count", [1, 2, 3]], ["Class", ["X", "Y", "Z"]]]
t2 = table_builder [["Letter", ["A", "B", "A", "A", "C", "A", "C", "D", "D", "B", "B"]]]
@ -44,7 +63,7 @@ spec setup =
rows.at 2 . should_equal ["C", 2, 2, "Y"]
rows.at 3 . should_equal ["D", 2, 2, "Y"]
Test.specify "aggregates and distinct" <|
group_builder.specify "aggregates and distinct" <|
t2 = table_builder [["Letter", ["A", "B", "A", "A", "C", "C"]], ["Points", [2, 5, 2, 1, 10, 3]]]
t3 = t2.aggregate [Group_By "Letter", Sum "Points"]
@ -59,7 +78,7 @@ spec setup =
a_or_b . should_be_true
rows.at 1 . should_equal ["C", 13]
Test.specify "aggregates and filtering" <|
group_builder.specify "aggregates and filtering" <|
t2 = table_builder [["Letter", ["A", "B", "A", "A", "C", "C", "B"]], ["Points", [2, 5, 2, 1, 10, 3, 0]]]
t3 = t2.aggregate [Group_By "Letter", Sum "Points"]
@ -69,7 +88,7 @@ spec setup =
rows.at 0 . should_equal ["A", 5]
rows.at 1 . should_equal ["B", 5]
Test.specify "aggregates and ordering" <|
group_builder.specify "aggregates and ordering" <|
t1 = table_builder [["Letter", ["C", "A", "B", "A", "A", "C", "C", "B"]], ["Points", [0, -100, 5, 2, 1, 10, 3, 0]]]
t2 = t1.aggregate [Group_By "Letter", Sum "Points"]
t3 = t2.order_by "Sum Points" |> materialize
@ -77,7 +96,7 @@ spec setup =
t3.at "Letter" . to_vector . should_equal ["A", "B", "C"]
t3.at "Sum Points" . to_vector . should_equal [-97, 5, 13]
Test.specify "distinct and ordering" <|
group_builder.specify "distinct and ordering" <|
t1 = table_builder [["X", [1, 2, 2, 1]], ["Y", ["a", "b", "b", "a"]], ["Z", [1, 2, 3, 4]]]
# These are 'adversarial' white-box examples constructed knowing that Postgres' DISTINCT ON does not play too well with ORDER BY and it needs to be handled carefully.
@ -89,11 +108,11 @@ spec setup =
t4.row_count . should_equal 2
if setup.test_selection.distinct_returns_first_row_from_group_if_ordered then
Test.specify "distinct and ordering if first row is returned after ordering" <|
group_builder.specify "distinct and ordering if first row is returned after ordering" <|
a = ["A", ["a", "a", "a", "a", "a", "a"]]
b = ["B", [1, 1, 2, 2, 1, 2]]
c = ["C", [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]]
t = table_builder [a, b, c] . order_by ([(Sort_Column.Name "C" Sort_Direction.Descending)])
t = (table_builder [a, b, c]) . order_by ([(Sort_Column.Name "C" Sort_Direction.Descending)])
t2 = t.distinct ["A", "B"] on_problems=Report_Error
# Now, reverse the order!
@ -112,7 +131,7 @@ spec setup =
It is easier to test this if we can rely on distinct returning
the first row; if it may return any row, it is harder to write
tests that distinguish the two cases (filter before and after).
Test.specify "distinct and filtering" <|
group_builder.specify "distinct and filtering" <|
a = ["A", ["a", "a", "b", "a", "b"]]
b = ["B", [1, 2, 5, 5, 2]]
c = ["C", [0.1, 0.2, 0.3, 0.4, 0.5]]
@ -136,7 +155,7 @@ spec setup =
## This mostly checks that various operations handle all kinds of Integer storage implementations
(add_row_number may use a different storage than regular columns)
if setup.is_database.not then Test.specify "add_row_number and other operations" <|
if setup.is_database.not then group_builder.specify "add_row_number and other operations" <|
t1 = table_builder [["X", ["a", "b", "c"]], ["Y", [1, 2, 3]], ["Z", [0.25, 0.5, 0.75]]]
t2 = table_builder [["X", ["ddd", "eee", "fff"]]]
@ -165,9 +184,9 @@ spec setup =
r123.at "Row" . to_vector . should_equal [1, 2, 3, 1, 2, 3, 1.5, 2.5, 3.5]
if setup.test_selection.fixed_length_text_columns then
Test.specify "types of unioned fixed-length columns should be correctly inferred after passing through other operations that infer types from Database, like aggregate Shortest" <|
t1 = table_builder [["X", ["a", "b", "c"]], ["Y", [1, 0, 2]]] . cast "X" (Value_Type.Char 1 False)
t2 = table_builder [["X", ["ddd", "eee", "fff"]], ["Y", [0, 1, 0]]] . cast "X" (Value_Type.Char 3 False)
group_builder.specify "types of unioned fixed-length columns should be correctly inferred after passing through other operations that infer types from Database, like aggregate Shortest" <|
t1 = (table_builder [["X", ["a", "b", "c"]], ["Y", [1, 0, 2]]]) . cast "X" (Value_Type.Char 1 False)
t2 = (table_builder [["X", ["ddd", "eee", "fff"]], ["Y", [0, 1, 0]]]) . cast "X" (Value_Type.Char 3 False)
t3 = t1.union t2
vt1 = t3.at "X" . value_type
@ -184,7 +203,7 @@ spec setup =
t5.at "Y" . to_vector . should_equal [0, 1, 2]
t5.at "Shortest X" . to_vector . should_equal ["b", "a", "c"]
Test.specify "types should be correctly preserved after aggregation after iif" <|
group_builder.specify "types should be correctly preserved after aggregation after iif" <|
t0 = table_builder [["x", [False, True, False]], ["A", ["a", "b", "c"]], ["B", ["xyz", "abc", "def"]]]
t1 = t0 . cast "A" (Value_Type.Char size=1 variable_length=False) . cast "B" (Value_Type.Char size=3 variable_length=False)

View File

@ -10,19 +10,32 @@ import Standard.Table.Data.Join_Kind_Cross.Join_Kind_Cross
from Standard.Database import all
from Standard.Database.Errors import Unsupported_Database_Operation, Integrity_Error
from Standard.Test import Test, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Common_Table_Operations.Util import expect_column_names, run_default_backend
main = run_default_backend spec
type Data
Value ~connection
spec setup =
setup create_connection_fn =
Data.Value (create_connection_fn Nothing)
teardown self = self.connection.close
add_specs suite_builder setup =
prefix = setup.prefix
table_builder = setup.table_builder
create_connection_fn = setup.create_connection_func
materialize = setup.materialize
Test.group prefix+"Table.cross_join" <|
Test.specify "should allow to create a cross product of two tables in the right order" <|
suite_builder.group prefix+"Table.cross_join" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
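# A note on the wiring above (my reading of the pattern, not spelled out in
# the PR): `Data.setup` opens one connection per group, the local
# `table_builder` shadows `setup.table_builder` so that every table built
# inside the specs reuses that shared connection, and `group_builder.teardown`
# closes it once the group's specs have run.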
group_builder.specify "should allow to create a cross product of two tables in the right order" <|
t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]]
t2 = table_builder [["Z", ['a', 'b']], ["W", ['c', 'd']]]
@ -42,7 +55,7 @@ spec setup =
True -> r.should_contain_the_same_elements_as expected_rows
False -> r.should_equal expected_rows
Test.specify "should work correctly with empty tables" <|
group_builder.specify "should work correctly with empty tables" <|
t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]]
t2 = table_builder [["Z", ['a']], ["W", ['c']]]
# Workaround to easily create an empty table until the table builder allows that directly.
@ -59,7 +72,7 @@ spec setup =
t4.row_count.should_equal 0
t4.at "X" . to_vector . should_equal []
Test.specify "should respect the right row limit" <|
group_builder.specify "should respect the right row limit" <|
t2 = table_builder [["X", [1, 2]]]
t3 = table_builder [["X", [1, 2, 3]]]
t100 = table_builder [["Y", 0.up_to 100 . to_vector]]
@ -76,7 +89,7 @@ spec setup =
t2.cross_join t101 right_row_limit=Nothing . row_count . should_equal 202
t2.cross_join t3 right_row_limit=2 on_problems=Problem_Behavior.Report_Error . should_fail_with Cross_Join_Row_Limit_Exceeded
Test.specify "should ensure 1-1 mapping even with duplicate rows" <|
group_builder.specify "should ensure 1-1 mapping even with duplicate rows" <|
t1 = table_builder [["X", [2, 1, 2, 2]], ["Y", [5, 4, 5, 5]]]
t2 = table_builder [["Z", ['a', 'b', 'a', 'b']]]
@ -94,7 +107,7 @@ spec setup =
True -> r.should_contain_the_same_elements_as expected_rows
False -> r.should_equal expected_rows
Test.specify "should allow self-joins" <|
group_builder.specify "should allow self-joins" <|
t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]]
t2 = t1.cross_join t1
@ -111,7 +124,7 @@ spec setup =
True -> r.should_contain_the_same_elements_as expected_rows
False -> r.should_equal expected_rows
Test.specify "should rename columns of the right table to avoid duplicates" <|
group_builder.specify "should rename columns of the right table to avoid duplicates" <|
t1 = table_builder [["X", [1]], ["Y", [5]], ["Right Y", [10]]]
t2 = table_builder [["X", ['a']], ["Y", ['d']]]
@ -133,7 +146,7 @@ spec setup =
expect_column_names ["X", "Y", "Right Y", "Right X 1", "Right X"] (t1.cross_join t4)
expect_column_names ["X", "Right X", "Right X 1", "Y", "Right Y"] (t4.cross_join t1)
Test.specify "should respect the column ordering" <|
group_builder.specify "should respect the column ordering" <|
t1 = table_builder [["X", [100, 2]], ["Y", [4, 5]]]
t2 = table_builder [["Z", ['a', 'b', 'c']], ["W", ['x', 'd', 'd']]]
@ -157,12 +170,12 @@ spec setup =
True -> r.should_contain_the_same_elements_as expected_rows
False -> r.should_equal expected_rows
Test.specify "Cross join is not possible via call to .join" <|
group_builder.specify "Cross join is not possible via call to .join" <|
t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]]
t2 = table_builder [["Z", ['a', 'b']], ["W", ['c', 'd']]]
Test.expect_panic_with (t1.join t2 join_kind=Join_Kind_Cross.Cross on=[]) Type_Error
Test.specify "should gracefully handle tables from different backends" <|
group_builder.specify "should gracefully handle tables from different backends" <|
t1 = table_builder [["A", ["a", "b"]]]
alternative_connection = Database.connect (SQLite In_Memory)
t0 = (Table.new [["X", [1, 2, 4]], ["Z", [10, 20, 30]]]).select_into_database_table alternative_connection "T0" temporary=True

View File

@ -8,8 +8,7 @@ from Standard.Table.Errors import all
from Standard.Database import all
from Standard.Database.Errors import Unsupported_Database_Operation, Integrity_Error
from Standard.Test import Test, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Common_Table_Operations.Util import expect_column_names, run_default_backend, within_table
@ -25,28 +24,53 @@ type My_Type_Comparator
Comparable.from (_:My_Type) = My_Type_Comparator
main = run_default_backend spec
spec setup =
type Data
Value ~data
connection self = self.data.at 0
t1 self = self.data.at 1
t2 self = self.data.at 2
t3 self = self.data.at 3
t4 self = self.data.at 4
setup create_connection_fn table_builder = Data.Value <|
connection = create_connection_fn Nothing
t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] connection=connection
t2 = table_builder [["Z", [2, 3, 2, 4]], ["W", [4, 5, 6, 7]]] connection=connection
t3 = table_builder [["X", [1, 1, 1, 2, 2, 2]], ["Y", ["A", "B", "B", "C", "C", "A"]], ["Z", [1, 2, 3, 4, 5, 6]]] connection=connection
t4 = table_builder [["X", [1, 1, 3, 2, 2, 4]], ["Y", ["B", "B", "C", "C", "D", "A"]], ["Z", [1, 2, 3, 4, 5, 6]]] connection=connection
[connection, t1, t2, t3, t4]
teardown self = self.connection.close
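# This Data variant keeps the whole fixture in one lazily evaluated vector
# (`Value ~data`): the connection and the four tables are only built when a
# spec first forces them, and the accessors above (`self.data.at 0` and so on)
# give the specs named access to the vector's elements. The `~` marks the
# constructor argument as suspended.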
add_specs suite_builder setup =
prefix = setup.prefix
table_builder = setup.table_builder
create_connection_fn = setup.create_connection_func
materialize = setup.materialize
db_todo = if setup.is_database.not then Nothing else "ToDo: handling NULLs in equality conditions."
Test.group prefix+"Table.join" <|
t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]]
t2 = table_builder [["Z", [2, 3, 2, 4]], ["W", [4, 5, 6, 7]]]
suite_builder.group prefix+"Table.join" group_builder->
data = Data.setup create_connection_fn table_builder
Test.specify "should by default do a Left Outer join on equality of first column in the left table, correlated with column of the same name in the right one" <|
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "should by default do a Left Outer join on equality of first column in the left table, correlated with column of the same name in the right one" <|
t3 = table_builder [["Z", [4, 5, 6, 7]], ["X", [2, 3, 2, 4]]]
t4 = t1.join t3 |> materialize |> _.order_by ["X", "Z"]
t4 = data.t1.join t3 |> materialize |> _.order_by ["X", "Z"]
expect_column_names ["X", "Y", "Z", "Right X"] t4
t4.at "X" . to_vector . should_equal [1, 2, 2, 3]
t4.at "Y" . to_vector . should_equal [4, 5, 5, 6]
t4.at "Right X" . to_vector . should_equal [Nothing, 2, 2, 3]
t4.at "Z" . to_vector . should_equal [Nothing, 4, 6, 5]
Test.specify "should allow Inner join" <|
t3 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals 0 0)
group_builder.specify "should allow Inner join" <|
t3 = data.t1.join data.t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals 0 0)
expect_column_names ["X", "Y", "Z", "W"] t3
t4 = t3 |> materialize |> _.order_by ["X", "W"]
t4.at "X" . to_vector . should_equal [2, 2, 3]
@ -54,35 +78,33 @@ spec setup =
t4.at "Y" . to_vector . should_equal [5, 5, 6]
t4.at "W" . to_vector . should_equal [4, 6, 5]
Test.specify "should allow Full join" <|
t3 = t1.join t2 join_kind=Join_Kind.Full on=(Join_Condition.Equals 0 0) |> materialize |> _.order_by ["X", "W"]
group_builder.specify "should allow Full join" <|
t3 = data.t1.join data.t2 join_kind=Join_Kind.Full on=(Join_Condition.Equals 0 0) |> materialize |> _.order_by ["X", "W"]
expect_column_names ["X", "Y", "Z", "W"] t3
t3.at "X" . to_vector . should_equal [Nothing, 1, 2, 2, 3]
t3.at "Y" . to_vector . should_equal [Nothing, 4, 5, 5, 6]
t3.at "Z" . to_vector . should_equal [4, Nothing, 2, 2, 3]
t3.at "W" . to_vector . should_equal [7, Nothing, 4, 6, 5]
Test.specify "should allow Right Outer join" <|
t5 = t1.join t2 join_kind=Join_Kind.Right_Outer on=(Join_Condition.Equals 0 0) |> materialize |> _.order_by ["X", "W"]
group_builder.specify "should allow Right Outer join" <|
t5 = data.t1.join data.t2 join_kind=Join_Kind.Right_Outer on=(Join_Condition.Equals 0 0) |> materialize |> _.order_by ["X", "W"]
expect_column_names ["X", "Y", "Z", "W"] t5
t5.at "X" . to_vector . should_equal [Nothing, 2, 2, 3]
t5.at "Y" . to_vector . should_equal [Nothing, 5, 5, 6]
t5.at "Z" . to_vector . should_equal [4, 2, 2, 3]
t5.at "W" . to_vector . should_equal [7, 4, 6, 5]
Test.specify "should allow to perform anti-joins" <|
t6 = t1.join t2 join_kind=Join_Kind.Left_Exclusive on=(Join_Condition.Equals 0 0) |> materialize |> _.order_by ["X"]
group_builder.specify "should allow to perform anti-joins" <|
t6 = data.t1.join data.t2 join_kind=Join_Kind.Left_Exclusive on=(Join_Condition.Equals 0 0) |> materialize |> _.order_by ["X"]
t6.columns.map .name . should_equal ["X", "Y"]
t6.at "X" . to_vector . should_equal [1]
t6.at "Y" . to_vector . should_equal [4]
t7 = t1.join t2 join_kind=Join_Kind.Right_Exclusive on=(Join_Condition.Equals 0 0) |> materialize |> _.order_by ["Z"]
t7 = data.t1.join data.t2 join_kind=Join_Kind.Right_Exclusive on=(Join_Condition.Equals 0 0) |> materialize |> _.order_by ["Z"]
t7.columns.map .name . should_equal ["Z", "W"]
t7.at "Z" . to_vector . should_equal [4]
t7.at "W" . to_vector . should_equal [7]
t3 = table_builder [["X", [1, 1, 1, 2, 2, 2]], ["Y", ["A", "B", "B", "C", "C", "A"]], ["Z", [1, 2, 3, 4, 5, 6]]]
t4 = table_builder [["X", [1, 1, 3, 2, 2, 4]], ["Y", ["B", "B", "C", "C", "D", "A"]], ["Z", [1, 2, 3, 4, 5, 6]]]
check_xy_joined r =
expect_column_names ["X", "Y", "Z", "Right Z"] r
r.at "X" . to_vector . should_equal [1, 1, 1, 1, 2, 2]
@ -90,20 +112,20 @@ spec setup =
r.at "Z" . to_vector . should_equal [2, 2, 3, 3, 4, 5]
r.at "Right Z" . to_vector . should_equal [1, 2, 1, 2, 4, 4]
Test.specify "should allow to join on equality of multiple columns and drop redundant columns if Inner join" <|
group_builder.specify "should allow to join on equality of multiple columns and drop redundant columns if Inner join" <|
conditions = [Join_Condition.Equals "Y" "Y", Join_Condition.Equals "X" "X"]
r = t3.join t4 join_kind=Join_Kind.Inner on=conditions |> materialize |> _.order_by ["X", "Y", "Z", "Right Z"]
r = data.t3.join data.t4 join_kind=Join_Kind.Inner on=conditions |> materialize |> _.order_by ["X", "Y", "Z", "Right Z"]
check_xy_joined r
[Join_Kind.Full, Join_Kind.Left_Outer, Join_Kind.Right_Outer].each kind->
r2 = t3.join t4 join_kind=kind on=conditions
r2 = data.t3.join data.t4 join_kind=kind on=conditions
expect_column_names ["X", "Y", "Z", "Right X", "Right Y", "Right Z"] r2
Test.specify "should support same-name column join shorthand" <|
r = t3.join t4 join_kind=Join_Kind.Inner on=["X", "Y"] |> materialize |> _.order_by ["X", "Y", "Z", "Right Z"]
group_builder.specify "should support same-name column join shorthand" <|
r = data.t3.join data.t4 join_kind=Join_Kind.Inner on=["X", "Y"] |> materialize |> _.order_by ["X", "Y", "Z", "Right Z"]
check_xy_joined r
Test.specify "should correctly handle duplicated rows in Equals" <|
group_builder.specify "should correctly handle duplicated rows in Equals" <|
t1 = table_builder [["X", [1, 2, 2, 3]]]
t2 = table_builder [["X", [1, 2, 2, 4]]]
r1 = t1.join t2 join_kind=Join_Kind.Full on="X" . order_by "X"
@ -112,7 +134,7 @@ spec setup =
r1.at "X" . to_vector . should_equal [Nothing, 1, 2, 2, 2, 2, 3]
r1.at "Right X" . to_vector . should_equal [4, 1, 2, 2, 2, 2, Nothing]
Test.specify "should allow to join on text equality ignoring case" <|
group_builder.specify "should allow to join on text equality ignoring case" <|
t1 = table_builder [["X", ["a", "B"]], ["Y", [1, 2]]]
t2 = table_builder [["X", ["A", "a", "b"]], ["Z", [1, 2, 3]]]
@ -130,7 +152,7 @@ spec setup =
r2 . at "Z" . to_vector . should_equal [1, 2, 3]
if setup.test_selection.supports_unicode_normalization then
Test.specify "should correctly handle Unicode equality" <|
group_builder.specify "should correctly handle Unicode equality" <|
t1 = table_builder [["X", ['s\u0301', 'S\u0301']], ["Y", [1, 2]]]
t2 = table_builder [["X", ['s', 'S', 'ś']], ["Z", [1, 2, 3]]]
@ -148,7 +170,7 @@ spec setup =
r2 . at "Z" . to_vector . should_equal [3, 3]
# This may need a test_selection toggle in the future, depending on how well databases tolerate coercing decimals and integers.
Test.specify "should correctly handle Enso Float-Integer equality" <|
group_builder.specify "should correctly handle Enso Float-Integer equality" <|
t1 = table_builder [["X", [1, 2]], ["Y", [10, 20]]]
t2 = table_builder [["X", [2.0, 2.1, 0.0]], ["Z", [1, 2, 3]]]
@ -159,7 +181,7 @@ spec setup =
r1 . at "Z" . to_vector . should_equal [1]
if setup.supports_custom_objects then
Test.specify "should allow equality joins for custom objects" <|
group_builder.specify "should allow equality joins for custom objects" <|
t1 = table_builder [["X", [My_Type.Value 1 2, My_Type.Value 2 3]], ["Y", [1, 2]]]
t2 = table_builder [["X", [My_Type.Value 5 0, My_Type.Value 2 1]], ["Z", [10, 20]]]
@ -174,7 +196,7 @@ spec setup =
r1 . at "Y" . to_vector . should_equal [1, 2]
r1 . at "Z" . to_vector . should_equal [20, 10]
Test.specify "should allow range-based joins (using Between) for numbers" <|
group_builder.specify "should allow range-based joins (using Between) for numbers" <|
t1 = table_builder [["X", [1, 10, 12]], ["Y", [1, 2, 3]]]
t2 = table_builder [["lower", [1, 10, 8, 12]], ["upper", [1, 12, 30, 0]], ["Z", [1, 2, 3, 4]]]
@ -186,7 +208,7 @@ spec setup =
r1 . at "upper" . to_vector . should_equal [1, 12, 30, 12, 30]
r1 . at "Z" . to_vector . should_equal [1, 2, 3, 2, 3]
Test.specify "should allow range-based joins (using Between) for text" <|
group_builder.specify "should allow range-based joins (using Between) for text" <|
t1 = table_builder [["X", ["a", "b", "c"]], ["Y", [1, 2, 3]]]
t2 = table_builder [["lower", ["a", "b"]], ["upper", ["a", "ccc"]], ["Z", [10, 20]]]
@ -198,7 +220,7 @@ spec setup =
r1 . at "upper" . to_vector . should_equal ["a", "ccc", "ccc"]
r1 . at "Z" . to_vector . should_equal [10, 20, 20]
Test.specify "should correctly handle Between edge cases (1)" pending=(if prefix.contains "PostgreSQL" then "TODO: fix issue #8243") <|
group_builder.specify "should correctly handle Between edge cases (1)" pending=(if prefix.contains "PostgreSQL" then "TODO: fix issue #8243") <|
# 1. multiple rows with the same key value on the left side
# 2. fully duplicated rows (1, 7) on the left side
# 3. empty bounds (lower > upper: 10 > 0)
@ -221,7 +243,7 @@ spec setup =
rows.at 6 . should_equal [10, 2, 10, 10, 'b']
rows.at 7 . should_equal [Nothing, Nothing, 10, 0, 'c']
Test.specify "should correctly handle Between edge cases (2)" <|
group_builder.specify "should correctly handle Between edge cases (2)" <|
# 6. multiple Between conditions
xs = [0, 0, 1, 1, 1, 1, 2, 2, 3, 3, 3, 4, 4, 4]
ys = [1, 2, 3, 1, 9, 2, 3, 2, 4, 2, 1, 1, 1, 2]
@ -240,7 +262,7 @@ spec setup =
r3.at "X" . to_vector . should_equal [2, 3, 3]
r3.at "Y" . to_vector . should_equal [2, 1, 2]
Test.specify "should correctly handle Between edge cases (3)" <|
group_builder.specify "should correctly handle Between edge cases (3)" <|
# 7. duplicated rows on both sides
t1 = table_builder [["X", [10, 20, 20]]]
t2 = table_builder [["low", [15, 15]], ["high", [30, 30]]]
@ -257,7 +279,7 @@ spec setup =
r2.at "X" . to_vector . should_equal [10]
if setup.test_selection.supports_unicode_normalization then
Test.specify "should allow range-based joins (using Between) for text with Unicode normalization" <|
group_builder.specify "should allow range-based joins (using Between) for text with Unicode normalization" <|
t1 = table_builder [["X", ['s\u0301', 's']], ["Y", [1, 2]]]
t2 = table_builder [["lower", ['s', 'ś']], ["upper", ['sa', 'ś']], ["Z", [10, 20]]]
@ -270,7 +292,7 @@ spec setup =
r1 . at "Z" . to_vector . should_equal [20, 10]
if setup.supports_custom_objects then
Test.specify "should allow range-based joins (using Between) for custom objects" <|
group_builder.specify "should allow range-based joins (using Between) for custom objects" <|
t1 = table_builder [["X", [My_Type.Value 20 30, My_Type.Value 1 2]], ["Y", [1, 2]]]
t2 = table_builder [["lower", [My_Type.Value 3 0, My_Type.Value 10 10]], ["upper", [My_Type.Value 2 1, My_Type.Value 100 0]], ["Z", [10, 20]]]
@ -282,7 +304,7 @@ spec setup =
r1 . at "upper" . to_vector . to_text . should_equal "[(My_Type.Value 2 1), (My_Type.Value 100 0)]"
r1 . at "Z" . to_vector . should_equal [10, 20]
Test.specify "should allow to mix join conditions of various kinds" <|
group_builder.specify "should allow to mix join conditions of various kinds" <|
t1 = table_builder [["X", [1, 12, 12, 0]], ["Y", [1, 2, 3, 4]], ["Z", ["a", "A", "a", "ą"]], ["W", [1, 2, 3, 4]]]
t2 = table_builder [["X", [12, 12, 1]], ["l", [0, 100, 100]], ["u", [10, 100, 200]], ["Z", ["A", "A", "A"]], ["W'", [10, 20, 30]]]
@ -316,8 +338,8 @@ spec setup =
r3.at "Z" . to_vector . should_equal ["A", "A"]
r3.at "W'" . to_vector . should_equal [20, 30]
Test.specify "should work fine if the same condition is specified multiple times" <|
r = t3.join t4 join_kind=Join_Kind.Inner on=["X", "X", "Y", "X", "Y"] |> materialize |> _.order_by ["X", "Y", "Z", "Right Z"]
group_builder.specify "should work fine if the same condition is specified multiple times" <|
r = data.t3.join data.t4 join_kind=Join_Kind.Inner on=["X", "X", "Y", "X", "Y"] |> materialize |> _.order_by ["X", "Y", "Z", "Right Z"]
check_xy_joined r
t5 = table_builder [["X", [1, 10, 12]], ["Y", [1, 2, 3]]]
@ -336,7 +358,7 @@ spec setup =
r2 . at "Right X" . to_vector . should_equal ["A", "a", "b"]
r2 . at "Z" . to_vector . should_equal [1, 2, 3]
Test.specify "should correctly handle joining a table with itself" <|
group_builder.specify "should correctly handle joining a table with itself" <|
t1 = table_builder [["X", [0, 1, 2, 3, 2]], ["Y", [1, 2, 3, 4, 100]], ["A", ["B", "C", "D", "E", "X"]]]
t2 = t1.join t1 join_kind=Join_Kind.Inner on=(Join_Condition.Equals left="X" right="Y") |> materialize |> _.order_by ["X", "Y"]
@ -371,7 +393,7 @@ spec setup =
# t5.at "Right Y" . to_vector . should_equal ["ą", Nothing]
# t5.at "Right Z" . to_vector . should_equal [1, 3]
Test.specify "should gracefully handle unmatched columns in Join_Conditions" <|
group_builder.specify "should gracefully handle unmatched columns in Join_Conditions" <|
t1 = table_builder [["X", [1, 2]], ["Y", [3, 4]]]
t2 = table_builder [["Z", [2, 1]], ["W", [5, 6]]]
@ -393,7 +415,7 @@ spec setup =
r3.catch.criteria.should_equal ["baz", 42, -3]
r3.catch.to_display_text.should_equal "The criteria 'baz', 42 (index), -3 (index) did not match any columns in the right table."
Test.specify "should report Invalid_Value_Type if non-text columns are provided to Equals_Ignore_Case" <|
group_builder.specify "should report Invalid_Value_Type if non-text columns are provided to Equals_Ignore_Case" <|
t1 = table_builder [["X", ["1", "2", "c"]], ["Y", [1, 2, 3]]]
t2 = table_builder [["Z", ["1", "2", "c"]], ["W", [1, 2, 3]]]
@ -406,14 +428,14 @@ spec setup =
test <|
t1.join t2 on=(Join_Condition.Equals_Ignore_Case "Y" "Z") on_problems=Problem_Behavior.Ignore
Test.specify "should report Invalid_Value_Type if incompatible types are correlated" <|
group_builder.specify "should report Invalid_Value_Type if incompatible types are correlated" <|
t1 = table_builder [["X", ["1", "2", "c"]]]
t2 = table_builder [["X", [1, 2, 3]]]
r1 = t1.join t2 on_problems=Problem_Behavior.Ignore
r1.should_fail_with Invalid_Value_Type
Test.specify "should report Invalid_Value_Type if incompatible columns types are correlated in Between" <|
group_builder.specify "should report Invalid_Value_Type if incompatible columns types are correlated in Between" <|
t1 = table_builder [["X", ["1", "2", "c"]], ["Y", [1, 2, 3]]]
t2 = table_builder [["Z", ["1", "2", "c"]], ["W", [1, 2, 3]]]
@ -421,7 +443,7 @@ spec setup =
t1.join t2 on=(Join_Condition.Between "Y" "W" "Z") . should_fail_with Invalid_Value_Type
t1.join t2 on=(Join_Condition.Between "Y" "Z" "W") . should_fail_with Invalid_Value_Type
Test.specify "should warn when joining on equality of Float columns" <|
group_builder.specify "should warn when joining on equality of Float columns" <|
t1 = table_builder [["X", [1.5, 2.0, 2.00000000001]], ["Y", [10, 20, 30]]]
t2 = table_builder [["Z", [2.0, 1.5, 2.0]], ["W", [1, 2, 3]]]
@ -464,7 +486,7 @@ spec setup =
expected_problems = [Floating_Point_Equality.Error "Z", Floating_Point_Equality.Error "X"]
Problems.get_attached_warnings r3 . should_contain_the_same_elements_as expected_problems
Test.specify "should correctly handle nulls in equality conditions" pending=db_todo <|
group_builder.specify "should correctly handle nulls in equality conditions" pending=db_todo <|
t1 = table_builder [["X", ["A", Nothing, "a", Nothing, "ą"]], ["Y", [0, 1, 2, 3, 4]]]
t2 = table_builder [["X", ["a", Nothing, Nothing]], ["Z", [10, 20, 30]]]
@ -474,7 +496,7 @@ spec setup =
r1.at "Y" . to_vector . should_equal [1, 1, 2, 3, 3]
r1.at "Z" . to_vector . should_equal [20, 30, 10, 20, 30]
Test.specify "should correctly handle nulls in case-insensitive equality conditions" pending=db_todo <|
group_builder.specify "should correctly handle nulls in case-insensitive equality conditions" pending=db_todo <|
t1 = table_builder [["X", ["A", Nothing, "a", Nothing, "ą"]], ["Y", [0, 1, 2, 3, 4]]]
t2 = table_builder [["X", ["a", Nothing, Nothing]], ["Z", [10, 20, 30]]]
@ -485,7 +507,7 @@ spec setup =
r1.at "Y" . to_vector . should_equal [0, 1, 1, 2, 3, 3]
r1.at "Z" . to_vector . should_equal [10, 20, 30, 10, 20, 30]
Test.specify "should correctly handle nulls in Between conditions" <|
group_builder.specify "should correctly handle nulls in Between conditions" <|
t1 = table_builder [["X", [1, Nothing, 2, Nothing]], ["Y", [0, 1, 2, 3]]]
t2 = table_builder [["l", [Nothing, 0, 1]], ["u", [100, 10, Nothing]], ["Z", [10, 20, 30]]]
@ -497,7 +519,7 @@ spec setup =
r1.at "u" . to_vector . should_equal [10, 10]
r1.at "Z" . to_vector . should_equal [20, 20]
Test.specify "should rename columns of the right table to avoid duplicates" <|
group_builder.specify "should rename columns of the right table to avoid duplicates" <|
t1 = table_builder [["X", [1, 2]], ["Y", [3, 4]], ["Right Y", [5, 6]]]
t2 = table_builder [["X", [2, 1]], ["Y", [2, 2]]]
@ -533,7 +555,7 @@ spec setup =
t8 = t1.join t2 join_kind=Join_Kind.Inner right_prefix="P"
t8.column_names.should_equal ["X", "Y", "Right Y", "PY"]
Test.specify "should warn about renamed columns" <|
group_builder.specify "should warn about renamed columns" <|
t1 = table_builder [["X", [1, 2]], ["Y", [3, 4]]]
t2 = table_builder [["X", [2, 1]], ["Y", [2, 2]], ["Right Y", [2, 44]]]
@ -549,12 +571,12 @@ spec setup =
problems2 = [Duplicate_Output_Column_Names.Error ["Right Y"]]
Problems.test_problem_handling action2 problems2 tester2
Test.specify "should pass dataflow errors through" <|
group_builder.specify "should pass dataflow errors through" <|
error = Error.throw (Illegal_State.Error "FOO")
t1.join error . should_fail_with Illegal_State
t1.join t2 on=[error, "X"] . should_fail_with Illegal_State
data.t1.join error . should_fail_with Illegal_State
data.t1.join data.t2 on=[error, "X"] . should_fail_with Illegal_State
Test.specify "should correctly handle all null rows" pending=db_todo <|
group_builder.specify "should correctly handle all null rows" pending=db_todo <|
t1 = table_builder [["A", [Nothing, 2, Nothing, 1]], ["B", [Nothing, 3, 4, 7]]]
t2 = table_builder [["C", [Nothing, 2, Nothing, 4]], ["D", [Nothing, 5, 6, Nothing]]]
@ -661,7 +683,7 @@ spec setup =
r12.length . should_equal 1
r12.at 0 . should_equal [3, 5]
Test.specify "should work correctly when composing multiple join operations" <|
group_builder.specify "should work correctly when composing multiple join operations" <|
ta = table_builder [["id", [0, 1]], ["name", ["Foo", "X"]]]
tb = table_builder [["id", [2, 0]], ["name", ["Bar", "Y"]]]
tc = table_builder [["id_a", [0, 1]], ["id_b", [0, 2]]]
@ -673,7 +695,7 @@ spec setup =
r.at 0 . should_equal ["Foo", "Y"]
r.at 1 . should_equal ["X", "Bar"]
Test.specify "should work correctly when the join is performed on a transformed table" <|
group_builder.specify "should work correctly when the join is performed on a transformed table" <|
t1 = table_builder [["X", [1, 2, 3]]]
t1_2 = t1.set "10*[X]+1" new_name="A"
@ -696,7 +718,7 @@ spec setup =
r2.at 0 . should_equal [2, 20, 5, 5, 100]
r2.at 1 . should_equal [3, 30, 7, 7, 200]
Test.specify "should allow full joins with more complex join conditions" <|
group_builder.specify "should allow full joins with more complex join conditions" <|
t1 = table_builder [["X", ["a", "b", "c"]], ["Y", [10, 20, 30]]]
t2 = table_builder [["X", ["Ć", "A", "b"]], ["Z", [100, 200, 300]]]
@ -746,9 +768,9 @@ spec setup =
r3.at 2 . should_equal [2, 20, 5, 2, 20, 5]
r3.at 3 . should_equal [3, 30, 7, Nothing, Nothing, Nothing]
Test.specify "should gracefully handle tables from different backends" <|
group_builder.specify "should gracefully handle tables from different backends" <|
alternative_connection = Database.connect (SQLite In_Memory)
t0 = (Table.new [["X", [1, 2, 4]], ["Z", [10, 20, 30]]]).select_into_database_table alternative_connection "T0" temporary=True
r1 = t1.join t0
r1 = data.t1.join t0
r1.should_fail_with Illegal_Argument

View File

@ -7,20 +7,36 @@ from Standard.Table.Errors import all
from Standard.Database import all
from Standard.Database.Errors import Unsupported_Database_Operation, Invariant_Violation, Integrity_Error
from Standard.Test import Test, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Common_Table_Operations.Util import run_default_backend, within_table
import project.Util
main = run_default_backend spec
main = run_default_backend add_specs
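# The entry point now hands `run_default_backend` the spec-registering
# function: `add_specs` receives a `suite_builder` plus the backend `setup`,
# instead of the old `spec` function that executed the tests directly.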
spec setup =
type Data
Value ~connection
setup create_connection_fn =
Data.Value (create_connection_fn Nothing)
teardown self = self.connection.close
add_specs suite_builder setup =
prefix = setup.prefix
table_builder = setup.table_builder
create_connection_fn = setup.create_connection_func
materialize = setup.materialize
Test.group prefix+"Table.merge" <|
Test.specify "should allow to simply update columns based on a lookup table" <|
suite_builder.group prefix+"Table.merge" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "should allow to simply update columns based on a lookup table" <|
lookup = table_builder [["Y", ["A", "B", "A"]], ["X", [1, 2, 3]]]
my_table = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]]
@ -31,7 +47,7 @@ spec setup =
m2.at "X" . to_vector . should_equal [1, 2, 3, 2]
m2.at "Y" . to_vector . should_equal ["A", "B", "A", "B"]
Test.specify "should allow to add new columns from a lookup table" <|
group_builder.specify "should allow to add new columns from a lookup table" <|
lookup = table_builder [["code", ["a", "b", "c"]], ["status", ["new", "old", "changed"]]]
my_table = table_builder [["id", [1, 2, 3, 4]], ["code", ["a", "c", "c", "b"]], ["hmm", [10, 20, 30, 40]]]
@ -52,7 +68,7 @@ spec setup =
m3.at "is_X" . to_vector . should_equal [True, True, False, True]
m3.at "X" . to_vector . should_equal ["Yes", "Yes", "No", "Yes"]
Test.specify "will warn if extra columns are unexpected (add_new_columns=False) (default)" <|
group_builder.specify "will warn if extra columns are unexpected (add_new_columns=False) (default)" <|
lookup = table_builder [["code", ["a", "b", "c"]], ["status", ["new", "old", "changed"]]]
my_table = table_builder [["id", [1, 2, 3, 4]], ["code", ["a", "c", "c", "b"]], ["hmm", [10, 20, 30, 40]]]
@ -82,7 +98,7 @@ spec setup =
w3 = Problems.expect_only_warning Unexpected_Extra_Columns t3
w3.columns . should_equal ["status"]
Test.specify "will only update rows that are matched and skip others (default - allow_unmatched_rows=True)" <|
group_builder.specify "will only update rows that are matched and skip others (default - allow_unmatched_rows=True)" <|
lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]]
my_table = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]]
@ -94,7 +110,7 @@ spec setup =
m2.at "Y" . to_vector . should_equal ["A", "B", "ZZZ", "B"]
m2.at "Y" . to_vector . should_equal ["A", "B", "ZZZ", "B"]
Test.specify "will fill new columns of unmatched rows with Nothing (allow_unmatched_rows=True)" <|
group_builder.specify "will fill new columns of unmatched rows with Nothing (allow_unmatched_rows=True)" <|
lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]], ["W", [1.5, 2.0]]]
my_table = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]]
@ -106,7 +122,7 @@ spec setup =
m2.at "Y" . to_vector . should_equal ["A", "B", "ZZZ", "B"]
m2.at "W" . to_vector . should_equal [1.5, 2.0, Nothing, 2.0]
Test.specify "will report unmatched rows (if allow_unmatched_rows=False)" <|
group_builder.specify "will report unmatched rows (if allow_unmatched_rows=False)" <|
lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]]
my_table = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]]
@ -124,7 +140,7 @@ spec setup =
m3.at "Y" . to_vector . should_equal ["A", "B", "A", "A"]
m3.at "Z" . to_vector . should_equal [10, 20, 30, 40]
Test.specify "will fail on missing key columns in either table" <|
group_builder.specify "will fail on missing key columns in either table" <|
lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]]
my_table = table_builder [["X", [1, 2, 3, 2]], ["Z", [10, 20, 30, 40]]]
@ -137,7 +153,7 @@ spec setup =
r3.catch.criteria . should_equal ["Z"]
r3.catch.to_display_text . should_contain "in the lookup table"
Test.specify "should allow matching by multiple key columns" <|
group_builder.specify "should allow matching by multiple key columns" <|
lookup = table_builder [["X", [1, 2, 1]], ["Y", ["A", "B", "B"]], ["Z", [100, 100, 200]]]
my_table = table_builder [["X", [1, 1, 1, 2]], ["Y", ["A", "B", "A", "B"]], ["Z", [10, 20, 30, 40]], ["W", [1000, 2000, 3000, 4000]]]
@ -150,7 +166,7 @@ spec setup =
m2.at "Y" . to_vector . should_equal ["A", "B", "A", "B"]
m2.at "Z" . to_vector . should_equal [100, 200, 100, 100]
Test.specify "will fail on duplicate matches in the lookup table" <|
group_builder.specify "will fail on duplicate matches in the lookup table" <|
lookup = table_builder [["X", [1, 2, 1]], ["Y", ["A", "B", "C"]]]
my_table = table_builder [["X", [4, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]]
@ -186,7 +202,7 @@ spec setup =
r4.catch.clashing_example_key_values . should_equal [1, "A"]
r4.catch.clashing_example_row_count . should_equal 2
Test.specify "will preserve count of rows, even if there are duplicates" <|
group_builder.specify "will preserve count of rows, even if there are duplicates" <|
lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]]
my_table = table_builder [["X", [1, 2, 2, 2, 1]], ["Z", [10, 20, 20, 20, 50]]]
@ -197,7 +213,7 @@ spec setup =
m2.at "Y" . to_vector . should_equal ["A", "B", "B", "B", "A"]
m2.at "Z" . to_vector . should_equal [10, 20, 20, 20, 50]
Test.specify "should correctly preserve types of original, merged and added columns" <|
group_builder.specify "should correctly preserve types of original, merged and added columns" <|
table = table_builder [["key1", [0, 1]], ["key2", ["o", "?"]], ["X", [1, 10]], ["Y", ["A", "E"]], ["Z", [1.5, 2.0]], ["W", [True, False]], ["A", [2, 22]], ["B", ["1", "2"]], ["C", [2.0, 2.5]], ["D", [False, False]]]
lookup = table_builder [["key1", [0, 2]], ["key2", ["o", "?"]], ["X2", [100, 1000]], ["Y2", ["foo", "bar"]], ["Z2", [0.5, 4.0]], ["W2", [False, True]], ["A", [3, 55]], ["B", ["F", "F"]], ["C", [3.0, 10.5]], ["D", [True, False]]]
@ -226,9 +242,9 @@ spec setup =
t2.at "Z2" . value_type . should_equal Value_Type.Float
t2.at "W2" . value_type . should_equal Value_Type.Boolean
if setup.test_selection.fixed_length_text_columns then Test.specify "should correctly preserve types of original, merged and added columns (various Char types test case)" <|
table2 = table_builder [["key", ["0"]], ["X", ["a"]], ["A", ["bbbbb"]]] . cast "key" (Value_Type.Char size=50) . cast "X" (Value_Type.Char size=1) . cast "A" (Value_Type.Char size=5 variable_length=False)
lookup2 = table_builder [["key", ["0"]], ["X2", ["ccc"]], ["A", ["dddd"]]] . cast "key" (Value_Type.Char size=100) . cast "X2" (Value_Type.Char size=3 variable_length=False) . cast "A" (Value_Type.Char size=4 variable_length=False)
if setup.test_selection.fixed_length_text_columns then group_builder.specify "should correctly preserve types of original, merged and added columns (various Char types test case)" <|
table2 = (table_builder [["key", ["0"]], ["X", ["a"]], ["A", ["bbbbb"]]]) . cast "key" (Value_Type.Char size=50) . cast "X" (Value_Type.Char size=1) . cast "A" (Value_Type.Char size=5 variable_length=False)
lookup2 = (table_builder [["key", ["0"]], ["X2", ["ccc"]], ["A", ["dddd"]]]) . cast "key" (Value_Type.Char size=100) . cast "X2" (Value_Type.Char size=3 variable_length=False) . cast "A" (Value_Type.Char size=4 variable_length=False)
table2.at "key" . value_type . should_equal (Value_Type.Char size=50 variable_length=True)
table2.at "X" . value_type . should_equal (Value_Type.Char size=1 variable_length=True)
@ -255,7 +271,7 @@ spec setup =
# If unmatched rows are not allowed, we can guarantee only _new_ values (from the lookup table) will be in the result, so instead of merging the type we inherit the type from the lookup table.
t3.at "A" . value_type . should_equal (Value_Type.Char size=4 variable_length=False)
Test.specify "will report Floating_Point_Equality if floating-point columns are used as key" <|
group_builder.specify "will report Floating_Point_Equality if floating-point columns are used as key" <|
lookup = table_builder [["X", [1.0, 2.0, 3.0]], ["Y", ["A", "B", "C"]]]
my_table = table_builder [["X", [2.0, 3.0, 2.0, 3.0]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]]
lookup.at "X" . value_type . is_floating_point . should_be_true
@ -270,7 +286,7 @@ spec setup =
w2 = Problems.expect_only_warning Floating_Point_Equality t2
w2.to_display_text . should_contain "X"
Test.specify "will fail with No_Common_Type if types of updated columns are not compatible" <|
group_builder.specify "will fail with No_Common_Type if types of updated columns are not compatible" <|
lookup = table_builder [["X", [1, 2, 3]], ["Y", [1, 11, 111]]]
my_table = table_builder [["X", [2, 3, 2, 3]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]]
@ -280,7 +296,7 @@ spec setup =
r2.catch.to_display_text . should_contain "Char"
r2.catch.to_display_text . should_contain "when unifying column [Y]"
Test.specify "will allow incompatible types if allow_unmatched_rows=False" <|
group_builder.specify "will allow incompatible types if allow_unmatched_rows=False" <|
lookup = table_builder [["X", [1, 2, 3]], ["Y", [1, 11, 111]]]
my_table = table_builder [["X", [2, 3, 2, 3]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]]
my_table.at "Y" . value_type . is_text . should_be_true
@ -293,7 +309,7 @@ spec setup =
m2.at "Y" . to_vector . should_equal [11, 111, 11, 111]
m2.at "Z" . to_vector . should_equal [10, 20, 30, 40]
Test.specify "will fail if key columns of the lookup table contain Nothing" <|
group_builder.specify "will fail if key columns of the lookup table contain Nothing" <|
lookup1 = table_builder [["X", [1, 2, Nothing]], ["Y", ["A", "B", "C"]]]
my_table1 = table_builder [["X", [2, 3, 2, 3]], ["Z", [10, 20, 30, 40]]]
r1 = my_table1.merge lookup1 key_columns="X" add_new_columns=True
@ -312,14 +328,14 @@ spec setup =
r3 = my_table2.merge lookup2 key_columns="X" allow_unmatched_rows=False add_new_columns=True
r3.should_fail_with Unmatched_Rows_In_Lookup
Test.specify "will not allow providing no key_columns" <|
group_builder.specify "will not allow providing no key_columns" <|
lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]]
my_table = table_builder [["X", [2, 1]], ["Z", [10, 20]]]
r2 = my_table.merge lookup key_columns=[] add_new_columns=True
r2.should_fail_with Illegal_Argument
if setup.is_database.not then Test.specify "(in-memory only) will preserve the order of rows from the original table" <|
if setup.is_database.not then group_builder.specify "(in-memory only) will preserve the order of rows from the original table" <|
lookup = table_builder [["Y", [1, 0]], ["V", ["TRUE", "FALSE"]]]
xs = 0.up_to 50 . to_vector
ys = xs.map x-> x%2
@ -333,7 +349,7 @@ spec setup =
vs = xs.map x-> if (x%2) == 1 then "TRUE" else "FALSE"
t2.at "V" . to_vector . should_equal vs
if setup.is_database then Test.specify "(database-only) will fail if pre-checked invariants get invalidated between the query is constructed and then materialized" <|
if setup.is_database then group_builder.specify "(database-only) will fail if pre-checked invariants get invalidated between the query is constructed and then materialized" <|
Test.with_clue "(lookup is unique check) " <|
lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]]
table = table_builder [["X", [1, 2, 2]], ["Y", ["Z", "ZZ", "ZZZ"]], ["Z", [10, 20, 30]]]
@ -419,7 +435,7 @@ spec setup =
m2.should_fail_with Invariant_Violation
# This does not seem particularly useful, but there is no reason to disallow it, so we should ensure it does not crash.
Test.specify "(edge-case) should allow lookup with itself" <|
group_builder.specify "(edge-case) should allow lookup with itself" <|
table = table_builder [["X", [1, 2, 3]], ["Y", ["A", "B", "C"]]]
t2 = table.merge table key_columns="X"
t2.column_names . should_equal ["X", "Y"]
@ -428,7 +444,7 @@ spec setup =
m2.at "X" . to_vector . should_equal [1, 2, 3]
m2.at "Y" . to_vector . should_equal ["A", "B", "C"]
Test.specify "should gracefully handle tables from different backends" <|
group_builder.specify "should gracefully handle tables from different backends" <|
t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]]
alternative_connection = Database.connect (SQLite In_Memory)
t0 = (Table.new [["A", [3, 2, 1]], ["B", ["x", "y", "z"]]]).select_into_database_table alternative_connection "T0" temporary=True

View File

@ -8,22 +8,38 @@ from Standard.Table.Errors import all
from Standard.Database import all
from Standard.Database.Errors import Unsupported_Database_Operation, Integrity_Error
from Standard.Test import Test, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Common_Table_Operations.Util import expect_column_names, run_default_backend, within_table
import project.Util
main = run_default_backend add_specs
type My_Type
Value x y
main = run_default_backend spec
type Data
Value ~connection
spec setup =
setup create_connection_fn =
Data.Value (create_connection_fn Nothing)
teardown self = self.connection.close
add_specs suite_builder setup =
prefix = setup.prefix
table_builder = setup.table_builder
Test.group prefix+"Table.union" <|
Test.specify "should merge columns from multiple tables" <|
create_connection_fn = setup.create_connection_func
suite_builder.group prefix+"Table.union" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "should merge columns from multiple tables" <|
t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]], ["C", [True, False, True]]]
t2 = table_builder [["A", [4, 5, 6]], ["B", ["d", "e", "f"]], ["C", [False, True, False]]]
t3 = table_builder [["A", [7, 8, 9]], ["B", ["g", "h", "i"]], ["C", [True, False, False]]]
@ -40,7 +56,7 @@ spec setup =
t5.at "B" . to_vector . should_equal ["g", "h", "i", "a", "b", "c", "d", "e", "f"]
t5.at "C" . to_vector . should_equal [True, False, False, True, False, True, False, True, False]
Test.specify "should fill unmatched columns (by name matching) with nulls and report a warning by default" <|
group_builder.specify "should fill unmatched columns (by name matching) with nulls and report a warning by default" <|
t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]]
t2 = table_builder [["C", ["d", "e", "f"]], ["A", [4, 5, 6]]]
t3 = table_builder [["D", [Nothing, Nothing, 0]], ["C", ["g", "h", "i"]]]
@ -64,7 +80,7 @@ spec setup =
problems2 = [Unmatched_Columns.Error ["A", "D"]]
Problems.test_problem_handling action2 problems2 tester2
Test.specify "should drop unmatched columns if asked to" <|
group_builder.specify "should drop unmatched columns if asked to" <|
t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]]
t2 = table_builder [["C", ["d", "e", "f"]], ["A", [4, 5, 6]]]
t3 = table_builder [["A", [Nothing, Nothing, 0]], ["C", ["g", "h", "i"]]]
@ -74,7 +90,7 @@ spec setup =
expect_column_names ["A"] t4
t4.at "A" . to_vector . should_equal [1, 2, 3, 4, 5, 6, Nothing, Nothing, 0]
Test.specify "should keep unmatched columns without errors if asked to" <|
group_builder.specify "should keep unmatched columns without errors if asked to" <|
t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]]
t2 = table_builder [["C", ["d", "e", "f"]], ["A", [4, 5, 6]]]
t3 = table_builder [["A", [Nothing, Nothing, 0]], ["C", ["g", "h", "i"]]]
@ -86,7 +102,7 @@ spec setup =
t4.at "B" . to_vector . should_equal ["a", "b", "c", Nothing, Nothing, Nothing, Nothing, Nothing, Nothing]
t4.at "C" . to_vector . should_equal [Nothing, Nothing, Nothing, "d", "e", "f", "g", "h", "i"]
Test.specify "should fail if asked to drop unmatched columns but the set of common columns is empty" <|
group_builder.specify "should fail if asked to drop unmatched columns but the set of common columns is empty" <|
t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]]
t2 = table_builder [["C", ["d", "e", "f"]], ["A", [4, 5, 6]]]
t3 = table_builder [["D", [Nothing, Nothing, 0]], ["C", ["g", "h", "i"]]]
@ -95,7 +111,7 @@ spec setup =
t4.should_fail_with No_Output_Columns
t4.catch.to_display_text . should_equal "No columns in the result, because of another problem: Unmatched columns are set to be dropped, but no common column names were found."
Test.specify "should ignore column names when matching by position" <|
group_builder.specify "should ignore column names when matching by position" <|
t1 = table_builder [["A", [1, 2, 3]], ["Y", ["a", "b", "c"]]]
t2 = table_builder [["X", [4, 5, 6]], ["A", ["d", "e", "f"]]]
@ -104,7 +120,7 @@ spec setup =
t3.at "A" . to_vector . should_equal [1, 2, 3, 4, 5, 6]
t3.at "Y" . to_vector . should_equal ["a", "b", "c", "d", "e", "f"]
Test.specify "should fill extra columns (positional matching) with nulls and report a warning by default" <|
group_builder.specify "should fill extra columns (positional matching) with nulls and report a warning by default" <|
t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]]
t2 = table_builder [["A1", [4, 5, 6]], ["B1", ["d", "e", "f"]], ["C", [7, 8, 9]]]
t3 = table_builder [["A2", [10, 20, 30]]]
@ -118,7 +134,7 @@ spec setup =
problems = [Column_Count_Mismatch.Error 3 1]
Problems.test_problem_handling action problems tester
Test.specify "should keep the least number of columns with positional matching if asked to drop unmatched ones" <|
group_builder.specify "should keep the least number of columns with positional matching if asked to drop unmatched ones" <|
t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]]
t2 = table_builder [["A1", [4, 5, 6]], ["B1", ["d", "e", "f"]], ["C", [7, 8, 9]]]
t3 = table_builder [["A2", [10, 20, 30]]]
@ -128,7 +144,7 @@ spec setup =
expect_column_names ["A"] t4
t4.at "A" . to_vector . should_equal [1, 2, 3, 4, 5, 6, 10, 20, 30]
Test.specify "should keep the greatest number of columns with positional matching if asked to keep unmatched ones, filling missing values with null and reporting no problems" <|
group_builder.specify "should keep the greatest number of columns with positional matching if asked to keep unmatched ones, filling missing values with null and reporting no problems" <|
t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]]
t2 = table_builder [["A1", [4, 5, 6]], ["B1", ["d", "e", "f"]], ["C", [7, 8, 9]]]
t3 = table_builder [["A2", [10, 20, 30]]]
@ -140,7 +156,7 @@ spec setup =
t4.at "B1" . to_vector . should_equal ["a", "b", "c", "d", "e", "f", Nothing, Nothing, Nothing]
t4.at "C" . to_vector . should_equal [Nothing, Nothing, Nothing, 7, 8, 9, Nothing, Nothing, Nothing]
Test.specify "should use column names from the first table that has enough columns in positional matching mode" <|
group_builder.specify "should use column names from the first table that has enough columns in positional matching mode" <|
t1 = table_builder [["A", [1, 2, 3]]]
t2 = table_builder [["X", [4, 5, 6]], ["A", ["a", "b", "c"]]]
@ -164,14 +180,14 @@ spec setup =
t8 = t1.union [t2, t5, t6, t7] match_columns=Match_Columns.By_Position
expect_column_names ["Y", "A", "Z"] t8
Test.specify "should allow to merge a table with itself" <|
group_builder.specify "should allow to merge a table with itself" <|
t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]]
t2 = t1.union [t1, t1]
expect_column_names ["A", "B"] t2
t2.at "A" . to_vector . should_equal [1, 2, 3, 1, 2, 3, 1, 2, 3]
t2.at "B" . to_vector . should_equal ["a", "b", "c", "a", "b", "c", "a", "b", "c"]
Test.specify "should not de-duplicate rows" <|
group_builder.specify "should not de-duplicate rows" <|
t1 = table_builder [["A", [1, 1, 3]], ["B", ["a", "a", "c"]]]
t2 = table_builder [["A", [1, 2, 2]], ["B", ["a", "b", "b"]]]
t3 = t1.union t2
@ -179,7 +195,7 @@ spec setup =
t3.at "A" . to_vector . should_equal [1, 1, 3, 1, 2, 2]
t3.at "B" . to_vector . should_equal ["a", "a", "c", "a", "b", "b"]
Test.specify "should gracefully handle the case where no tables to union were provided" <|
group_builder.specify "should gracefully handle the case where no tables to union were provided" <|
t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]]
check_same table =
@ -196,9 +212,9 @@ spec setup =
check_same <| t1.union [] keep_unmatched_columns=True
check_same <| t1.union [] match_columns=Match_Columns.By_Position keep_unmatched_columns=True
Test.specify "should correctly unify text columns of various lengths" pending=(if setup.test_selection.fixed_length_text_columns.not then "Fixed-length Char columns are not supported by this backend.") <|
t1 = table_builder [["A", ["a", "b", "c"]]] . cast "A" (Value_Type.Char size=1 variable_length=False)
t2 = table_builder [["A", ["xyz", "abc", "def"]]] . cast "A" (Value_Type.Char size=3 variable_length=False)
group_builder.specify "should correctly unify text columns of various lengths" pending=(if setup.test_selection.fixed_length_text_columns.not then "Fixed-length Char columns are not supported by this backend.") <|
t1 = (table_builder [["A", ["a", "b", "c"]]]) . cast "A" (Value_Type.Char size=1 variable_length=False)
t2 = (table_builder [["A", ["xyz", "abc", "def"]]]) . cast "A" (Value_Type.Char size=3 variable_length=False)
t1.at "A" . value_type . should_equal (Value_Type.Char size=1 variable_length=False)
t2.at "A" . value_type . should_equal (Value_Type.Char size=3 variable_length=False)
@ -210,7 +226,7 @@ spec setup =
Test.with_clue "t3[A].value_type="+(t3.at "A").value_type.to_display_text+": " <|
t3.at "A" . value_type . variable_length . should_be_true
Test.specify "should find a common type that will fit the merged columns" <|
group_builder.specify "should find a common type that will fit the merged columns" <|
t1 = table_builder [["A", [0, 1, 2]]]
t2 = table_builder [["A", [1.0, 2.0, 2.5]]]
@ -239,7 +255,7 @@ spec setup =
# Database backends are not required to support Mixed types.
if setup.is_database.not then
Test.specify "should resort to Mixed value type only if at least one column is already Mixed" <|
group_builder.specify "should resort to Mixed value type only if at least one column is already Mixed" <|
## TODO currently no way to retype a column to Mixed, so we are
using a custom object
t1 = table_builder [["A", [1, 2, 3]], ["mixed", ["a", My_Type.Value 1 2, Nothing]]]
@ -265,7 +281,7 @@ spec setup =
t6.at "mixed" . to_vector . should_equal ["X", "y", "a", My_Type.Value 1 2, Nothing, 1, 2, 3, True, False]
t6.at "mixed" . value_type . should_equal Value_Type.Mixed
Test.specify "if no common type can be found, should report error and drop the problematic column" <|
group_builder.specify "if no common type can be found, should report error and drop the problematic column" <|
t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]], ["C", [True, False, Nothing]]]
t2 = table_builder [["C", ["x", "Y", "Z"]], ["A", [4, 5, 6]], ["B", [1, 2, 3]]]
@ -292,7 +308,7 @@ spec setup =
r4 = t3.union t4 match_columns=Match_Columns.By_Position on_problems=Problem_Behavior.Report_Error
r4.should_fail_with No_Common_Type
Test.specify "if type widening is not allowed, should use the type from first table that contained the given column" <|
group_builder.specify "if type widening is not allowed, should use the type from first table that contained the given column" <|
t1 = table_builder [["A", [1, 2, 3]]]
t2 = table_builder [["A", [4, 5, 6]], ["B", [1.2, 2.2, 3.1]]]
@ -306,7 +322,7 @@ spec setup =
t2.at "B" . value_type . is_floating_point . should_be_true
t3.at "B" . value_type . is_floating_point . should_be_true
Test.specify "if type widening is not allowed and types do not match, should report error and drop the problematic column" <|
group_builder.specify "if type widening is not allowed and types do not match, should report error and drop the problematic column" <|
t1 = table_builder [["A", [1, 2, 3]], ["B", [1, 2, 3]], ["E", [1.1, 2.5, 3.2]]]
t2 = table_builder [["A", [4, 5, 6]], ["B", [1.5, 2.5, 3.5]], ["E", [1, 2, 3]]]
@ -332,7 +348,7 @@ spec setup =
# Database backends are not required to support Mixed types.
if setup.is_database.not then
Test.specify "even if type widening is not allowed, if the first column is mixed, it should accept any column to be concatenated to it" <|
group_builder.specify "even if type widening is not allowed, if the first column is mixed, it should accept any column to be concatenated to it" <|
t1 = table_builder [["X", ["a", 1, Nothing]]]
t2 = table_builder [["X", [1]]]
t3 = table_builder [["X", [1.2, 2.3, 3.4]]]
@ -347,7 +363,7 @@ spec setup =
t6.at "X" . value_type . should_equal Value_Type.Mixed
t6.at "X" . to_vector . should_equal ["a", 1, Nothing, 1, 1.2, 2.3, 3.4, "a", "b", True, False]
Test.specify "when finding a common type for numeric columns to be Float, any precision loss should be reported" <|
group_builder.specify "when finding a common type for numeric columns to be Float, any precision loss should be reported" <|
t1 = table_builder [["X", [1, (2^62)-1, 3]]]
t2 = table_builder [["X", [1.5, 2.5, 3.5]]]
t3 = table_builder [["X", [(2^100)+1, 2^10, 2]]]
@ -365,7 +381,7 @@ spec setup =
# Losing precision on (2^62)-1 and 2^100+1.
w.affected_rows_count . should_equal 2
Test.specify "if type mismatches cause all columns to be dropped, fail with No_Output_Columns" <|
group_builder.specify "if type mismatches cause all columns to be dropped, fail with No_Output_Columns" <|
t1 = table_builder [["A", [1, 2, 3]]]
t2 = table_builder [["A", ['x']]]
@ -376,11 +392,13 @@ spec setup =
e5 = t1.union t4 allow_type_widening=False on_problems=Problem_Behavior.Ignore
e5.should_fail_with No_Output_Columns
t1 = table_builder [["X", [0, 1, 2]], ["Y", ['aa', 'bb', 'cc']]] . cast "X" (Value_Type.Integer Bits.Bits_16) . cast "Y" (Value_Type.Char size=2 variable_length=False)
t2 = table_builder [["X", [3, 4, 5]], ["Y", ['x', 'y', 'z']]] . cast "X" (Value_Type.Integer Bits.Bits_32) . cast "Y" (Value_Type.Char size=1 variable_length=False)
group_builder.specify "should find a common type (2)" <|
t1 = (table_builder [["X", [0, 1, 2]], ["Y", ['aa', 'bb', 'cc']]]) . cast "X" (Value_Type.Integer Bits.Bits_16) . cast "Y" (Value_Type.Char size=2 variable_length=False)
t2 = (table_builder [["X", [3, 4, 5]], ["Y", ['x', 'y', 'z']]]) . cast "X" (Value_Type.Integer Bits.Bits_32) . cast "Y" (Value_Type.Char size=1 variable_length=False)
supports_complex_types = (t1.is_error || t2.is_error || Problems.get_attached_warnings t1 . not_empty).not
if supports_complex_types then
Test.specify "should find a common type (2)" <|
case supports_complex_types of
False -> Nothing
True ->
t12 = t1.union t2
Problems.assume_no_problems t12
t12.at "X" . value_type . should_equal (Value_Type.Integer Bits.Bits_32)
@ -389,7 +407,13 @@ spec setup =
t12.at "X" . to_vector . should_equal [0, 1, 2, 3, 4, 5]
t12.at "Y" . to_vector . should_equal ['aa', 'bb', 'cc', 'x', 'y', 'z']
Test.specify "should fail to find a common type if widening is not allowed (2)" <|
group_builder.specify "should fail to find a common type if widening is not allowed (2)" <|
t1 = (table_builder [["X", [0, 1, 2]], ["Y", ['aa', 'bb', 'cc']]]) . cast "X" (Value_Type.Integer Bits.Bits_16) . cast "Y" (Value_Type.Char size=2 variable_length=False)
t2 = (table_builder [["X", [3, 4, 5]], ["Y", ['x', 'y', 'z']]]) . cast "X" (Value_Type.Integer Bits.Bits_32) . cast "Y" (Value_Type.Char size=1 variable_length=False)
supports_complex_types = (t1.is_error || t2.is_error || Problems.get_attached_warnings t1 . not_empty).not
case supports_complex_types of
False -> Nothing
True ->
r1 = t1.union t2 allow_type_widening=False
r1.should_fail_with No_Output_Columns
r1.catch.cause . should_be_a Column_Type_Mismatch
@ -398,7 +422,7 @@ spec setup =
# And this should report Column_Type_Mismatch as the more important error too:
t1.union t2 allow_type_widening=False on_problems=Problem_Behavior.Report_Error . should_fail_with Column_Type_Mismatch
Test.specify "should gracefully handle tables from different backends" <|
group_builder.specify "should gracefully handle tables from different backends" <|
t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]]
alternative_connection = Database.connect (SQLite In_Memory)

View File

@ -8,22 +8,36 @@ from Standard.Table.Errors import all
from Standard.Database import all
from Standard.Database.Errors import Unsupported_Database_Operation, Integrity_Error
from Standard.Test import Test, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Common_Table_Operations.Util import expect_column_names, run_default_backend
main = run_default_backend spec
type Data
Value ~connection
spec setup =
setup create_connection_fn =
Data.Value (create_connection_fn Nothing)
teardown self = self.connection.close
add_specs suite_builder setup =
prefix = setup.prefix
table_builder = setup.table_builder
materialize = setup.materialize
create_connection_fn = setup.create_connection_func
db_todo = if setup.is_database.not then Nothing else "Table.zip is still WIP for the DB backend."
Test.group prefix+"Table.zip" pending=db_todo <|
suite_builder.group prefix+"Table.zip" pending=db_todo group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
if setup.is_database.not then
Test.specify "should allow to zip two tables, preserving memory layout order" <|
group_builder.specify "should allow to zip two tables, preserving memory layout order" <|
t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]]
t2 = table_builder [["Z", ['a', 'b', 'c']], ["W", ['x', 'y', 'z']]]
@ -38,7 +52,7 @@ spec setup =
expected_rows = [r0, r1, r2]
r.should_equal expected_rows
Test.specify "should allow to zip two tables, preserving the order defined by `order_by`" <|
group_builder.specify "should allow to zip two tables, preserving the order defined by `order_by`" <|
t1 = table_builder [["X", [100, 2]], ["Y", [4, 5]]]
t2 = table_builder [["Z", ['a', 'b']], ["W", ['x', 'd']]]
@ -56,7 +70,7 @@ spec setup =
expected_rows = [r0, r1]
r.should_equal expected_rows
Test.specify "should report unmatched rows if the row counts do not match and pad them with nulls" <|
group_builder.specify "should report unmatched rows if the row counts do not match and pad them with nulls" <|
t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]]
t2 = table_builder [["Z", ['a', 'b']], ["W", ['x', 'd']]]
@ -80,7 +94,7 @@ spec setup =
problems_2 = [Row_Count_Mismatch.Error 2 3]
Problems.test_problem_handling action_2 problems_2 tester_2
Test.specify "should allow to keep the unmatched rows padded with nulls without reporting problems" <|
group_builder.specify "should allow to keep the unmatched rows padded with nulls without reporting problems" <|
t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]]
t2 = table_builder [["Z", ['a']], ["W", ['x']]]
@ -92,7 +106,7 @@ spec setup =
t3.at "Z" . to_vector . should_equal ['a', Nothing, Nothing]
t3.at "W" . to_vector . should_equal ['x', Nothing, Nothing]
Test.specify "should allow to drop the unmatched rows" <|
group_builder.specify "should allow to drop the unmatched rows" <|
t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]]
t2 = table_builder [["Z", ['a']], ["W", ['x']]]
@ -104,7 +118,7 @@ spec setup =
t3.at "Z" . to_vector . should_equal ['a']
t3.at "W" . to_vector . should_equal ['x']
Test.specify "should work when zipping with an empty table" <|
group_builder.specify "should work when zipping with an empty table" <|
t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]]
t2 = table_builder [["Z", ['a']], ["W", ['c']]]
# Workaround to easily create empty table until table builder allows that directly.
@ -137,7 +151,7 @@ spec setup =
t6.row_count . should_equal 0
t6.at "X" . to_vector . should_equal []
Test.specify "should not report unmatched rows for rows that simply are all null" <|
group_builder.specify "should not report unmatched rows for rows that simply are all null" <|
t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]]
t2 = table_builder [["Z", ['a', Nothing, Nothing]], ["W", ['b', Nothing, Nothing]]]
t3 = t1.zip t2 on_problems=Problem_Behavior.Report_Error
@ -148,7 +162,7 @@ spec setup =
t3.at "Z" . to_vector . should_equal ['a', Nothing, Nothing]
t3.at "W" . to_vector . should_equal ['b', Nothing, Nothing]
Test.specify "should rename columns of the right table to avoid duplicates" <|
group_builder.specify "should rename columns of the right table to avoid duplicates" <|
t1 = table_builder [["X", [1, 2]], ["Y", [5, 6]], ["Right Y", [7, 8]]]
t2 = table_builder [["X", ['a']], ["Y", ['d']]]
@ -170,7 +184,7 @@ spec setup =
expect_column_names ["X", "Y", "Right Y", "Right X 1", "Right X"] (t1.zip t4)
expect_column_names ["X", "Right X", "Right X 1", "Y", "Right Y"] (t4.zip t1)
Test.specify "should report both row count mismatch and duplicate column warnings at the same time" <|
group_builder.specify "should report both row count mismatch and duplicate column warnings at the same time" <|
t1 = table_builder [["X", [1, 2]], ["Right X", [5, 6]]]
t2 = table_builder [["X", ['a']], ["Z", ['d']]]
@ -178,7 +192,7 @@ spec setup =
expected_problems = [Row_Count_Mismatch.Error 2 1, Duplicate_Output_Column_Names.Error ["Right X"]]
Problems.get_attached_warnings t3 . should_contain_the_same_elements_as expected_problems
Test.specify "should allow to zip the table with itself" <|
group_builder.specify "should allow to zip the table with itself" <|
## Even though this does not seem very useful, we should verify that
this edge case works correctly. It may be especially fragile in
the Database backend.
@ -192,7 +206,7 @@ spec setup =
t2.at "Right Y" . to_vector . should_equal [4, 5]
if setup.is_database.not then
Test.specify "should correctly pad/truncate all kinds of column types" <|
group_builder.specify "should correctly pad/truncate all kinds of column types" <|
primitives = [["ints", [1, 2, 3]], ["strs", ['a', 'b', 'c']], ["bools", [True, Nothing, False]]]
times = [["dates", [Date.new 1999 1 1, Date.new 2000 4 1, Date.new 2001 1 2]], ["times", [Time_Of_Day.new 23 59, Time_Of_Day.new 0 0, Time_Of_Day.new 12 34]], ["datetimes", [Date_Time.new 1999 1 1 23 59, Date_Time.new 2000 4 1 0 0, Date_Time.new 2001 1 2 12 34]]]
t = table_builder <|
@ -239,7 +253,7 @@ spec setup =
padded.at "datetimes" . value_type . should_equal Value_Type.Date_Time
padded.at "mixed" . value_type . should_equal Value_Type.Mixed
Test.specify "should gracefully handle tables from different backends" <|
group_builder.specify "should gracefully handle tables from different backends" <|
t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]]
alternative_connection = Database.connect (SQLite In_Memory)
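
This file establishes the group-scoped lifecycle pattern that repeats across the migrated specs: a lazy `Data` holder, a `group_builder.teardown` hook (which, as the diff's usage suggests, runs once after the group finishes), and a local `table_builder` that threads the shared connection. A condensed sketch of the shape; the group name and column payloads are illustrative:

```
type Data
    Value ~connection

    setup create_connection_fn =
        Data.Value (create_connection_fn Nothing)

    teardown self = self.connection.close

add_specs suite_builder setup =
    suite_builder.group "Example group" group_builder->
        data = Data.setup setup.create_connection_func
        # Runs once after all specs in this group have executed.
        group_builder.teardown <|
            data.teardown
        # Shadow the setup-provided builder so every table in this
        # group is created on the shared connection.
        table_builder cols =
            setup.table_builder cols connection=data.connection
        group_builder.specify "builds tables on the shared connection" <|
            t = table_builder [["X", [1, 2, 3]]]
            t.row_count . should_equal 3
```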

View File

@ -38,12 +38,14 @@ type Test_Setup
Arguments:
- prefix: A name to prepend to test groups to identify the tested
backend.
- table: A table using the tested backend containing data from
`data/data.csv`.
- empty_table: An empty table using the tested backend.
- table_fn: A function that takes Nothing and returns a table using the
tested backend containing data from `data/data.csv`.
- empty_table_fn: A function that takes Nothing and returns an empty
table using the tested backend.
- table_builder: A function used to build a table using the tested
backend from a vector of columns represented as pairs of name and
vector of values.
vector of values. The second, optional argument is a connection, or Nothing.
An example signature of the method is `table_builder cols connection=Nothing`.
- materialize: A helper function which materializes a table from the
tested backend as an in-memory table. Used to easily inspect results of
a particular query/operation.
@ -54,9 +56,9 @@ type Test_Setup
- aggregate_test_selection: A selection of which aggregate test suites
should be run. Can be used to skip checks for backends which do not
support particular features.
- connection: A related database connection or Nothing for in-memory
tests.
Config prefix table empty_table table_builder materialize is_database test_selection aggregate_test_selection connection
- create_connection_func: A function that takes Nothing and creates a related
database connection, or Nothing for in-memory tests.
Config prefix table_fn empty_table_fn (table_builder : (Vector Any -> (Any|Nothing)) -> Any) materialize is_database test_selection aggregate_test_selection create_connection_func
## Specifies if the given Table backend supports custom Enso types.
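
Hypothetical call sites for the updated `table_builder`, to make the two modes concrete; the column payloads and the `data` holder are illustrative, not taken from this diff:

```
# In-memory: the connection argument can be omitted (defaults to Nothing).
t_mem = setup.table_builder [["X", [1, 2]], ["Y", ["a", "b"]]]
# Database-backed: thread the connection created in the group's Data.setup.
t_db = setup.table_builder [["X", [1, 2]]] connection=data.connection
```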
@ -119,32 +121,32 @@ type Test_Selection
Replace_Params that a backend supports.
Config supports_case_sensitive_columns=True order_by=True natural_ordering=False case_insensitive_ordering=True order_by_unicode_normalization_by_default=False case_insensitive_ascii_only=False allows_mixed_type_comparisons=True supports_unicode_normalization=False is_nan_and_nothing_distinct=True distinct_returns_first_row_from_group_if_ordered=True date_time=True fixed_length_text_columns=False length_restricted_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=False different_size_integer_types=True supports_8bit_integer=False supports_decimal_type=False supports_time_duration=False supports_nanoseconds_in_time=False supports_mixed_columns=False supported_replace_params=Nothing
spec setup =
Core_Spec.spec setup
Select_Columns_Spec.spec setup
Column_Name_Edge_Cases_Spec.spec setup
Column_Operations_Spec.spec setup
Derived_Columns_Spec.spec setup
Date_Time_Spec.spec setup
Conversion_Spec.spec setup
Aggregate_Spec.spec setup
Filter_Spec.spec setup
Map_Spec.spec setup
Missing_Values_Spec.spec setup
Order_By_Spec.spec setup
Take_Drop_Spec.spec setup
Expression_Spec.spec detailed=False setup
Join_Spec.spec setup
Cross_Join_Spec.spec setup
Zip_Spec.spec setup
Union_Spec.spec setup
Lookup_Spec.spec setup
Distinct_Spec.spec setup
Cross_Tab_Spec.spec setup
Transpose_Spec.spec setup
Add_Row_Number_Spec.spec setup
Integration_Tests.spec setup
Temp_Column_Spec.spec setup
Nothing_Spec.spec setup
add_specs suite_builder setup =
Core_Spec.add_specs suite_builder setup
Select_Columns_Spec.add_specs suite_builder setup
Column_Name_Edge_Cases_Spec.add_specs suite_builder setup
Column_Operations_Spec.add_specs suite_builder setup
Derived_Columns_Spec.add_specs suite_builder setup
Date_Time_Spec.add_specs suite_builder setup
Conversion_Spec.add_specs suite_builder setup
Aggregate_Spec.add_specs suite_builder setup
Filter_Spec.add_specs suite_builder setup
Map_Spec.add_specs suite_builder setup
Missing_Values_Spec.add_specs suite_builder setup
Order_By_Spec.add_specs suite_builder setup
Take_Drop_Spec.add_specs suite_builder setup
Expression_Spec.add_specs suite_builder detailed=False setup
Join_Spec.add_specs suite_builder setup
Cross_Join_Spec.add_specs suite_builder setup
Zip_Spec.add_specs suite_builder setup
Union_Spec.add_specs suite_builder setup
Lookup_Spec.add_specs suite_builder setup
Distinct_Spec.add_specs suite_builder setup
Cross_Tab_Spec.add_specs suite_builder setup
Transpose_Spec.add_specs suite_builder setup
Add_Row_Number_Spec.add_specs suite_builder setup
Integration_Tests.add_specs suite_builder setup
Temp_Column_Spec.add_specs suite_builder setup
Nothing_Spec.add_specs suite_builder setup
main = run_default_backend spec
main = run_default_backend add_specs
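
With every module exposing `add_specs suite_builder setup`, the per-backend runner only has to assemble and run a suite. A sketch of the plausible shape of `run_default_backend` under the builder API; the `Suite.build` and `run_with_filter` names follow the Test_New prototype linked in the notes, and `make_default_setup` is hypothetical:

```
run_default_backend add_specs =
    setup = make_default_setup    # hypothetical: builds the in-memory Test_Setup.Config
    suite = Suite.build suite_builder->
        add_specs suite_builder setup
    suite.run_with_filter
```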

View File

@ -7,42 +7,61 @@ import Standard.Table.Data.Type.Value_Type.Bits
from Standard.Database.Errors import Unsupported_Database_Operation
from Standard.Test import Test, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Common_Table_Operations.Util import run_default_backend
main = run_default_backend spec
main = run_default_backend add_specs
spec setup =
type Data
Value ~connection
setup create_connection_fn = Data.Value <|
connection = create_connection_fn Nothing
connection
teardown self =
self.connection.close
add_specs suite_builder setup =
prefix = setup.prefix
table_builder = setup.table_builder
create_connection_fn = setup.create_connection_func
suite_builder.group prefix+"Column.map" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
Test.group prefix+"Column.map" <|
if setup.is_database then
Test.specify "should report unsupported error" <|
group_builder.specify "should report unsupported error" <|
t = table_builder [["X", [1, 2, 3]]]
t.at "X" . map (x-> x + 1) . should_fail_with Unsupported_Database_Operation
if setup.is_database.not then
Test.specify "should allow to map a column with an arbitrary function" <|
group_builder.specify "should allow to map a column with an arbitrary function" <|
t = table_builder [["X", [1, 2, 3]]]
f x = 2*x + 1
t.at "X" . map f . to_vector . should_equal [3, 5, 7]
Test.specify "should forward dataflow errors that occur within the map" <|
group_builder.specify "should forward dataflow errors that occur within the map" <|
t = table_builder [["X", [1, 2, 3]]]
f x = if x == 2 then Error.throw (Illegal_State.Error "MY ERROR") else 2*x + 1
c1 = t.at "X" . map f
c1.should_fail_with Illegal_State
c1.catch.message . should_equal "MY ERROR"
Test.specify "should forward panics that occur within the map" <|
group_builder.specify "should forward panics that occur within the map" <|
t = table_builder [["X", [1, 2, 3]]]
f x = if x == 2 then Panic.throw (Illegal_State.Error "MY PANIC") else 2*x + 1
Test.expect_panic_with (t.at "X" . map f) Illegal_State
Test.specify "should forward warnings that are attached to results of function invocation" <|
group_builder.specify "should forward warnings that are attached to results of function invocation" <|
t = table_builder [["X", [1, 2, 3, 4]]]
f x =
if (x % 2) == 0 then Warning.attach (Illegal_State.Error "MY WARNING "+x.to_text) (2*x + 1) else 2*x + 1
@ -51,7 +70,7 @@ spec setup =
warnings.map Error.unwrap . map .message . should_contain_the_same_elements_as ["MY WARNING 2", "MY WARNING 4"]
c1.to_vector . should_equal [3, 5, 7, 9]
Test.specify "should respect the expected_value_type" <|
group_builder.specify "should respect the expected_value_type" <|
t = table_builder [["X", [1, 2, 3]]]
f x = 2*x + 1
c2 = t.at "X" . map f expected_value_type=Value_Type.Float
@ -89,7 +108,7 @@ spec setup =
c7.value_type . should_equal Value_Type.Mixed
c7.to_vector . should_equal [2, "A", 4]
Test.specify "should fail with Invalid_Value_Type if some results do not match the expected_value_type" <|
group_builder.specify "should fail with Invalid_Value_Type if some results do not match the expected_value_type" <|
t = table_builder [["X", [1, 2, 3]]]
c1 = t.at "X"
@ -132,32 +151,40 @@ spec setup =
r8.should_fail_with Invalid_Value_Type
r8.catch.to_display_text . should_contain "Expected type Date, but got a value 42 of type Integer (16 bits)"
Test.group prefix+"Column.zip" <|
suite_builder.group prefix+"Column.zip" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
if setup.is_database then
Test.specify "should report unsupported error" <|
group_builder.specify "should report unsupported error" <|
t = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]]
f x y = 10*x + y
(t.at "X") . zip (t.at "Y") f . should_fail_with Unsupported_Database_Operation
if setup.is_database.not then
Test.specify "should allow to zip two columns with an arbitrary function" <|
group_builder.specify "should allow to zip two columns with an arbitrary function" <|
t = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]]
f x y = 10*x + y
(t.at "X") . zip (t.at "Y") f . to_vector . should_equal [14, 25, 36]
Test.specify "should forward dataflow errors that occur within the zip" <|
group_builder.specify "should forward dataflow errors that occur within the zip" <|
t = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]]
f x y = if x == 2 then Error.throw (Illegal_State.Error "MY ERROR") else 10*x + y
c1 = (t.at "X") . zip (t.at "Y") f
c1.should_fail_with Illegal_State
c1.catch.message . should_equal "MY ERROR"
Test.specify "should forward panics that occur within the zip" <|
group_builder.specify "should forward panics that occur within the zip" <|
t = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]]
f x y = if x == 2 then Panic.throw (Illegal_State.Error "MY PANIC") else 10*x + y
Test.expect_panic_with ((t.at "X") . zip (t.at "Y") f) Illegal_State
Test.specify "should forward warnings that are attached to results of function invocation" <|
group_builder.specify "should forward warnings that are attached to results of function invocation" <|
t = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]]
f x y =
if x == 2 then Warning.attach (Illegal_State.Error "MY WARNING") (10*x + y) else 10*x + y
@ -166,7 +193,7 @@ spec setup =
w1.message . should_equal "MY WARNING"
c1.to_vector . should_equal [14, 25, 36]
Test.specify "should respect the expected_value_type" <|
group_builder.specify "should respect the expected_value_type" <|
t = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]]
f x y = 10*x + y
@ -189,7 +216,7 @@ spec setup =
c4.value_type . should_equal (Value_Type.Char size=2 variable_length=False)
c4.to_vector . should_equal ["a4", "b5", "c6"]
Test.specify "should fail with Invalid_Value_Type if some results do not match the expected_value_type" <|
group_builder.specify "should fail with Invalid_Value_Type if some results do not match the expected_value_type" <|
t = table_builder [["X", [100, 110, 120]], ["Y", [1, 3, 1]]]
r1 = (t.at "X") . zip (t.at "Y") (*) expected_value_type=Value_Type.Byte
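
Note the `~` in `Value ~connection`: it makes the constructor field lazy, so the database connection is only opened the first time a spec reads `data.connection`, not when the group is declared. A minimal illustration; the `IO.println` trace is illustrative:

```
from Standard.Base import all

type Data
    Value ~connection

make_data create_connection_fn = Data.Value <|
    IO.println "opening connection"    # runs on first access, not at construction
    create_connection_fn Nothing

example create_connection_fn =
    data = make_data create_connection_fn    # no connection opened yet
    data.connection                          # "opening connection" prints here
```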

View File

@ -6,19 +6,25 @@ from Standard.Table.Errors import all
from Standard.Database.Errors import Unsupported_Database_Operation
from Standard.Test import Test
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Common_Table_Operations.Util import run_default_backend
main = run_default_backend spec
main = run_default_backend add_specs
spec setup =
prefix = setup.prefix
table_builder = setup.table_builder
test_selection = setup.test_selection
Test.group prefix+"Dropping Missing Values" <|
t0 = table_builder [["a", [0, 1, Nothing, 42, Nothing, 5]], ["b", [True, Nothing, True, False, Nothing, False]], ["c", ["", "foo", "bar", Nothing, Nothing, " "]]]
type Data
Value ~data
connection self = self.data.at 0
t0 self = self.data.at 1
t1 self = self.data.at 2
t3 self = self.data.at 3
t4 self = self.data.at 4
setup create_connection_fn table_builder =
connection = create_connection_fn Nothing
t0 = table_builder [["a", [0, 1, Nothing, 42, Nothing, 5]], ["b", [True, Nothing, True, False, Nothing, False]], ["c", ["", "foo", "bar", Nothing, Nothing, " "]]] connection=connection
t1 =
a = ["a", [1, Nothing, 3, 4]]
b = ["b", ["a", "b", Nothing, " "]]
@ -26,22 +32,46 @@ spec setup =
d = ["d", [Nothing, True, False, True]]
e = ["e", ["", "", "foo", "bar"]]
f = ["f", [Nothing, "", Nothing, ""]]
table_builder [a, b, c, d, e, f]
table_builder [a, b, c, d, e, f] connection=connection
t3 = table_builder [["X", [2.0, 1.5, Number.nan, Number.nan]], ["Y", [Nothing, 2.0, Nothing, 5.0]]]
t4 =
c = ["c", [10, 20, 40, 30]]
g = ["g", [Number.nan, 1, 2, 3.4]]
h = ["h", [Number.nan, Nothing, Number.nan, Nothing]]
table_builder [c, g, h]
Data.Value [connection, t0, t1, t3, t4]
Test.specify "filter_blank_rows should drop rows that contain at least one missing cell" <|
d = t0.filter_blank_rows when=Blank_Selector.Any_Cell
teardown self =
self.connection.close
add_specs suite_builder setup =
prefix = setup.prefix
create_connection_fn = setup.create_connection_func
test_selection = setup.test_selection
suite_builder.group prefix+"Dropping Missing Values" group_builder->
data = Data.setup create_connection_fn setup.table_builder
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "filter_blank_rows should drop rows that contain at least one missing cell" <|
d = data.t0.filter_blank_rows when=Blank_Selector.Any_Cell
d.row_count . should_equal 1
d.at "a" . to_vector . should_equal [5]
d.at "b" . to_vector . should_equal [False]
d.at "c" . to_vector . should_equal [" "]
Test.specify "filter_blank_rows should drop rows that are all blank" <|
d2 = t0.filter_blank_rows when=Blank_Selector.All_Cells
group_builder.specify "filter_blank_rows should drop rows that are all blank" <|
d2 = data.t0.filter_blank_rows when=Blank_Selector.All_Cells
d2.at "a" . to_vector . should_equal [0, 1, Nothing, 42, 5]
d2.at "b" . to_vector . should_equal [True, Nothing, True, False, False]
d2.at "c" . to_vector . should_equal ["", "foo", "bar", Nothing, " "]
Test.specify "filter_blank_rows should deal with edge cases" <|
group_builder.specify "filter_blank_rows should deal with edge cases" <|
## TODO currently our builder does not allow all-null tables, so we
create one with a 0 and remove it by filter. See #6159.
t0 = table_builder [["X", [0, Nothing, Nothing, Nothing]]]
@ -58,7 +88,7 @@ spec setup =
t4.row_count . should_equal 0
t4.at "X" . to_vector . should_equal []
Test.specify "filter_blank_rows should work with a table with many columns" <|
group_builder.specify "filter_blank_rows should work with a table with many columns" <|
cols = Vector.new 60 i->
["col_"+i.to_text, [i, Nothing]]
t1 = table_builder cols
@ -67,85 +97,79 @@ spec setup =
t2.row_count . should_equal 1
t2.at 42 . to_vector . should_equal [42]
Test.specify "should allow to select blank columns" <|
r1 = t1.select_blank_columns
group_builder.specify "should allow to select blank columns" <|
r1 = data.t1.select_blank_columns
r1.columns.map .name . should_equal ["f"]
r1.at "f" . to_vector . should_equal [Nothing, "", Nothing, ""]
r2 = t1.select_blank_columns when=Blank_Selector.Any_Cell
r2 = data.t1.select_blank_columns when=Blank_Selector.Any_Cell
r2.columns.map .name . should_equal ["a", "b", "d", "e", "f"]
r2.at "d" . to_vector . should_equal [Nothing, True, False, True]
Test.specify "should allow to remove blank columns" <|
r1 = t1.remove_blank_columns
group_builder.specify "should allow to remove blank columns" <|
r1 = data.t1.remove_blank_columns
r1.columns.map .name . should_equal ["a", "b", "c", "d", "e"]
r1.at "a" . to_vector . should_equal [1, Nothing, 3, 4]
r2 = t1.remove_blank_columns when=Blank_Selector.Any_Cell
r2 = data.t1.remove_blank_columns when=Blank_Selector.Any_Cell
r2.columns.map .name . should_equal ["c"]
r2.at "c" . to_vector . should_equal [10, 20, 30, 40]
t3 = table_builder [["X", [2.0, 1.5, Number.nan, Number.nan]], ["Y", [Nothing, 2.0, Nothing, 5.0]]]
t4 =
c = ["c", [10, 20, 40, 30]]
g = ["g", [Number.nan, 1, 2, 3.4]]
h = ["h", [Number.nan, Nothing, Number.nan, Nothing]]
table_builder [c, g, h]
if test_selection.is_nan_and_nothing_distinct then
Test.specify "should not treat NaNs as blank by default" <|
r1 = t3.filter_blank_rows when=Blank_Selector.Any_Cell
group_builder.specify "should not treat NaNs as blank by default" <|
r1 = data.t3.filter_blank_rows when=Blank_Selector.Any_Cell
# We cannot use `Vector.==` because `NaN != NaN`.
r1.at "X" . to_vector . to_text . should_equal "[1.5, NaN]"
r1.at "Y" . to_vector . should_equal [2.0, 5.0]
r2 = t3.filter_blank_rows when=Blank_Selector.All_Cells
r2 = data.t3.filter_blank_rows when=Blank_Selector.All_Cells
r2.at "X" . to_vector . to_text . should_equal "[2.0, 1.5, NaN, NaN]"
r2.at "Y" . to_vector . should_equal [Nothing, 2.0, Nothing, 5.0]
r3 = t4.remove_blank_columns
r3 = data.t4.remove_blank_columns
r3.columns.map .name . should_equal ["c", "g", "h"]
r3.at "g" . to_vector . to_text . should_equal "[NaN, 1.0, 2.0, 3.4]"
r4 = t4.remove_blank_columns when=Blank_Selector.Any_Cell
r4 = data.t4.remove_blank_columns when=Blank_Selector.Any_Cell
r4.columns.map .name . should_equal ["c", "g"]
r4.at "g" . to_vector . to_text . should_equal "[NaN, 1.0, 2.0, 3.4]"
r5 = t4.select_blank_columns when=Blank_Selector.Any_Cell
r5 = data.t4.select_blank_columns when=Blank_Selector.Any_Cell
r5.columns.map .name . should_equal ["h"]
r5.at "h" . to_vector . to_text . should_equal "[NaN, Nothing, NaN, Nothing]"
Test.specify "should allow to treat NaNs as blank if asked" <|
r1 = t3.filter_blank_rows when=Blank_Selector.Any_Cell treat_nans_as_blank=True
group_builder.specify "should allow to treat NaNs as blank if asked" <|
r1 = data.t3.filter_blank_rows when=Blank_Selector.Any_Cell treat_nans_as_blank=True
# We cannot use `Vector.==` because `NaN != NaN`.
r1.at "X" . to_vector . should_equal [1.5]
r1.at "Y" . to_vector . should_equal [2.0]
r2 = t3.filter_blank_rows when=Blank_Selector.All_Cells treat_nans_as_blank=True
r2 = data.t3.filter_blank_rows when=Blank_Selector.All_Cells treat_nans_as_blank=True
r2.at "X" . to_vector . to_text . should_equal "[2.0, 1.5, NaN]"
r2.at "Y" . to_vector . should_equal [Nothing, 2.0, 5.0]
r3 = t4.remove_blank_columns when=Blank_Selector.All_Cells treat_nans_as_blank=True
r3 = data.t4.remove_blank_columns when=Blank_Selector.All_Cells treat_nans_as_blank=True
r3.columns.map .name . should_equal ["c", "g"]
r3.at "g" . to_vector . to_text . should_equal "[NaN, 1.0, 2.0, 3.4]"
r4 = t4.select_blank_columns when=Blank_Selector.All_Cells treat_nans_as_blank=True
r4 = data.t4.select_blank_columns when=Blank_Selector.All_Cells treat_nans_as_blank=True
r4.columns.map .name . should_equal ["h"]
r4.at "h" . to_vector . to_text . should_equal "[NaN, Nothing, NaN, Nothing]"
r5 = t4.remove_blank_columns when=Blank_Selector.Any_Cell treat_nans_as_blank=True
r5 = data.t4.remove_blank_columns when=Blank_Selector.Any_Cell treat_nans_as_blank=True
r5.columns.map .name . should_equal ["c"]
r5.at "c" . to_vector . should_equal [10, 20, 40, 30]
r6 = t4.select_blank_columns when=Blank_Selector.Any_Cell treat_nans_as_blank=True
r6 = data.t4.select_blank_columns when=Blank_Selector.Any_Cell treat_nans_as_blank=True
r6.columns.map .name . should_equal ["g", "h"]
r6.at "h" . to_vector . to_text . should_equal "[NaN, Nothing, NaN, Nothing]"
if test_selection.is_nan_and_nothing_distinct.not then
Test.specify "this backend treats NaN as Nothing" <|
t3.at "X" . to_vector . should_equal [2.0, 1.5, Nothing, Nothing]
t3.at "X" . is_nan . to_vector . should_fail_with Unsupported_Database_Operation
group_builder.specify "this backend treats NaN as Nothing" <|
data.t3.at "X" . to_vector . should_equal [2.0, 1.5, Nothing, Nothing]
data.t3.at "X" . is_nan . to_vector . should_fail_with Unsupported_Database_Operation
Test.specify "select_blank_columns and remove_blank_columns should deal with edge cases" <|
group_builder.specify "select_blank_columns and remove_blank_columns should deal with edge cases" <|
t = table_builder [["X", [1, 2, 3, 4]]]
no_rows = t.filter "X" (Filter_Condition.Equal to=0)
no_rows.row_count . should_equal 0
@ -160,8 +184,16 @@ spec setup =
r3.should_fail_with No_Output_Columns
r3.catch.to_display_text . should_equal "No columns in the result, because of another problem: No columns were blank."
Test.group prefix+"Filling Missing Values" <|
Test.specify "should coerce long and double types to double" <|
suite_builder.group prefix+"Filling Missing Values" group_builder->
data = Data.setup create_connection_fn setup.table_builder
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "should coerce long and double types to double" <|
table = table_builder [["X", [1, Nothing, 2, Nothing]], ["Y", [0.5, Nothing, Nothing, 0.25]]]
ints = table.at "X"
ints_filled = ints.fill_nothing 0.5
@ -198,7 +230,7 @@ spec setup =
Test.with_clue "r4.value_type="+vt4.to_display_text+": " <|
vt4.is_floating_point.should_be_true
Test.specify "should keep String, Boolean, Long and Double type" <|
group_builder.specify "should keep String, Boolean, Long and Double type" <|
table = table_builder [["X", ["a", Nothing, "b", Nothing]], ["Y", [True, False, Nothing, Nothing]], ["Z", [1, Nothing, 2, Nothing]], ["W", [0.5, Nothing, Nothing, 0.25]]]
strs = table.at "X"
strs_filled = strs.fill_nothing "X"
@ -220,7 +252,7 @@ spec setup =
decimals_filled.to_vector . should_equal [0.5, 1.0, 1.0, 0.25]
decimals_filled.value_type.is_floating_point.should_be_true
Test.specify "should not allow mixing types by default" <|
group_builder.specify "should not allow mixing types by default" <|
table = table_builder [["X", [1, Nothing, 2, Nothing]], ["Y", [True, False, Nothing, Nothing]], ["Z", [0.5, Nothing, Nothing, 0.25]]]
ints = table.at "X"
ints_filled = ints.fill_nothing False
@ -233,7 +265,7 @@ spec setup =
table.at "Z" . fill_nothing True . should_fail_with No_Common_Type
if setup.is_database.not then
Test.specify "may allow mixed types if explicitly retyped" <|
group_builder.specify "may allow mixed types if explicitly retyped" <|
table = table_builder [["X", [1, Nothing, 2, Nothing]], ["Y", [True, False, Nothing, Nothing]], ["Z", [0.5, Nothing, Nothing, 0.25]]]
mix = table.at "X" . cast Value_Type.Mixed
mix.value_type . should_equal Value_Type.Mixed
@ -249,14 +281,14 @@ spec setup =
c3.to_vector . should_equal [0.5, Nothing, 2, 0.25]
c3.value_type . should_equal Value_Type.Mixed
Test.specify "will keep the Mixed type if was explicitly retyped" <|
group_builder.specify "will keep the Mixed type if was explicitly retyped" <|
table = table_builder [["X", [1, Nothing, 2, Nothing]]]
mix = table.at "X" . cast Value_Type.Mixed
mix_filled = mix.fill_nothing 0
mix_filled.to_vector . should_equal [1, 0, 2, 0]
mix_filled.value_type . should_equal Value_Type.Mixed
Test.specify "should correctly unify text columns of various lengths" pending=(if setup.test_selection.fixed_length_text_columns.not then "Fixed-length Char columns are not supported by this backend.") <|
group_builder.specify "should correctly unify text columns of various lengths" pending=(if setup.test_selection.fixed_length_text_columns.not then "Fixed-length Char columns are not supported by this backend.") <|
t0 = table_builder [["A", ["a", Nothing, "c"]], ["B", ["X", "Y", "Z"]], ["C", ["xyz", "abc", "def"]]]
t1 = t0 . cast "A" (Value_Type.Char size=1 variable_length=False) . cast "B" (Value_Type.Char size=1 variable_length=False) . cast "C" (Value_Type.Char size=3 variable_length=False)
@ -276,7 +308,7 @@ spec setup =
Test.with_clue "e.value_type="+e.value_type.to_display_text+": " <|
e.value_type.variable_length.should_be_true
Test.specify "should allow setting a default column by reference" <|
group_builder.specify "should allow setting a default column by reference" <|
t = table_builder [["A", ["x", "", Nothing]], ["B", ["a", "b", "c"]], ["C", [Nothing, Nothing, "ZZZ"]], ["D", [Nothing, "2", "3"]]]
t1 = t.fill_nothing "A" (Column_Ref.Name "B")
@ -291,7 +323,7 @@ spec setup =
t3.at "C" . to_vector . should_equal [Nothing, "2", "ZZZ"]
t3.at "D" . to_vector . should_equal [Nothing, "2", "3"]
if setup.is_database.not then Test.specify "should allow filling rows with previous value" <|
if setup.is_database.not then group_builder.specify "should allow filling rows with previous value" <|
t = table_builder [["A", ["a", "", Nothing, Nothing, "", "b", "c", Nothing]]]
t1 = t.fill_nothing "A" Previous_Value
@ -317,7 +349,7 @@ spec setup =
# C is unchanged
t5.at "C" . to_vector . should_equal ["", "foo", Nothing, "bar"]
if setup.is_database then Test.specify "will for now report that Previous_Value is not supported" <|
if setup.is_database then group_builder.specify "will for now report that Previous_Value is not supported" <|
t = table_builder [["A", ["a", "", Nothing, Nothing, "", "b", "c", Nothing]]]
t.fill_nothing "A" Previous_Value . should_fail_with Unsupported_Database_Operation
t.fill_empty "A" Previous_Value . should_fail_with Unsupported_Database_Operation
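
When a group needs several prebuilt tables besides the connection, the diff bundles them into a single lazy vector with named accessors, so everything is created together on first use and closed in one `teardown`. The shape, condensed; the column payloads are illustrative:

```
type Data
    Value ~data

    connection self = self.data.at 0
    t0 self = self.data.at 1

    setup create_connection_fn table_builder = Data.Value <|
        connection = create_connection_fn Nothing
        t0 = table_builder [["a", [1, 2, Nothing]]] connection=connection
        [connection, t0]

    teardown self = self.connection.close
```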

View File

@ -9,19 +9,23 @@ import Standard.Test.Extensions
from project.Common_Table_Operations.Util import run_default_backend
main = run_default_backend spec
main = run_default_backend add_specs
type My_Type
Value x:Text
type Data
Value ~connection
spec setup =
setup create_connection_fn =
Data.Value (create_connection_fn Nothing)
teardown self = self.connection.close
add_specs suite_builder setup =
prefix = setup.prefix
table_builder = setup.table_builder
table_builder_typed columns value_type =
cast_columns = columns.map c->
Column.from_vector (c.at 0) (c.at 1) . cast value_type
setup.table_builder cast_columns
create_connection_fn = setup.create_connection_func
# We cannot create a column of Nothing/NULL in the database without casting it to a non-mixed type.
type_for_nothing_column = if setup.is_database then Value_Type.Char else Value_Type.Mixed
@ -42,7 +46,17 @@ spec setup =
+ mixed_values
values_with_nothing = values_without_nothing + [[Nothing, Nothing, type_for_nothing_column]]
Test.group prefix+"Comparisons" <|
suite_builder.group prefix+"Comparisons" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder_typed columns value_type =
cast_columns = columns.map c->
Column.from_vector (c.at 0) (c.at 1) . cast value_type
setup.table_builder cast_columns connection=data.connection
comparisons = [["==", ==], ["!=", !=], ["<", <], ["<=", <=], [">", >], [">=", >=]]
comparisons.map pair->
@ -53,99 +67,159 @@ spec setup =
value = triple.at 0
value_type = triple.at 2
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+" "+comparison_name+" Nothing value" <|
table = table_builder_typed [["x", [value]], ["n", [Nothing]]] value_type
Test.specify "Correctly handle Nothing in: "+value_type.to_text+" "+comparison_name+" Nothing value" <|
co = comparison (table.at "x") Nothing
co.to_vector . should_equal [Nothing]
Test.specify "Correctly handle Nothing in: "+value_type.to_text+" "+comparison_name+" Nothing column" <|
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+" "+comparison_name+" Nothing column" <|
table = table_builder_typed [["x", [value]], ["n", [Nothing]]] value_type
co = comparison (table.at "x") (table.at "n")
co.to_vector . should_equal [Nothing]
Test.specify "Correctly handle Nothing in: Nothing column "+comparison_name+" "+value_type.to_text <|
group_builder.specify "Correctly handle Nothing in: Nothing column "+comparison_name+" "+value_type.to_text <|
table = table_builder_typed [["x", [value]], ["n", [Nothing]]] value_type
co = comparison (table.at "n") (table.at "x")
co.to_vector . should_equal [Nothing]
Test.group prefix+"between" <|
suite_builder.group prefix+"between" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder_typed columns value_type =
cast_columns = columns.map c->
Column.from_vector (c.at 0) (c.at 1) . cast value_type
setup.table_builder cast_columns connection=data.connection
values_with_nothing.map triple->
value = triple.at 0
value_type = triple.at 2
group_builder.specify "Correctly handle Nothing in: Nothing column between "+value_type.to_text+" and "+value_type.to_text <|
table = table_builder_typed [["x", [value]], ["y", [value]], ["n", [Nothing]]] value_type
Test.specify "Correctly handle Nothing in: Nothing column between "+value_type.to_text+" and "+value_type.to_text <|
co = table.at "n" . between (table.at "x") (table.at "y")
co.to_vector . should_equal [Nothing]
Test.specify "Correctly handle Nothing in: "+value_type.to_text+" between Nothing column and "+value_type.to_text <|
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+" between Nothing column and "+value_type.to_text <|
table = table_builder_typed [["x", [value]], ["y", [value]], ["n", [Nothing]]] value_type
co = table.at "x" . between (table.at "n") (table.at "y")
co.to_vector . should_equal [Nothing]
Test.specify "Correctly handle Nothing in: "+value_type.to_text+" between "+value_type.to_text+" and Nothing column" <|
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+" between "+value_type.to_text+" and Nothing column" <|
table = table_builder_typed [["x", [value]], ["y", [value]], ["n", [Nothing]]] value_type
co = table.at "x" . between (table.at "y") (table.at "n")
co.to_vector . should_equal [Nothing]
Test.specify "Correctly handle Nothing in: "+value_type.to_text+" between Nothing value and "+value_type.to_text <|
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+" between Nothing value and "+value_type.to_text <|
table = table_builder_typed [["x", [value]], ["y", [value]], ["n", [Nothing]]] value_type
co = table.at "x" . between Nothing (table.at "y")
co.to_vector . should_equal [Nothing]
Test.specify "Correctly handle Nothing in: "+value_type.to_text+" between "+value_type.to_text+" and Nothing value" <|
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+" between "+value_type.to_text+" and Nothing value" <|
table = table_builder_typed [["x", [value]], ["y", [value]], ["n", [Nothing]]] value_type
co = table.at "x" . between (table.at "y") Nothing
co.to_vector . should_equal [Nothing]
Test.group prefix+"is_nothing" <|
suite_builder.group prefix+"is_nothing" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder_typed columns value_type =
cast_columns = columns.map c->
Column.from_vector (c.at 0) (c.at 1) . cast value_type
setup.table_builder cast_columns connection=data.connection
values_with_nothing.map triple->
value = triple.at 0
value_type = triple.at 2
Test.specify "Correctly handle Nothing in: "+value_type.to_text+".is_nothing" <|
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+".is_nothing" <|
table = table_builder_typed [["x", [value]]] value_type
co = table.at "x" . is_nothing
co.to_vector . should_equal [value == Nothing]
Test.group prefix+"not" <|
Test.specify "Correctly handle Nothing in .not" <|
suite_builder.group prefix+"not" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder columns =
setup.table_builder columns connection=data.connection
group_builder.specify "Correctly handle Nothing in .not" <|
table = table_builder [["x", [True, False, Nothing]]]
table.at "x" . not . to_vector . should_equal [False, True, Nothing]
Test.group prefix+"is_in" <|
suite_builder.group prefix+"is_in" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder_typed columns value_type =
cast_columns = columns.map c->
Column.from_vector (c.at 0) (c.at 1) . cast value_type
setup.table_builder cast_columns connection=data.connection
values_with_nothing.map triple->
value = triple.at 0
other_value = triple.at 1
value_type = triple.at 2
table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type
true_if_not_nothing = if value == Nothing then Nothing else True
Test.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Column (returning True)" <|
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Column (returning True)" <|
table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type
table.at "x" . is_in (table.at "z") . to_vector . should_equal [true_if_not_nothing, Nothing]
Test.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Column with Nothings (returning True)" <|
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Column with Nothings (returning True)" <|
table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type
table.at "x" . is_in (table.at "x") . to_vector . should_equal [true_if_not_nothing, Nothing]
Test.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Column with Nothings (returning Nothing)" <|
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Column with Nothings (returning Nothing)" <|
table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type
table.at "x" . is_in (table.at "y") . to_vector . should_equal [Nothing, Nothing]
Test.specify "Correctly handle Nothing in: Nothing.is_in Column with Nothings (returning Nothing)" <|
group_builder.specify "Correctly handle Nothing in: Nothing.is_in Column with Nothings (returning Nothing)" <|
table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type
table.at "n" . is_in (table.at "x") . to_vector . should_equal [Nothing, Nothing]
Test.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Vector (returning True)" <|
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Vector (returning True)" <|
table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type
table.at "x" . is_in (table.at "z" . to_vector) . to_vector . should_equal [true_if_not_nothing, Nothing]
Test.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Vector with Nothings (returning True)" <|
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Vector with Nothings (returning True)" <|
table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type
table.at "x" . is_in (table.at "x" . to_vector) . to_vector . should_equal [true_if_not_nothing, Nothing]
Test.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Vector with Nothings (returning Nothing)" <|
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Vector with Nothings (returning Nothing)" <|
table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type
table.at "x" . is_in (table.at "y" . to_vector) . to_vector . should_equal [Nothing, Nothing]
Test.specify "Correctly handle Nothing in: Nothing.is_in Vector with Nothings (returning Nothing)" <|
group_builder.specify "Correctly handle Nothing in: Nothing.is_in Vector with Nothings (returning Nothing)" <|
table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type
table.at "n" . is_in (table.at "x" . to_vector) . to_vector . should_equal [Nothing, Nothing]
Test.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in empty Vector (returning False)" <|
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in empty Vector (returning False)" <|
table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type
table.at "x" . is_in [] . to_vector . should_equal [False, False]
Test.group prefix+"Boolean is_in" <|
suite_builder.group prefix+"Boolean is_in" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder_typed columns value_type =
cast_columns = columns.map c->
Column.from_vector (c.at 0) (c.at 1) . cast value_type
setup.table_builder cast_columns connection=data.connection
make_containing_values had_null had_true had_false =
null_maybe = if had_null then [Nothing] else []
true_maybe = if had_true then [True] else []
@ -183,7 +257,7 @@ spec setup =
output = cs.at 4
containing_values = make_containing_values (cs.at 1) (cs.at 2) (cs.at 3)
Test.specify "Boolean is_in: (Vector), "+negation_desc+" "+cs.to_text <|
group_builder.specify "Boolean is_in: (Vector), "+negation_desc+" "+cs.to_text <|
input_column = transform_input [input]
t = table_builder_typed [["input", input_column]] Value_Type.Boolean
@ -191,7 +265,7 @@ spec setup =
c.to_vector . should_equal [output]
Test.specify "Boolean is_in: (Column), "+negation_desc+" "+cs.to_text <|
group_builder.specify "Boolean is_in: (Column), "+negation_desc+" "+cs.to_text <|
input_column = transform_input (Vector.fill containing_values.length input)
t = table_builder_typed [["input", input_column], ["containing", transform_argument containing_values]] Value_Type.Boolean
expected_output = if input_column.is_empty then [] else [output]
@ -201,26 +275,49 @@ spec setup =
c.to_vector . length . should_equal input_column.length
c.to_vector.distinct . should_equal expected_output
Test.group prefix+"distinct" <|
suite_builder.group prefix+"distinct" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder columns =
setup.table_builder columns connection=data.connection
table_builder_typed columns value_type =
cast_columns = columns.map c->
Column.from_vector (c.at 0) (c.at 1) . cast value_type
setup.table_builder cast_columns connection=data.connection
values_without_nothing.map triple->
value = triple.at 0
other_value = triple.at 1
value_type = triple.at 2
Test.specify "Correctly handle Nothing in .distinct for "+value_type.to_text <|
group_builder.specify "Correctly handle Nothing in .distinct for "+value_type.to_text <|
table = table_builder [["x", [value, other_value, other_value, Nothing, value, Nothing]]]
t1 = table . distinct ["x"]
v = t1.at "x" . to_vector
v . length . should_equal 3
v . should_contain_the_same_elements_as [value, other_value, Nothing]
Test.specify "Correctly handle Nothing in .distinct for Nothing" <|
group_builder.specify "Correctly handle Nothing in .distinct for Nothing" <|
table = table_builder_typed [["x", [Nothing, Nothing, Nothing, Nothing, Nothing, Nothing]]] Value_Type.Char
t1 = table . distinct ["x"]
v = t1.at "x" . to_vector
v . should_equal [Nothing]
Test.group prefix+"order_by" <|
suite_builder.group prefix+"order_by" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder_typed columns value_type =
cast_columns = columns.map c->
Column.from_vector (c.at 0) (c.at 1) . cast value_type
setup.table_builder cast_columns connection=data.connection
values_with_nothing.map triple->
value = triple.at 0
other_value = triple.at 1
@ -231,10 +328,10 @@ spec setup =
if is_comparable then
table = table_builder_typed [["x", [value, Nothing, other_value, other_value, Nothing, value, Nothing]]] value_type
Test.specify "Correctly handle Nothing in .order_by (asc) for "+value_type.to_text <|
group_builder.specify "Correctly handle Nothing in .order_by (asc) for "+value_type.to_text <|
t1 = table . order_by [Sort_Column.Name "x" Sort_Direction.Ascending]
t1.at "x" . to_vector . should_equal [Nothing, Nothing, Nothing, value, value, other_value, other_value]
Test.specify "Correctly handle Nothing in .order_by (desc) for "+value_type.to_text <|
group_builder.specify "Correctly handle Nothing in .order_by (desc) for "+value_type.to_text <|
t1 = table . order_by [Sort_Column.Name "x" Sort_Direction.Descending]
t1.at "x" . to_vector . should_equal [other_value, other_value, value, value, Nothing, Nothing, Nothing]

View File

@ -5,23 +5,21 @@ import Standard.Base.Errors.Common.Incomparable_Values
from Standard.Table import Sort_Column
from Standard.Table.Errors import all
from Standard.Test import Test, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Common_Table_Operations.Util import run_default_backend
type My_Type
Foo x
main = run_default_backend add_specs
main = run_default_backend spec
type Data
Value ~data
spec setup =
prefix = setup.prefix
table_builder = setup.table_builder
test_selection = setup.test_selection
order_by_pending = if test_selection.order_by.not then
"ToDo: order_by is not yet supported by this backend."
Test.group prefix+"Table.order_by" pending=order_by_pending <|
connection self = self.data.at 0
table self = self.data.at 1
setup create_connection_fn table_builder = Data.Value <|
connection = create_connection_fn Nothing
mk_table =
col1 = ["alpha", [3, 2, 1, 0]]
col2 = ["beta", ["a", "b", "a", "b"]]
@ -33,30 +31,53 @@ spec setup =
col8 = ["phi", ["śc", Nothing, 's\u0301b', "śa"]]
col9 = ["tau", [32.0, 0.5, -0.1, 1.6]]
col10 = ["rho", ["BB", Nothing, Nothing, "B"]]
table_builder [col1, col2, col3, col4, col5, col6, col7, col8, col9, col10]
table = mk_table
table_builder [col1, col2, col3, col4, col5, col6, col7, col8, col9, col10] connection=connection
[connection, mk_table]
Test.specify "should work as shown in the doc examples" <|
t1 = table.order_by ["alpha"]
teardown self =
self.connection.close
type My_Type
Foo x
add_specs suite_builder setup =
prefix = setup.prefix
create_connection_fn = setup.create_connection_func
test_selection = setup.test_selection
order_by_pending = if test_selection.order_by.not then
"ToDo: order_by is not yet supported by this backend."
suite_builder.group prefix+"Table.order_by" pending=order_by_pending group_builder->
data = Data.setup create_connection_fn setup.table_builder
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "should work as shown in the doc examples" <|
t1 = data.table.order_by ["alpha"]
t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3]
t1.at "gamma" . to_vector . should_equal [4, 3, 2, 1]
## Assumes stable sorting on the database engine.
t2 = table.order_by [1, Sort_Column.Index -8 Sort_Direction.Descending]
t2 = data.table.order_by [1, Sort_Column.Index -8 Sort_Direction.Descending]
t2.at "beta" . to_vector . should_equal ["a", "a", "b", "b"]
t2.at "gamma" . to_vector . should_equal [3, 1, 4, 2]
t2.at "alpha" . to_vector . should_equal [1, 3, 0, 2]
t3 = table.order_by [Sort_Column.Select_By_Name "a.*" use_regex=True case_sensitivity=Case_Sensitivity.Insensitive]
t3 = data.table.order_by [Sort_Column.Select_By_Name "a.*" use_regex=True case_sensitivity=Case_Sensitivity.Insensitive]
t3.at "alpha" . to_vector . should_equal [0, 1, 2, 3]
Test.specify "should work with single column name" <|
t1 = table.order_by "alpha"
group_builder.specify "should work with single column name" <|
t1 = data.table.order_by "alpha"
t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3]
t1.at "gamma" . to_vector . should_equal [4, 3, 2, 1]
Test.specify "should work with single Sort_Column" <|
t1 = table.order_by [Sort_Column.Name "alpha"]
group_builder.specify "should work with single Sort_Column" <|
t1 = data.table.order_by [Sort_Column.Name "alpha"]
t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3]
t1.at "gamma" . to_vector . should_equal [4, 3, 2, 1]
@ -64,7 +85,7 @@ spec setup =
t2.at "alpha" . to_vector . should_equal [3, 2, 1, 0]
t2.at "gamma" . to_vector . should_equal [1, 2, 3, 4]
t3 = table.order_by [Sort_Column.Index 0]
t3 = data.table.order_by [Sort_Column.Index 0]
t3.at "alpha" . to_vector . should_equal [0, 1, 2, 3]
t3.at "gamma" . to_vector . should_equal [4, 3, 2, 1]
@ -72,89 +93,89 @@ spec setup =
t4.at "alpha" . to_vector . should_equal [3, 2, 1, 0]
t4.at "gamma" . to_vector . should_equal [1, 2, 3, 4]
Test.specify "should allow the selector to mix regex and case insensitive matching" <|
t4 = table.order_by [Sort_Column.Select_By_Name "A.*" use_regex=True case_sensitivity=Case_Sensitivity.Insensitive]
group_builder.specify "should allow the selector to mix regex and case insensitive matching" <|
t4 = data.table.order_by [Sort_Column.Select_By_Name "A.*" use_regex=True case_sensitivity=Case_Sensitivity.Insensitive]
t4.at "alpha" . to_vector . should_equal [0, 1, 2, 3]
Test.specify "should correctly handle regexes matching multiple names" <|
t1 = table.order_by [Sort_Column.Select_By_Name ".*ta" Sort_Direction.Descending use_regex=True]
group_builder.specify "should correctly handle regexes matching multiple names" <|
t1 = data.table.order_by [Sort_Column.Select_By_Name ".*ta" Sort_Direction.Descending use_regex=True]
t1.at "beta" . to_vector . should_equal ["b", "b", "a", "a"]
t1.at "delta" . to_vector . should_equal ["a1", "a03", "a2", "a10"]
t1.at "gamma" . to_vector . should_equal [2, 4, 3, 1]
Test.specify "should correctly handle problems: out of bounds indices" <|
group_builder.specify "should correctly handle problems: out of bounds indices" <|
selector = [0, 100, Sort_Column.Index -200, Sort_Column.Index 300]
expected_problem = Missing_Input_Columns.Error [100, -200, 300]
t1 = table.order_by selector
t1 = data.table.order_by selector
t1.should_fail_with Missing_Input_Columns
t1.catch . should_equal expected_problem
action = table.order_by selector error_on_missing_columns=False on_problems=_
action = data.table.order_by selector error_on_missing_columns=False on_problems=_
tester table =
table.at "alpha" . to_vector . should_equal [0, 1, 2, 3]
Problems.test_problem_handling action [expected_problem] tester
Test.specify "should correctly handle edge-cases: duplicate selectors" <|
group_builder.specify "should correctly handle edge-cases: duplicate selectors" <|
selector1 = ["alpha", Sort_Column.Name "alpha" Sort_Direction.Descending]
t1 = table.order_by selector1
t1 = data.table.order_by selector1
Problems.assume_no_problems t1
t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3]
t1.at "gamma" . to_vector . should_equal [4, 3, 2, 1]
t2 = table.order_by [Sort_Column.Index 0, Sort_Column.Index 0 Sort_Direction.Descending]
t2 = data.table.order_by [Sort_Column.Index 0, Sort_Column.Index 0 Sort_Direction.Descending]
Problems.assume_no_problems t2
t2.at "alpha" . to_vector . should_equal [0, 1, 2, 3]
t2.at "gamma" . to_vector . should_equal [4, 3, 2, 1]
t3 = table.order_by [Sort_Column.Index 0, Sort_Column.Name "alpha" Sort_Direction.Descending]
t3 = data.table.order_by [Sort_Column.Index 0, Sort_Column.Name "alpha" Sort_Direction.Descending]
Problems.assume_no_problems t3
t3.at "alpha" . to_vector . should_equal [0, 1, 2, 3]
t3.at "gamma" . to_vector . should_equal [4, 3, 2, 1]
Test.specify "should correctly handle edge-cases: duplicate matches due to case insensitivity" <|
group_builder.specify "should correctly handle edge-cases: duplicate matches due to case insensitivity" <|
selector = [Sort_Column.Select_By_Name "ALPHA" case_sensitivity=Case_Sensitivity.Insensitive, Sort_Column.Select_By_Name "alpha" Sort_Direction.Descending]
t1 = table.order_by selector
t1 = data.table.order_by selector
Problems.assume_no_problems t1
t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3]
t1.at "gamma" . to_vector . should_equal [4, 3, 2, 1]
Test.specify "should correctly handle edge-cases: duplicate matches due to regexes" <|
group_builder.specify "should correctly handle edge-cases: duplicate matches due to regexes" <|
selector = [Sort_Column.Select_By_Name "a.*" use_regex=True, Sort_Column.Select_By_Name "alpha" Sort_Direction.Descending]
t1 = table.order_by selector
t1 = data.table.order_by selector
Problems.assume_no_problems t1
t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3]
t1.at "gamma" . to_vector . should_equal [4, 3, 2, 1]
Test.specify "should correctly handle edge-cases: mixed selector types" <|
t1 = table.order_by [Sort_Column.Name "alpha", Sort_Column.Index 1]
group_builder.specify "should correctly handle edge-cases: mixed selector types" <|
t1 = data.table.order_by [Sort_Column.Name "alpha", Sort_Column.Index 1]
t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3]
t1.at "beta" . to_vector . should_equal ["b", "a", "b", "a"]
t1.at "gamma" . to_vector . should_equal [4, 3, 2, 1]
t2 = table.order_by [Sort_Column.Select_By_Name "a.*a" use_regex=True, Sort_Column.Index 1]
t2 = data.table.order_by [Sort_Column.Select_By_Name "a.*a" use_regex=True, Sort_Column.Index 1]
t2.at "alpha" . to_vector . should_equal [0, 1, 2, 3]
t2.at "beta" . to_vector . should_equal ["b", "a", "b", "a"]
t2.at "gamma" . to_vector . should_equal [4, 3, 2, 1]
Test.specify "should correctly handle problems: unmatched names" <|
group_builder.specify "should correctly handle problems: unmatched names" <|
weird_name = '.*?-!@#!"'
selector = [Sort_Column.Name "alpha", "hmm", Sort_Column.Name weird_name]
expected_problem = Missing_Input_Columns.Error ["hmm", weird_name]
t1 = table.order_by selector
t1 = data.table.order_by selector
t1.should_fail_with Missing_Input_Columns
t1.catch . should_equal expected_problem
action = table.order_by selector error_on_missing_columns=False on_problems=_
action = data.table.order_by selector error_on_missing_columns=False on_problems=_
tester table =
table.at "alpha" . to_vector . should_equal [0, 1, 2, 3]
Problems.test_problem_handling action [expected_problem] tester
Test.specify "should report a problem if no columns are selected for ordering" <|
t2 = table.order_by []
group_builder.specify "should report a problem if no columns are selected for ordering" <|
t2 = data.table.order_by []
t2.should_fail_with No_Input_Columns_Selected
Test.specify "should stack consecutive ordering operations" <|
t1 = table.order_by [Sort_Column.Name "alpha"]
group_builder.specify "should stack consecutive ordering operations" <|
t1 = data.table.order_by [Sort_Column.Name "alpha"]
t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3]
t1.at "beta" . to_vector . should_equal ["b", "a", "b", "a"]
@ -172,51 +193,51 @@ spec setup =
t4.at "beta" . to_vector . should_equal ["a", "a", "b", "b"]
t4.at "alpha" . to_vector . should_equal [3, 1, 2, 0]
Test.specify "should give priority to the first selected column and use the next ones for breaking ties" <|
t1 = table.order_by ["beta", Sort_Column.Name "alpha" Sort_Direction.Ascending]
group_builder.specify "should give priority to the first selected column and use the next ones for breaking ties" <|
t1 = data.table.order_by ["beta", Sort_Column.Name "alpha" Sort_Direction.Ascending]
t1.at "beta" . to_vector . should_equal ["a", "a", "b", "b"]
t1.at "alpha" . to_vector . should_equal [1, 3, 0, 2]
t1.at "gamma" . to_vector . should_equal [3, 1, 4, 2]
t1a = table.order_by ["beta", Sort_Column.Name "alpha" Sort_Direction.Ascending]
t1a = data.table.order_by ["beta", Sort_Column.Name "alpha" Sort_Direction.Ascending]
t1a.at "beta" . to_vector . should_equal ["a", "a", "b", "b"]
t1a.at "alpha" . to_vector . should_equal [1, 3, 0, 2]
t1a.at "gamma" . to_vector . should_equal [3, 1, 4, 2]
t2 = table.order_by [Sort_Column.Name "beta", Sort_Column.Name "alpha" Sort_Direction.Descending]
t2 = data.table.order_by [Sort_Column.Name "beta", Sort_Column.Name "alpha" Sort_Direction.Descending]
t2.at "beta" . to_vector . should_equal ["a", "a", "b", "b"]
t2.at "alpha" . to_vector . should_equal [3, 1, 2, 0]
t2.at "gamma" . to_vector . should_equal [1, 3, 2, 4]
t3 = table.order_by [Sort_Column.Name "alpha", Sort_Column.Name "beta"]
t3 = data.table.order_by [Sort_Column.Name "alpha", Sort_Column.Name "beta"]
t3.at "alpha" . to_vector . should_equal [0, 1, 2, 3]
t3.at "beta" . to_vector . should_equal ["b", "a", "b", "a"]
t3.at "gamma" . to_vector . should_equal [4, 3, 2, 1]
t4 = table.order_by [Sort_Column.Index 1, Sort_Column.Index 0 Sort_Direction.Ascending]
t4 = data.table.order_by [Sort_Column.Index 1, Sort_Column.Index 0 Sort_Direction.Ascending]
t4.at "beta" . to_vector . should_equal ["a", "a", "b", "b"]
t4.at "alpha" . to_vector . should_equal [1, 3, 0, 2]
t4.at "gamma" . to_vector . should_equal [3, 1, 4, 2]
Test.specify "should deal with real numbers, and not warn when ordering by floats" <|
t1 = table.order_by ["tau"]
group_builder.specify "should deal with real numbers, and not warn when ordering by floats" <|
t1 = data.table.order_by ["tau"]
t1.at "tau" . to_vector . should_equal [-0.1, 0.5, 1.6, 32.0]
t1.at "alpha" . to_vector . should_equal [1, 2, 0, 3]
Problems.assume_no_problems t1
Test.specify "should deal with nulls" <|
t1 = table.order_by ["xi"]
group_builder.specify "should deal with nulls" <|
t1 = data.table.order_by ["xi"]
t1.at "xi" . to_vector . should_equal [Nothing, 0.5, 1.0, 1.5]
t1.at "alpha" . to_vector . should_equal [1, 0, 3, 2]
t2 = data.table.order_by [Sort_Column.Name "rho"]
t2.at "rho" . to_vector . should_equal [Nothing, Nothing, "B", "BB"]
t3 = data.table.order_by [Sort_Column.Name "rho" Sort_Direction.Descending]
t3.at "rho" . to_vector . should_equal ["BB", "B", Nothing, Nothing]
group_builder.specify "should behave as expected with Unicode normalization, depending on the defaults settings" <|
t1 = data.table.order_by [Sort_Column.Name "phi"]
case test_selection.order_by_unicode_normalization_by_default of
True ->
t1.at "phi" . to_vector . should_equal [Nothing, "śa", 's\u0301b', "śc"]
@@ -225,55 +246,55 @@ spec setup =
t1.at "phi" . to_vector . should_equal [Nothing, 's\u0301b', "śa", "śc"]
t1.at "alpha" . to_vector . should_equal [2, 1, 0, 3]
group_builder.specify "should support natural ordering" pending=(if test_selection.natural_ordering.not then "Natural ordering is not supported.") <|
t1 = data.table.order_by [Sort_Column.Name "delta"] text_ordering=(Text_Ordering.Default sort_digits_as_numbers=True)
t1.at "delta" . to_vector . should_equal ["a1", "a2", "a03", "a10"]
t1.at "alpha" . to_vector . should_equal [2, 1, 0, 3]
t2 = data.table.order_by ["delta"] text_ordering=(Text_Ordering.Default sort_digits_as_numbers=False)
t2.at "delta" . to_vector . should_equal ["a03", "a1", "a10", "a2"]
t2.at "alpha" . to_vector . should_equal [0, 2, 3, 1]
group_builder.specify "should support case insensitive ordering" pending=(if test_selection.case_insensitive_ordering.not then "Case insensitive ordering is not supported.") <|
t1 = data.table.order_by [Sort_Column.Name "eta"] text_ordering=(Text_Ordering.Case_Insensitive)
expected = case test_selection.case_insensitive_ascii_only of
True -> ["Aleph", "alpha", "Beta", "bądź"]
False -> ["Aleph", "alpha", "bądź", "Beta"]
t1.at "eta" . to_vector . should_equal expected
t2 = data.table.order_by [Sort_Column.Name "eta"] text_ordering=(Text_Ordering.Case_Sensitive)
t2.at "eta" . to_vector . should_equal ["Aleph", "Beta", "alpha", "bądź"]
t3 = data.table.order_by [Sort_Column.Name "psi"] text_ordering=(Text_Ordering.Case_Insensitive)
t3.at "psi" . to_vector . should_equal [Nothing, "c01", "c10", "C2"]
t4 = data.table.order_by [Sort_Column.Name "psi" Sort_Direction.Descending] text_ordering=(Text_Ordering.Case_Sensitive)
t4.at "psi" . to_vector . should_equal ["c10", "c01", "C2", Nothing]
group_builder.specify "should support natural and case insensitive ordering at the same time" pending=(if (test_selection.natural_ordering.not || test_selection.case_insensitive_ordering.not) then "Natural ordering or case sensitive ordering is not supported.") <|
t1 = data.table.order_by [Sort_Column.Name "psi"] text_ordering=(Text_Ordering.Case_Insensitive sort_digits_as_numbers=True)
t1.at "psi" . to_vector . should_equal [Nothing, "c01", "C2", "c10"]
t2 = data.table.order_by [Sort_Column.Name "psi"] text_ordering=(Text_Ordering.Default sort_digits_as_numbers=True)
t2.at "psi" . to_vector . should_equal [Nothing, "C2", "c01", "c10"]
t3 = data.table.order_by [Sort_Column.Name "psi"] text_ordering=(Text_Ordering.Case_Insensitive)
t3.at "psi" . to_vector . should_equal [Nothing, "c01", "c10", "C2"]
t4 = data.table.order_by [Sort_Column.Name "psi"]
t4.at "psi" . to_vector . should_equal [Nothing, "C2", "c01", "c10"]
group_builder.specify "text ordering settings should not affect numeric columns" <|
ordering = Text_Ordering.Case_Insensitive sort_digits_as_numbers=True
t1 = data.table.order_by [Sort_Column.Name "alpha"] text_ordering=ordering
t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3]
t1.at "gamma" . to_vector . should_equal [4, 3, 2, 1]
if setup.is_database.not then
group_builder.specify "should allow ordering enso objects with a comparator" <|
t = table_builder [["X", [Day_Of_Week.Friday, Day_Of_Week.Monday, Nothing, Nothing, Day_Of_Week.Wednesday]]]
t.order_by "X" . at "X" . to_vector . should_equal [Nothing, Nothing, Day_Of_Week.Monday, Day_Of_Week.Wednesday, Day_Of_Week.Friday]
group_builder.specify "should raise Incomparable_Values if ordering by incomparable values" <|
t = table_builder [["X", [My_Type.Foo 42, My_Type.Foo "a"]]]
t.order_by "X" . should_fail_with Incomparable_Values
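Every hunk below repeats the same mechanical pattern as the one above: a module-level `spec setup =` entry point becomes `add_specs suite_builder setup =`, `Test.group`/`Test.specify` become `suite_builder.group`/`group_builder.specify`, and shared fixtures move out of the function body into a lazily-constructed `*_Data` type accessed as `data.<field>`. A minimal sketch of the target shape (the type, group and spec names here are illustrative, not taken from the real suite):

from Standard.Base import all
from Standard.Test_New import all

type My_Data
    Value ~data

    table self = self.data.at 0

    setup table_builder = My_Data.Value <|
        [table_builder [["x", [1, 2, 3]]]]

add_specs suite_builder setup =
    suite_builder.group setup.prefix+"My_Group" group_builder->
        data = My_Data.setup setup.table_builder
        group_builder.specify "should keep the rows" <|
            data.table.at "x" . to_vector . should_equal [1, 2, 3]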
@@ -3,17 +3,21 @@ from Standard.Base import all
from Standard.Table import Position
from Standard.Table.Errors import all
from Standard.Test_New import all
from project.Common_Table_Operations.Util import expect_column_names, run_default_backend
main = run_default_backend add_specs
type Select_Columns_Data
Value ~data
connection self = self.data.at 0
table self = self.data.at 1
setup create_connection_fn table_builder = Select_Columns_Data.Value <|
connection = create_connection_fn Nothing
table =
col1 = ["foo", [1,2,3]]
col2 = ["bar", [4,5,6]]
@@ -22,276 +26,20 @@ spec setup =
col5 = ["foo 2", [13,14,15]]
col6 = ["ab.+123", [16,17,18]]
col7 = ["abcd123", [19,20,21]]
table_builder [col1, col2, col3, col4, col5, col6, col7] connection=connection
[connection, table]
teardown self =
self.connection.close
type Sort_Columns_Data
Value ~data
connection self = self.data.at 0
table self = self.data.at 1
setup create_connection_fn table_builder = Sort_Columns_Data.Value <|
connection = create_connection_fn Nothing
table =
col1 = ["foo 21", [1,2,3]]
col2 = ["foo 100", [4,5,6]]
@@ -300,128 +48,445 @@
col5 = ["foo 3", [13,14,15]]
col6 = ["foo 001", [16,17,18]]
col7 = ["bar", [19,20,21]]
table_builder [col1, col2, col3, col4, col5, col6, col7] connection=connection
[connection, table]
teardown self =
self.connection.close
type Rename_Columns_Data
Value ~data
connection self = self.data.at 0
table self = self.data.at 1
setup create_connection_fn table_builder = Rename_Columns_Data.Value <|
connection = create_connection_fn Nothing
table =
col1 = ["alpha", [1,2,3]]
col2 = ["beta", [4,5,6]]
col3 = ["gamma", [16,17,18]]
col4 = ["delta", [19,20,21]]
table_builder [col1, col2, col3, col4] connection=connection
[connection, table]
teardown self =
self.connection.close
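## The three fixture types above all hinge on the suspended constructor
   argument `~data`: the block handed to `Value` does not run while the suite
   is being built, but only when the first spec in a group touches an
   accessor, so each group opens its connection lazily and `teardown` closes
   it afterwards. The same idiom in isolation (an illustrative type, not part
   of the real suite):
type Lazy_Fixture_Example
    Value ~data

    payload self = self.data.at 0

    make = Lazy_Fixture_Example.Value <|
        # This body is deferred: it runs on the first access to `payload`,
        # not at construction time.
        [42]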
add_specs suite_builder setup =
prefix = setup.prefix
table_builder = setup.table_builder
create_connection_fn = setup.create_connection_func
test_selection = setup.test_selection
suite_builder.group prefix+"Table.select_columns" group_builder->
data = Select_Columns_Data.setup create_connection_fn table_builder
group_builder.teardown <|
data.teardown
group_builder.specify "should work as shown in the doc examples" <|
expect_column_names ["foo", "bar"] <| data.table.select_columns ["bar", "foo"]
expect_column_names ["bar", "Baz", "foo 1", "foo 2"] <| data.table.select_columns ["foo.+".to_regex, "b.*".to_regex True]
expect_column_names ["abcd123", "foo", "bar"] <| data.table.select_columns [-1, 0, 1] reorder=True
group_builder.specify "should allow to reorder columns if asked to" <|
table_2 = data.table.select_columns ["bar", "foo"] reorder=True
expect_column_names ["bar", "foo"] table_2
table_2 . at "bar" . to_vector . should_equal [4,5,6]
table_2 . at "foo" . to_vector . should_equal [1,2,3]
group_builder.specify "should correctly handle regex matching" <|
expect_column_names ["foo"] <| data.table.select_columns ["foo".to_regex]
expect_column_names ["ab.+123", "abcd123"] <| data.table.select_columns ["a.*".to_regex]
expect_column_names ["ab.+123", "abcd123"] <| data.table.select_columns ["ab.+123".to_regex]
expect_column_names ["ab.+123"] <| data.table.select_columns ["ab.+123"]
expect_column_names ["abcd123"] <| data.table.select_columns ["abcd123".to_regex]
group_builder.specify "should allow negative indices" <|
expect_column_names ["foo", "bar", "foo 2"] <| data.table.select_columns [-3, 0, 1]
group_builder.specify "should allow mixed names and indexes" <|
expect_column_names ["foo", "bar", "foo 2"] <| data.table.select_columns [-3, "bar", 0]
expect_column_names ["foo 2", "bar", "foo"] <| data.table.select_columns [-3, "bar", 0] reorder=True
expect_column_names ["foo", "bar", "foo 1", "foo 2", "abcd123"] <| data.table.select_columns [-1, "bar", "foo.*".to_regex]
expect_column_names ["foo", "foo 1", "foo 2", "bar", "abcd123"] <| data.table.select_columns ["foo.*".to_regex, "bar", "foo", -1] reorder=True
if test_selection.supports_case_sensitive_columns then
group_builder.specify "should correctly handle exact matches matching multiple names due to case insensitivity" <|
table =
col1 = ["foo", [1,2,3]]
col2 = ["bar", [4,5,6]]
col3 = ["Bar", [7,8,9]]
table_builder [col1, col2, col3]
expect_column_names ["bar", "Bar"] <| table.select_columns ["bar"] case_sensitivity=Case_Sensitivity.Insensitive
group_builder.specify "should correctly handle regexes matching multiple names" <|
expect_column_names ["foo", "bar", "foo 1", "foo 2"] <| data.table.select_columns ["b.*".to_regex, "f.+".to_regex]
expect_column_names ["bar", "foo", "foo 1", "foo 2"] <| data.table.select_columns ["b.*".to_regex, "f.+".to_regex] reorder=True
group_builder.specify "should correctly handle problems: out of bounds indices" <|
selector = [1, 0, 100, -200, 300]
action = data.table.select_columns selector error_on_missing_columns=False on_problems=_
tester = expect_column_names ["foo", "bar"]
problems = [Missing_Input_Columns.Error [100, -200, 300]]
Problems.test_problem_handling action problems tester
err = data.table.select_columns selector
err.should_fail_with Missing_Input_Columns
group_builder.specify "should correctly handle edge-cases: duplicate indices" <|
selector = [0, 0, 0]
t = data.table.select_columns selector on_problems=Problem_Behavior.Report_Error
expect_column_names ["foo"] t
expect_column_names ["foo", "bar"] <|
data.table.select_columns [0, 1, 0]
group_builder.specify "should correctly handle edge-cases: aliased indices" <|
selector = [0, -6, 1, -7]
t = data.table.select_columns selector on_problems=Problem_Behavior.Report_Error
expect_column_names ["foo", "bar"] t
group_builder.specify "should correctly handle edge-cases: duplicate names" <|
selector = ["foo", "foo"]
t = data.table.select_columns selector on_problems=Problem_Behavior.Report_Error
expect_column_names ["foo"] t
expect_column_names ["foo", "bar"] <|
data.table.select_columns ["foo", "bar", "foo", "foo", "bar"] reorder=True
expect_column_names ["bar", "foo"] <|
data.table.select_columns ["bar", "foo", "bar", "foo", "foo", "bar"] reorder=True
expect_column_names ["foo", "bar"] <|
data.table.select_columns ["bar", "foo", "foo", "bar"] reorder=False
group_builder.specify "should correctly handle edge-cases: duplicate matches due to case insensitivity" <|
selector = ["FOO", "foo"]
t = data.table.select_columns selector case_sensitivity=Case_Sensitivity.Insensitive on_problems=Problem_Behavior.Report_Error
expect_column_names ["foo"] t
expect_column_names ["bar", "foo"] <|
data.table.select_columns ["BAR", "foo", "bar"] reorder=True case_sensitivity=Case_Sensitivity.Insensitive
group_builder.specify "should correctly handle problems: unmatched names" <|
weird_name = '.*?-!@#!"'
selector = ["foo", "hmm", weird_name]
action = data.table.select_columns selector error_on_missing_columns=False on_problems=_
tester = expect_column_names ["foo"]
problems = [Missing_Input_Columns.Error ["hmm", weird_name]]
Problems.test_problem_handling action problems tester
err = data.table.select_columns selector on_problems=Problem_Behavior.Ignore
err.should_fail_with Missing_Input_Columns
err.catch.criteria . should_equal ["hmm", weird_name]
group_builder.specify "should correctly handle problems in mixed case" <|
err = data.table.select_columns ["foo", "hmm", 99] on_problems=Problem_Behavior.Ignore
err.should_fail_with Missing_Input_Columns
err.catch.criteria . should_equal ["hmm", 99]
group_builder.specify "should correctly handle problems: no columns in the output" <|
[Problem_Behavior.Ignore, Problem_Behavior.Report_Warning, Problem_Behavior.Report_Error].each pb->
t = data.table.select_columns [] on_problems=pb
t.should_fail_with No_Output_Columns
# Just selecting [] means the No_Output_Columns does not have an additional cause.
t.catch.cause . should_equal Nothing
t.catch.to_display_text . should_equal "The result would contain no columns."
data.table.select_columns ["hmmm"] . should_fail_with Missing_Input_Columns
r2 = data.table.select_columns ["hmmm"] error_on_missing_columns=False
r2.should_fail_with No_Output_Columns
r2.catch.cause . should_be_a Missing_Input_Columns
r2.catch.to_display_text . should_equal "No columns in the result, because of another problem: The criteria 'hmmm' did not match any columns."
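# A note on the `on_problems=_` idiom used throughout these groups: a trailing
# underscore turns the call into a one-argument function over the
# problem-handling strategy, i.e.
# `data.table.select_columns selector on_problems=_` is shorthand for
# `pb-> data.table.select_columns selector on_problems=pb`. That is what lets
# `Problems.test_problem_handling` re-run a single action under each
# `Problem_Behavior` and compare the attached warnings against the expected
# problems. A minimal illustration of the desugaring (names are ours, not the
# suite's):
underscore_demo =
    scale x factor = x * factor
    times_ten = scale 10 _
    times_ten 3   # evaluates to 30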
suite_builder.group prefix+"Table.remove_columns" group_builder->
data = Select_Columns_Data.setup create_connection_fn table_builder
group_builder.teardown <|
data.teardown
group_builder.specify "should work as shown in the doc examples" <|
expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] <| data.table.remove_columns ["bar", "foo"]
expect_column_names ["foo", "ab.+123", "abcd123"] <| data.table.remove_columns ["foo.+".to_regex, "b.*".to_regex] Case_Sensitivity.Insensitive
expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123"] <| data.table.remove_columns [-1, 0, 1]
group_builder.specify "should correctly handle regex matching" <|
last_ones = data.table.columns.drop 1 . map .name
expect_column_names last_ones <| data.table.remove_columns ["foo".to_regex]
first_ones = ["foo", "bar", "Baz", "foo 1", "foo 2"]
expect_column_names first_ones <| data.table.remove_columns ["a.*".to_regex]
expect_column_names first_ones <| data.table.remove_columns ["ab.+123".to_regex]
expect_column_names first_ones+["abcd123"] <| data.table.remove_columns ["ab.+123"] Case_Sensitivity.Insensitive
expect_column_names first_ones+["ab.+123"] <| data.table.remove_columns ["abcd123".to_regex]
group_builder.specify "should allow negative indices" <|
expect_column_names ["Baz", "foo 1", "ab.+123"] <| data.table.remove_columns [-1, -3, 0, 1]
if test_selection.supports_case_sensitive_columns then
group_builder.specify "should correctly handle exact matches matching multiple names due to case insensitivity" <|
table =
col1 = ["foo", [1,2,3]]
col2 = ["bar", [4,5,6]]
col3 = ["Bar", [7,8,9]]
table_builder [col1, col2, col3]
expect_column_names ["foo"] <| table.remove_columns "bar" Case_Sensitivity.Insensitive
group_builder.specify "should correctly handle regexes matching multiple names" <|
expect_column_names ["Baz", "ab.+123", "abcd123"] <| data.table.remove_columns ["f.+".to_regex, "b.*".to_regex]
group_builder.specify "should correctly handle problems: out of bounds indices" <|
selector = [1, 0, 100, -200, 300]
action = data.table.remove_columns selector on_problems=_
tester = expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123"]
problems = [Missing_Input_Columns.Error [100, -200, 300]]
Problems.test_problem_handling action problems tester
err = data.table.remove_columns selector error_on_missing_columns=True
err.should_fail_with Missing_Input_Columns
group_builder.specify "should correctly handle edge-cases: duplicate indices" <|
selector = [0, 0, 0]
t = data.table.remove_columns selector on_problems=Problem_Behavior.Report_Error
expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] t
group_builder.specify "should correctly handle edge-cases: aliased indices" <|
selector = [0, -7, -6, 1]
t = data.table.remove_columns selector on_problems=Problem_Behavior.Report_Error
expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] t
group_builder.specify "should correctly handle edge-cases: duplicate names" <|
selector = ["foo", "foo"]
t = data.table.remove_columns selector on_problems=Problem_Behavior.Report_Error
expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] t
group_builder.specify "should correctly handle edge-cases: duplicate matches due to case insensitivity" <|
selector = ["FOO", "foo"]
t = data.table.remove_columns selector Case_Sensitivity.Insensitive on_problems=Problem_Behavior.Report_Error
expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] t
group_builder.specify "should correctly handle problems: unmatched names" <|
weird_name = '.*?-!@#!"'
selector = ["foo", "hmm", weird_name]
action = data.table.remove_columns selector on_problems=_
tester = expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"]
problems = [Missing_Input_Columns.Error ["hmm", weird_name]]
Problems.test_problem_handling action problems tester
err = data.table.remove_columns selector error_on_missing_columns=True on_problems=Problem_Behavior.Ignore
err.should_fail_with Missing_Input_Columns
group_builder.specify "should correctly handle problems: no columns in the output" <|
[Problem_Behavior.Ignore, Problem_Behavior.Report_Warning, Problem_Behavior.Report_Error].each pb->
selector = [".*".to_regex]
t = data.table.remove_columns selector on_problems=pb
t.should_fail_with No_Output_Columns
selector_2 = [".*".to_regex, "hmmm".to_regex]
t1 = data.table.remove_columns selector_2
t1.should_fail_with No_Output_Columns
# No cause specified - even if some criteria were unmatched, that is not the reason for the No_Output_Columns (the reason is all other columns got deleted, by other criteria that _did_ match).
t1.catch.cause . should_equal Nothing
suite_builder.group prefix+"Table.reorder_columns" group_builder->
data = Select_Columns_Data.setup create_connection_fn table_builder
group_builder.teardown <|
data.teardown
group_builder.specify "should work as shown in the doc examples" <|
expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] <| data.table.reorder_columns "foo" Position.After_Other_Columns
expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo", "bar"] <| data.table.reorder_columns ["foo", "bar"] Position.After_Other_Columns
expect_column_names ["foo 1", "foo 2", "bar", "Baz", "foo", "ab.+123", "abcd123"] <| data.table.reorder_columns ["foo.+".to_regex, "b.*".to_regex] case_sensitivity=Case_Sensitivity.Insensitive
expect_column_names ["bar", "foo", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] <| data.table.reorder_columns [1, 0] Position.Before_Other_Columns
expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] <| data.table.reorder_columns [0] Position.After_Other_Columns
group_builder.specify "should correctly handle regex matching" <|
expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] <| data.table.reorder_columns ["foo".to_regex] Position.After_Other_Columns
rest = ["foo", "bar", "Baz", "foo 1", "foo 2"]
expect_column_names ["ab.+123", "abcd123"]+rest <| data.table.reorder_columns ["a.*".to_regex]
expect_column_names ["ab.+123", "abcd123"]+rest <| data.table.reorder_columns ["ab.+123".to_regex]
expect_column_names ["ab.+123"]+rest+["abcd123"] <| data.table.reorder_columns ["ab.+123"]
expect_column_names ["abcd123"]+rest+["ab.+123"] <| data.table.reorder_columns ["abcd123".to_regex]
group_builder.specify "should allow negative indices" <|
expect_column_names ["abcd123", "foo 2", "foo", "bar", "Baz", "foo 1", "ab.+123"] <| data.table.reorder_columns [-1, -3, 0, 1]
if test_selection.supports_case_sensitive_columns then
group_builder.specify "should correctly handle exact matches matching multiple names due to case insensitivity" <|
table =
col1 = ["foo", [1,2,3]]
col2 = ["bar", [4,5,6]]
col3 = ["Bar", [7,8,9]]
table_builder [col1, col2, col3]
expect_column_names ["bar", "Bar", "foo"] <| table.reorder_columns ["bar"] case_sensitivity=Case_Sensitivity.Insensitive
group_builder.specify "should correctly handle regexes matching multiple names" <|
expect_column_names ["bar", "foo", "foo 1", "foo 2", "Baz", "ab.+123", "abcd123"] <| data.table.reorder_columns ["b.*".to_regex, "f.+".to_regex]
group_builder.specify "should correctly handle problems: out of bounds indices" <|
selector = [1, 0, 100, -200, 300]
action = data.table.reorder_columns selector on_problems=_
tester = expect_column_names ["bar", "foo", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"]
problems = [Missing_Input_Columns.Error [100, -200, 300]]
Problems.test_problem_handling action problems tester
err = data.table.reorder_columns selector error_on_missing_columns=True
err.should_fail_with Missing_Input_Columns
group_builder.specify "should correctly handle edge-cases: duplicate indices" <|
selector = [0, 0, 0]
t = data.table.reorder_columns selector Position.After_Other_Columns on_problems=Problem_Behavior.Report_Error
expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] t
group_builder.specify "should correctly handle edge-cases: aliased indices" <|
selector = [0, -7, -6, 1]
t = data.table.reorder_columns selector Position.After_Other_Columns on_problems=Problem_Behavior.Report_Error
expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo", "bar"] t
group_builder.specify "should correctly handle edge-cases: duplicate names" <|
selector = ["foo", "foo"]
t = data.table.reorder_columns selector Position.After_Other_Columns on_problems=Problem_Behavior.Report_Error
expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] t
group_builder.specify "should correctly handle problems: unmatched names" <|
weird_name = '.*?-!@#!"'
selector = ["foo", "hmm", weird_name]
action = data.table.reorder_columns selector Position.After_Other_Columns on_problems=_
tester = expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"]
problems = [Missing_Input_Columns.Error ["hmm", weird_name]]
Problems.test_problem_handling action problems tester
err = data.table.reorder_columns selector Position.After_Other_Columns error_on_missing_columns=True
err.should_fail_with Missing_Input_Columns
suite_builder.group prefix+"Table.sort_columns" group_builder->
data = Sort_Columns_Data.setup create_connection_fn table_builder
group_builder.teardown <|
data.teardown
group_builder.specify "should work as shown in the doc examples" <|
sorted = data.table.sort_columns
expect_column_names ["Foo 2", "bar", "foo 001", "foo 1", "foo 100", "foo 21", "foo 3"] sorted
sorted.columns.first.to_vector . should_equal [10,11,12]
expect_column_names ["bar", "foo 001", "foo 1", "Foo 2", "foo 3", "foo 21", "foo 100"] <| data.table.sort_columns text_ordering=(Text_Ordering.Case_Insensitive sort_digits_as_numbers=True)
expect_column_names ["foo 3", "foo 21", "foo 100", "foo 1", "foo 001", "bar", "Foo 2"] <| data.table.sort_columns Sort_Direction.Descending
group_builder.specify "should correctly handle case-insensitive sorting" <|
expect_column_names ["bar", "foo 001", "foo 1", "foo 100", "Foo 2", "foo 21", "foo 3"] <| data.table.sort_columns text_ordering=(Text_Ordering.Case_Insensitive)
group_builder.specify "should correctly handle natural order sorting" <|
expect_column_names ["Foo 2", "bar", "foo 001", "foo 1", "foo 3", "foo 21", "foo 100"] <| data.table.sort_columns text_ordering=(Text_Ordering.Default sort_digits_as_numbers=True)
group_builder.specify "should correctly handle various combinations of options" <|
expect_column_names ["foo 100", "foo 21", "foo 3", "Foo 2", "foo 1", "foo 001", "bar"] <| data.table.sort_columns Sort_Direction.Descending text_ordering=(Text_Ordering.Case_Insensitive sort_digits_as_numbers=True)
suite_builder.group prefix+"Table.rename_columns" group_builder->
data = Rename_Columns_Data.setup create_connection_fn table_builder
group_builder.teardown <|
data.teardown
group_builder.specify "should work as shown in the doc examples" <|
expect_column_names ["FirstColumn", "beta", "gamma", "delta"] <|
data.table.rename_columns ["FirstColumn"]
expect_column_names ["prefix_alpha", "prefix_beta", "prefix_gamma", "prefix_delta"] <|
data.table.rename_columns (data.table.columns.map c-> "prefix_" + c.name)
t1 = table_builder [["alpha", [1]], ["name=123", [2]], ["name= foo bar", [3]]]
expect_column_names ["alpha", "key:123", "key: foo bar"] <|
t1.rename_columns (Map.from_vector [["name=(.*)".to_regex, "key:$1"]])
group_builder.specify "should work by index" <|
map = Map.from_vector [[0, "FirstColumn"], [-2, "Another"]]
expect_column_names ["FirstColumn", "beta", "Another", "delta"] <|
data.table.rename_columns map
group_builder.specify "should work by position" <|
vec = ["one", "two", "three"]
expect_column_names ["one", "two", "three", "delta"] <|
data.table.rename_columns vec
group_builder.specify "should work by Vector" <|
vec = ["one", "two", "three"]
expect_column_names ["one", "two", "three", "delta"] <|
data.table.rename_columns vec
group_builder.specify "should work by Vector of Pairs" <|
vec = [["beta", "one"], ["delta", "two"], ["alpha", "three"]]
expect_column_names ["three", "one", "gamma", "two"] <|
data.table.rename_columns vec
group_builder.specify "should work by name" <|
map = Map.from_vector [["alpha", "FirstColumn"], ["delta", "Another"]]
expect_column_names ["FirstColumn", "beta", "gamma", "Another"] <|
data.table.rename_columns map
group_builder.specify "should work by mixed Map" <|
map = Map.from_vector [["alpha", "FirstColumn"], [-1, "Another"]]
expect_column_names ["FirstColumn", "beta", "gamma", "Another"] <|
data.table.rename_columns map
group_builder.specify "should work by name case-insensitively" <|
map = Map.from_vector [["ALPHA", "FirstColumn"], ["DELTA", "Another"]]
expect_column_names ["FirstColumn", "beta", "gamma", "Another"] <|
data.table.rename_columns map Case_Sensitivity.Insensitive
group_builder.specify "should work by name using regex" <|
map = Map.from_vector [["a.*".to_regex, "FirstColumn"]]
expect_column_names ["FirstColumn", "beta", "gamma", "delta"] <|
data.table.rename_columns map
group_builder.specify "should work by name using regex substitution" <|
map = Map.from_vector [["a(.*)".to_regex, "$1"]]
expect_column_names ["lpha", "beta", "gamma", "delta"] <|
data.table.rename_columns map
group_builder.specify "should correctly handle problems: unmatched names" <|
weird_name = '.*?-!@#!"'
map = Map.from_vector [["alpha", "FirstColumn"], ["omicron", "Another"], [weird_name, "Fixed"]]
action = data.table.rename_columns map error_on_missing_columns=False on_problems=_
tester = expect_column_names ["FirstColumn", "beta", "gamma", "delta"]
err_checker err =
err.catch.should_be_a Missing_Input_Columns.Error
err.catch.criteria.should_contain_the_same_elements_as ["omicron", weird_name]
Problems.test_advanced_problem_handling action err_checker (x-> x) tester
err = data.table.rename_columns map
err.should_fail_with Missing_Input_Columns
group_builder.specify "should correctly handle problems: out of bounds indices" <|
map = Map.from_vector [[0, "FirstColumn"], [-1, "Another"], [100, "Boo"], [-200, "Nothing"], [300, "Here"]]
action = data.table.rename_columns map error_on_missing_columns=False on_problems=_
tester = expect_column_names ["FirstColumn", "beta", "gamma", "Another"]
err_checker err =
err.catch.should_be_a Missing_Input_Columns.Error
err.catch.criteria.should_contain_the_same_elements_as [-200, 100, 300]
Problems.test_advanced_problem_handling action err_checker (x-> x) tester
err = data.table.rename_columns map
err.should_fail_with Missing_Input_Columns
group_builder.specify "should correctly handle edge-cases: aliased indices" <|
map1 = Map.from_vector [[1, "FirstColumn"], [-3, "FirstColumn"]]
t1 = data.table.rename_columns map1 on_problems=Problem_Behavior.Report_Error
Problems.assume_no_problems t1
expect_column_names ["alpha", "FirstColumn", "gamma", "delta"] t1
map2 = Map.from_vector [[1, "FirstColumn"], [-3, "DifferentName!"]]
t2 = data.table.rename_columns map2 on_problems=Problem_Behavior.Report_Error
t2.should_fail_with Ambiguous_Column_Rename
err = t2.catch
err.column_name . should_equal "beta"
err.new_names . should_equal ["FirstColumn", "DifferentName!"]
group_builder.specify "should correctly handle edge-cases: aliased selectors" <|
t = table_builder [["alpha", [1,2,3]], ["bet", [4,5,6]]]
map1 = Map.from_vector [["a.*".to_regex, "AA"], [".*a".to_regex, "AA"]]
t1 = t.rename_columns map1 on_problems=Problem_Behavior.Report_Error
@@ -446,47 +511,47 @@ spec setup =
Problems.assume_no_problems t4
expect_column_names ["aaA", "bbb"] t4
group_builder.specify "should correctly handle problems: invalid names ''" <|
map = Map.from_vector [[1, ""]]
[Problem_Behavior.Ignore, Problem_Behavior.Report_Warning, Problem_Behavior.Report_Error].each pb->
r = data.table.rename_columns map on_problems=pb
r.should_fail_with Invalid_Column_Names
group_builder.specify "should correctly handle problems: invalid names Nothing" <|
map = ["alpha", Nothing]
[Problem_Behavior.Ignore, Problem_Behavior.Report_Warning, Problem_Behavior.Report_Error].each pb->
r = data.table.rename_columns map on_problems=pb
r.should_fail_with Invalid_Column_Names
group_builder.specify "should correctly handle problems: invalid names null character" <|
map = ["alpha", 'a\0b']
[Problem_Behavior.Ignore, Problem_Behavior.Report_Warning, Problem_Behavior.Report_Error].each pb->
r = data.table.rename_columns map on_problems=pb
r.should_fail_with Invalid_Column_Names
group_builder.specify "should correctly handle problems: duplicate names" <|
map = ["Test", "Test", "Test", "Test"]
action = data.table.rename_columns map on_problems=_
tester = expect_column_names ["Test 1", "Test 2", "Test 3", "Test"]
problems = [Duplicate_Output_Column_Names.Error ["Test", "Test", "Test"]]
Problems.test_problem_handling action problems tester
group_builder.specify "should correctly handle problems: new name is clashing with existing name of existing column" <|
map = Map.from_vector [["alpha", "beta"]]
action = data.table.rename_columns map on_problems=_
tester = expect_column_names ["beta", "beta 1", "gamma", "delta"]
problems = [Duplicate_Output_Column_Names.Error ["beta"]]
Problems.test_problem_handling action problems tester
map2 = Map.from_vector [["beta", "alpha"]]
action2 = data.table.rename_columns map2 on_problems=_
tester2 = expect_column_names ["alpha 1", "alpha", "gamma", "delta"]
problems2 = [Duplicate_Output_Column_Names.Error ["alpha"]]
Problems.test_problem_handling action2 problems2 tester2
group_builder.specify "should correctly handle problems: too many input names" <|
map = ["A", "B", "C", "D", "E", "F"]
action = data.table.rename_columns map on_problems=_
tester = expect_column_names ["A", "B", "C", "D"]
problem_checker problem =
problem.should_be_a Too_Many_Column_Names_Provided.Error
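One behavioural assumption runs through all of these files: `group_builder.teardown` has to fire once per group, after its last spec, because it is what closes the connection that the lazy `*_Data` setup opened. That once-after-all-specs timing is inferred from the connection-per-group usage here rather than stated anywhere in this diff. The contract, sketched with illustrative names:

from Standard.Base import all
from Standard.Test_New import all

add_specs suite_builder =
    suite_builder.group "Resource lifecycle" group_builder->
        group_builder.teardown <|
            IO.println "runs once, after the group's last spec"
        group_builder.specify "first spec" <|
            1 . should_equal 1
        group_builder.specify "second spec" <|
            2 . should_equal 2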
@@ -7,133 +7,185 @@ import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
from Standard.Table.Data.Aggregate_Column.Aggregate_Column import Group_By, Sum
from Standard.Table.Errors import all
from Standard.Test_New import all
from project.Util import all
from project.Common_Table_Operations.Util import run_default_backend
main = run_default_backend add_specs
type Table_Take_Drop_Data
Value ~data
connection self = self.data.at 0
table self = self.data.at 1
empty self = self.data.at 2
setup create_connection_fn table_builder = Table_Take_Drop_Data.Value <|
connection = create_connection_fn Nothing
table =
col1 = ["alpha", [1,2,3,4,5,6,7,8]]
col2 = ["beta", ["A","B","C","D","E","F","G","H"]]
(table_builder [col1, col2] connection=connection) . order_by "alpha"
empty = table.remove_all_rows
[connection, table, empty]
teardown self =
self.connection.close
type Column_Take_Drop_Data
Value ~data
connection self = self.data.at 0
table self = self.data.at 1
alpha self = self.data.at 2
beta self = self.data.at 3
empty_alpha self = self.data.at 4
empty_beta self = self.data.at 5
setup create_connection_fn table_builder = Column_Take_Drop_Data.Value <|
connection = create_connection_fn Nothing
table =
col1 = ["alpha", [1,2,3,4,5,6,7,8]]
col2 = ["beta", ["A","B","C","D","E","F","G","H"]]
(table_builder [col1, col2] connection=connection) . order_by "alpha"
alpha = table.at "alpha"
beta = table.at "beta"
empty_table = table.remove_all_rows
empty_alpha = empty_table.at "alpha"
empty_beta = empty_table.at "beta"
[connection, table, alpha, beta, empty_alpha, empty_beta]
teardown self =
self.connection.close
add_specs suite_builder setup =
prefix = setup.prefix
create_connection_fn = setup.create_connection_func
suite_builder.group prefix+"Table.take/drop" group_builder->
data = Table_Take_Drop_Data.setup create_connection_fn setup.table_builder
table.drop (Index_Sub_Range.By_Index 0) . at "alpha" . to_vector . should_equal [2, 3, 4, 5, 6, 7, 8]
table.drop (Index_Sub_Range.By_Index []) . should_equal table
table.drop (Index_Sub_Range.By_Index [-1, -1]) . at "alpha" . to_vector . should_equal [1, 2, 3, 4, 5, 6, 7]
table.drop (Index_Sub_Range.By_Index [0, 0, 3.up_to 100]) . at "alpha" . to_vector . should_equal [2, 3]
table.drop (0.up_to 100 . with_step 2) . at "alpha" . to_vector . should_equal [2, 4, 6, 8]
table.drop (Index_Sub_Range.By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . at "alpha" . to_vector . should_equal [8]
table.drop (Index_Sub_Range.By_Index [1.up_to 3, 2.up_to 5]) . at "alpha" . to_vector . should_equal [1, 6, 7, 8]
table.drop (Index_Sub_Range.By_Index [2.up_to 5, 1.up_to 3]) . at "alpha" . to_vector . should_equal [1, 6, 7, 8]
table.drop (Index_Sub_Range.By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds
table.drop (Index_Sub_Range.By_Index 100) . should_fail_with Index_Out_Of_Bounds
group_builder.teardown <|
data.teardown
Test.specify "should allow selecting every Nth row" <|
table.take (Every 1) . should_equal table
table.take (Every 3) . at "alpha" . to_vector . should_equal [1, 4, 7]
table.take (Every 3 first=1) . at "alpha" . to_vector . should_equal [2, 5, 8]
table.take (Every 2 first=1) . at "beta" . to_vector . should_equal ["B", "D", "F", "H"]
table.take (Every 2 first=100) . at "alpha" . to_vector . should_equal []
table.take (Every 200) . at "alpha" . to_vector . should_equal [1]
empty.take (Every 2) . should_equal empty
table.take (Every 0) . should_fail_with Illegal_Argument
empty.take (Every 0) . should_fail_with Illegal_Argument
table_builder cols =
setup.table_builder cols connection=data.connection
table.drop (Every 1) . should_equal empty
table.drop (Every 3) . at "alpha" . to_vector . should_equal [2, 3, 5, 6, 8]
table.drop (Every 3 first=1) . at "alpha" . to_vector . should_equal [1, 3, 4, 6, 7]
table.drop (Every 2 first=1) . at "alpha" . to_vector . should_equal [1, 3, 5, 7]
table.drop (Every 2 first=100) . should_equal table
table.drop (Every 200) . at "beta" . to_vector . should_equal ["B", "C", "D", "E", "F", "G", "H"]
empty.drop (Every 2) . should_equal empty
table.drop (Every 0) . should_fail_with Illegal_Argument
empty.drop (Every 0) . should_fail_with Illegal_Argument
group_builder.specify "should allow selecting first or last N rows" <|
data.table.take.at "alpha" . to_vector . should_equal [1]
data.table.take.at "beta" . to_vector . should_equal ["A"]
data.table.drop.at "alpha" . to_vector . should_equal [2,3,4,5,6,7,8]
data.table.take (First 4) . at "alpha" . to_vector . should_equal [1,2,3,4]
data.table.take (First 0) . at "alpha" . to_vector . should_equal []
data.table.take (First -1) . at "alpha" . to_vector . should_equal []
data.table.take (First 100) . should_equal data.table
data.table.drop (First 2) . at "beta" . to_vector . should_equal ["C","D","E","F","G","H"]
data.table.drop (First 0) . should_equal data.table
data.table.drop (First -1) . should_equal data.table
data.table.drop (First 100) . should_equal data.empty
data.table.take 4 . at "alpha" . to_vector . should_equal [1,2,3,4]
data.table.take 0 . at "alpha" . to_vector . should_equal []
data.table.take -1 . at "alpha" . to_vector . should_equal []
data.table.take 100 . should_equal data.table
data.table.drop 2 . at "beta" . to_vector . should_equal ["C","D","E","F","G","H"]
data.table.drop 0 . should_equal data.table
data.table.drop -1 . should_equal data.table
data.table.drop 100 . should_equal data.empty
data.table.take (Last 4) . at "beta" . to_vector . should_equal ["E","F","G","H"]
data.table.take (Last 0) . should_equal data.empty
data.table.take (Last -1) . should_equal data.empty
data.table.take (Last 100) . should_equal data.table
data.table.drop (Last 2) . at "alpha" . to_vector . should_equal [1,2,3,4,5,6]
data.table.drop (Last 0) . should_equal data.table
data.table.drop (Last -1) . should_equal data.table
data.table.drop (Last 100) . should_equal data.empty
group_builder.specify "should handle consecutive take/drops" <|
data.table.take 5 . order_by "alpha" . take 3 . at "alpha" . to_vector . should_equal [1, 2, 3]
data.table.take 3 . order_by "alpha" . take 5 . at "alpha" . to_vector . should_equal [1, 2, 3]
data.table.take 5 . order_by "alpha" . drop 3 . at "alpha" . to_vector . should_equal [4, 5]
data.table.drop 3 . order_by "alpha" . drop 2 . at "alpha" . to_vector . should_equal [6, 7, 8]
data.table.drop 2 . order_by "alpha" . drop 3 . at "alpha" . to_vector . should_equal [6, 7, 8]
data.table.drop 3 . order_by "alpha" . take 2 . at "alpha" . to_vector . should_equal [4, 5]
group_builder.specify "should allow selecting rows by ranges or indices" <|
data.table.take (2.up_to 4) . at "beta" . to_vector . should_equal ["C", "D"]
data.table.take (0.up_to 0) . should_equal data.empty
data.table.take (100.up_to 100) . should_fail_with Index_Out_Of_Bounds
data.table.take (100.up_to 100) . catch . should_equal (Index_Out_Of_Bounds.Error 100 8)
data.table.take (0.up_to 100) . should_equal data.table
data.table.take (0.up_to data.table.row_count) . should_equal data.table
data.empty.take (0.up_to 0) . should_fail_with Index_Out_Of_Bounds
data.empty.take (0.up_to 0) . catch . should_equal (Index_Out_Of_Bounds.Error 0 0)
data.table.take (100.up_to 99) . should_fail_with Index_Out_Of_Bounds
data.table.drop (2.up_to 4) . at "alpha" . to_vector . should_equal [1, 2, 5, 6, 7, 8]
data.table.drop (0.up_to 0) . should_equal data.table
data.table.drop (100.up_to 100) . should_fail_with Index_Out_Of_Bounds
data.table.drop (100.up_to 100) . catch . should_equal (Index_Out_Of_Bounds.Error 100 8)
data.table.drop (0.up_to 100) . should_equal data.empty
data.table.drop (0.up_to data.table.row_count) . should_equal data.empty
data.empty.drop (0.up_to 0) . should_fail_with Index_Out_Of_Bounds
data.empty.drop (0.up_to 0) . catch . should_equal (Index_Out_Of_Bounds.Error 0 0)
data.table.drop (100.up_to 99) . should_fail_with Index_Out_Of_Bounds
data.table.take (Index_Sub_Range.By_Index 0) . at "beta" . to_vector . should_equal ["A"]
data.empty.take (Index_Sub_Range.By_Index 0) . should_fail_with Index_Out_Of_Bounds
data.table.take (Index_Sub_Range.By_Index []) . should_equal data.empty
data.table.take (Index_Sub_Range.By_Index [-1, -1]) . at "beta" . to_vector . should_equal ["H", "H"]
data.table.take (Index_Sub_Range.By_Index [0, 0, 3.up_to 100]) . at "alpha" . to_vector . should_equal [1, 1, 4, 5, 6, 7, 8]
data.table.take (0.up_to 100 . with_step 2) . at "alpha" . to_vector . should_equal [1, 3, 5, 7]
data.table.take (Index_Sub_Range.By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . at "alpha" . to_vector . should_equal [1, 3, 5, 7, 2, 4, 6]
data.table.take (Index_Sub_Range.By_Index [1.up_to 3, 2.up_to 5]) . at "alpha" . to_vector . should_equal [2, 3, 3, 4, 5]
data.table.take (Index_Sub_Range.By_Index [2.up_to 5, 1.up_to 3]) . at "alpha" . to_vector . should_equal [3, 4, 5, 2, 3]
data.table.take (Index_Sub_Range.By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds
data.table.take (Index_Sub_Range.By_Index 100) . should_fail_with Index_Out_Of_Bounds
data.table.drop (Index_Sub_Range.By_Index 0) . at "alpha" . to_vector . should_equal [2, 3, 4, 5, 6, 7, 8]
data.table.drop (Index_Sub_Range.By_Index []) . should_equal data.table
data.table.drop (Index_Sub_Range.By_Index [-1, -1]) . at "alpha" . to_vector . should_equal [1, 2, 3, 4, 5, 6, 7]
data.table.drop (Index_Sub_Range.By_Index [0, 0, 3.up_to 100]) . at "alpha" . to_vector . should_equal [2, 3]
data.table.drop (0.up_to 100 . with_step 2) . at "alpha" . to_vector . should_equal [2, 4, 6, 8]
data.table.drop (Index_Sub_Range.By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . at "alpha" . to_vector . should_equal [8]
data.table.drop (Index_Sub_Range.By_Index [1.up_to 3, 2.up_to 5]) . at "alpha" . to_vector . should_equal [1, 6, 7, 8]
data.table.drop (Index_Sub_Range.By_Index [2.up_to 5, 1.up_to 3]) . at "alpha" . to_vector . should_equal [1, 6, 7, 8]
data.table.drop (Index_Sub_Range.By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds
data.table.drop (Index_Sub_Range.By_Index 100) . should_fail_with Index_Out_Of_Bounds
group_builder.specify "should allow selecting every Nth row" <|
data.table.take (Every 1) . should_equal data.table
data.table.take (Every 3) . at "alpha" . to_vector . should_equal [1, 4, 7]
data.table.take (Every 3 first=1) . at "alpha" . to_vector . should_equal [2, 5, 8]
data.table.take (Every 2 first=1) . at "beta" . to_vector . should_equal ["B", "D", "F", "H"]
data.table.take (Every 2 first=100) . at "alpha" . to_vector . should_equal []
data.table.take (Every 200) . at "alpha" . to_vector . should_equal [1]
data.empty.take (Every 2) . should_equal data.empty
data.table.take (Every 0) . should_fail_with Illegal_Argument
data.empty.take (Every 0) . should_fail_with Illegal_Argument
data.table.drop (Every 1) . should_equal data.empty
data.table.drop (Every 3) . at "alpha" . to_vector . should_equal [2, 3, 5, 6, 8]
data.table.drop (Every 3 first=1) . at "alpha" . to_vector . should_equal [1, 3, 4, 6, 7]
data.table.drop (Every 2 first=1) . at "alpha" . to_vector . should_equal [1, 3, 5, 7]
data.table.drop (Every 2 first=100) . should_equal data.table
data.table.drop (Every 200) . at "beta" . to_vector . should_equal ["B", "C", "D", "E", "F", "G", "H"]
data.empty.drop (Every 2) . should_equal data.empty
data.table.drop (Every 0) . should_fail_with Illegal_Argument
data.empty.drop (Every 0) . should_fail_with Illegal_Argument
if setup.is_database.not then
Test.specify "should allow sampling rows" <|
group_builder.specify "should allow sampling rows" <|
one = table_builder [["X", ["a"]]] . order_by "X"
two = table_builder [["X", ["a", "a"]]] . order_by "X"
three = table_builder [["X", ["a", "a", "a"]]] . order_by "X"
@ -151,22 +203,22 @@ spec setup =
three.drop (Sample 1) . should_equal two
three.drop (Sample 100) . should_equal empty
rnd = table.take (Sample 3 seed=42)
rnd = data.table.take (Sample 3 seed=42)
random_indices = [5, 6, 2]
alpha_sample = random_indices.map (table.at "alpha" . to_vector . at)
beta_sample = random_indices.map (table.at "beta" . to_vector . at)
alpha_sample = random_indices.map (data.table.at "alpha" . to_vector . at)
beta_sample = random_indices.map (data.table.at "beta" . to_vector . at)
rnd.at "alpha" . to_vector . should_equal alpha_sample
rnd.at "beta" . to_vector . should_equal beta_sample
Test.specify "sampling should be deterministic when a seed is supplied" <|
table.take (Sample 3 seed=4200000) . should_equal (table.take (Sample 3 seed=4200000))
group_builder.specify "sampling should be deterministic when a seed is supplied" <|
data.table.take (Sample 3 seed=4200000) . should_equal (data.table.take (Sample 3 seed=4200000))
Test.specify "sampling should be non-deterministic when a seed is not supplied" <|
group_builder.specify "sampling should be non-deterministic when a seed is not supplied" <|
0.up_to 3 . map _->
table.take (Sample 3) . should_not_equal (table.take (Sample 3))
data.table.take (Sample 3) . should_not_equal (data.table.take (Sample 3))
if setup.is_database.not then
Test.specify "should allow selecting rows as long as they satisfy a predicate" <|
group_builder.specify "should allow selecting rows as long as they satisfy a predicate" <|
t = table_builder [["a", [1, 2, 3, 4]], ["b", [5, 6, 7, 8]]]
t2 = t.take (While (row -> row.at "a" < 3))
@ -174,7 +226,7 @@ spec setup =
t2.at "a" . to_vector . should_equal [1, 2]
t2.at "b" . to_vector . should_equal [5, 6]
Test.specify "should gracefully handle missing constructor arguments" <|
group_builder.specify "should gracefully handle missing constructor arguments" <|
t = table_builder [["X", [1, 2, 3]]]
t.take "FOO" . should_fail_with Type_Error
t.drop "FOO" . should_fail_with Type_Error
@ -191,7 +243,7 @@ spec setup =
r3.should_fail_with Illegal_Argument
r3.catch.to_display_text . should_contain "Got a Function instead of a range, is a constructor argument missing?"
Test.specify "unordered table" <|
group_builder.specify "unordered table" <|
unordered_table =
col1 = ["alpha", [1,2,3,4,5,6,7,8]]
col2 = ["beta", ["A","B","C","D","E","F","G","H"]]
@ -200,7 +252,7 @@ spec setup =
True -> unordered_table.take . should_fail_with Illegal_Argument
False -> unordered_table.take . at "alpha" . to_vector . should_equal [1]
Test.specify "Should work correctly after aggregation" <|
group_builder.specify "Should work correctly after aggregation" <|
t0 = table_builder [["X", ["a", "b", "a", "c"]], ["Y", [1, 2, 4, 8]]]
t1 = t0.aggregate [Group_By "X", Sum "Y"]
@ -212,128 +264,125 @@ spec setup =
t3.at "X" . to_vector . should_equal ['b', 'c']
t3.at "Sum Y" . to_vector . should_equal [2.0, 8.0]
Test.group prefix+"Column.take/drop" <|
table =
col1 = ["alpha", [1,2,3,4,5,6,7,8]]
col2 = ["beta", ["A","B","C","D","E","F","G","H"]]
table_builder [col1, col2] . order_by "alpha"
alpha = table.at "alpha"
beta = table.at "beta"
suite_builder.group prefix+"Column.take/drop" group_builder->
data = Column_Take_Drop_Data.setup create_connection_fn setup.table_builder
empty_table = table.remove_all_rows
empty_alpha = empty_table.at "alpha"
empty_beta = empty_table.at "beta"
group_builder.teardown <|
data.teardown
Test.specify "should allow selecting first or last N rows" <|
alpha.take.to_vector . should_equal [1]
beta.take.to_vector . should_equal ["A"]
alpha.drop.to_vector . should_equal [2,3,4,5,6,7,8]
table_builder cols =
setup.table_builder cols connection=data.connection
alpha.take (First 4) . to_vector . should_equal [1,2,3,4]
alpha.take (First 0) . should_equal empty_alpha
alpha.take (First -1) . should_equal empty_alpha
alpha.take (First 100) . should_equal alpha
group_builder.specify "should allow selecting first or last N rows" <|
data.alpha.take.to_vector . should_equal [1]
data.beta.take.to_vector . should_equal ["A"]
data.alpha.drop.to_vector . should_equal [2,3,4,5,6,7,8]
alpha.take 4 . to_vector . should_equal [1,2,3,4]
alpha.take 0 . should_equal empty_alpha
alpha.take -1 . should_equal empty_alpha
alpha.take 100 . should_equal alpha
data.alpha.take (First 4) . to_vector . should_equal [1,2,3,4]
data.alpha.take (First 0) . should_equal data.empty_alpha
data.alpha.take (First -1) . should_equal data.empty_alpha
data.alpha.take (First 100) . should_equal data.alpha
beta.drop (First 2) . to_vector . should_equal ["C","D","E","F","G","H"]
alpha.drop (First 0) . should_equal alpha
alpha.drop (First -1) . should_equal alpha
alpha.drop (First 100) . should_equal empty_alpha
data.alpha.take 4 . to_vector . should_equal [1,2,3,4]
data.alpha.take 0 . should_equal data.empty_alpha
data.alpha.take -1 . should_equal data.empty_alpha
data.alpha.take 100 . should_equal data.alpha
beta.drop 2 . to_vector . should_equal ["C","D","E","F","G","H"]
alpha.drop 0 . should_equal alpha
alpha.drop -1 . should_equal alpha
alpha.drop 100 . should_equal empty_alpha
data.beta.drop (First 2) . to_vector . should_equal ["C","D","E","F","G","H"]
data.alpha.drop (First 0) . should_equal data.alpha
data.alpha.drop (First -1) . should_equal data.alpha
data.alpha.drop (First 100) . should_equal data.empty_alpha
beta.take (Last 4) . to_vector . should_equal ["E","F","G","H"]
beta.take (Last 0) . should_equal empty_beta
beta.take (Last -1) . should_equal empty_beta
beta.take (Last 100) . should_equal beta
data.beta.drop 2 . to_vector . should_equal ["C","D","E","F","G","H"]
data.alpha.drop 0 . should_equal data.alpha
data.alpha.drop -1 . should_equal data.alpha
data.alpha.drop 100 . should_equal data.empty_alpha
alpha.drop (Last 2) . to_vector . should_equal [1,2,3,4,5,6]
alpha.drop (Last 0) . should_equal alpha
alpha.drop (Last -1) . should_equal alpha
alpha.drop (Last 100) . should_equal empty_alpha
data.beta.take (Last 4) . to_vector . should_equal ["E","F","G","H"]
data.beta.take (Last 0) . should_equal data.empty_beta
data.beta.take (Last -1) . should_equal data.empty_beta
data.beta.take (Last 100) . should_equal data.beta
Test.specify "should handle consecutive take/drops" <|
alpha.take 5 . sort . take 3 . to_vector . should_equal [1, 2, 3]
alpha.take 3 . sort . take 5 . to_vector . should_equal [1, 2, 3]
alpha.take 5 . sort . drop 3 . to_vector . should_equal [4, 5]
alpha.drop 3 . sort . drop 2 . to_vector . should_equal [6, 7, 8]
alpha.drop 2 . sort . drop 3 . to_vector . should_equal [6, 7, 8]
alpha.drop 3 . sort . take 2 . to_vector . should_equal [4, 5]
data.alpha.drop (Last 2) . to_vector . should_equal [1,2,3,4,5,6]
data.alpha.drop (Last 0) . should_equal data.alpha
data.alpha.drop (Last -1) . should_equal data.alpha
data.alpha.drop (Last 100) . should_equal data.empty_alpha
Test.specify "should allow selecting rows by ranges or indices" <|
beta.take (2.up_to 4) . to_vector . should_equal ["C", "D"]
beta.take (0.up_to 0) . should_equal empty_beta
beta.take (100.up_to 100) . should_fail_with Index_Out_Of_Bounds
beta.take (100.up_to 100) . catch . should_equal (Index_Out_Of_Bounds.Error 100 8)
beta.take (0.up_to 100) . should_equal beta
beta.take (0.up_to table.row_count) . should_equal beta
empty_beta.take (0.up_to 0) . should_fail_with Index_Out_Of_Bounds
empty_beta.take (0.up_to 0) . catch . should_equal (Index_Out_Of_Bounds.Error 0 0)
beta.take (100.up_to 99) . should_fail_with Index_Out_Of_Bounds
group_builder.specify "should handle consecutive take/drops" <|
data.alpha.take 5 . sort . take 3 . to_vector . should_equal [1, 2, 3]
data.alpha.take 3 . sort . take 5 . to_vector . should_equal [1, 2, 3]
data.alpha.take 5 . sort . drop 3 . to_vector . should_equal [4, 5]
data.alpha.drop 3 . sort . drop 2 . to_vector . should_equal [6, 7, 8]
data.alpha.drop 2 . sort . drop 3 . to_vector . should_equal [6, 7, 8]
data.alpha.drop 3 . sort . take 2 . to_vector . should_equal [4, 5]
alpha.drop (2.up_to 4) . to_vector . should_equal [1, 2, 5, 6, 7, 8]
alpha.drop (0.up_to 0) . should_equal alpha
alpha.drop (100.up_to 100) . should_fail_with Index_Out_Of_Bounds
alpha.drop (100.up_to 100) . catch . should_equal (Index_Out_Of_Bounds.Error 100 8)
alpha.drop (0.up_to 100) . should_equal empty_alpha
alpha.drop (0.up_to table.row_count) . should_equal empty_alpha
empty_alpha.drop (0.up_to 0) . should_fail_with Index_Out_Of_Bounds
empty_alpha.drop (0.up_to 0) . catch . should_equal (Index_Out_Of_Bounds.Error 0 0)
alpha.drop (100.up_to 99) . should_fail_with Index_Out_Of_Bounds
group_builder.specify "should allow selecting rows by ranges or indices" <|
data.beta.take (2.up_to 4) . to_vector . should_equal ["C", "D"]
data.beta.take (0.up_to 0) . should_equal data.empty_beta
data.beta.take (100.up_to 100) . should_fail_with Index_Out_Of_Bounds
data.beta.take (100.up_to 100) . catch . should_equal (Index_Out_Of_Bounds.Error 100 8)
data.beta.take (0.up_to 100) . should_equal data.beta
data.beta.take (0.up_to data.table.row_count) . should_equal data.beta
data.empty_beta.take (0.up_to 0) . should_fail_with Index_Out_Of_Bounds
data.empty_beta.take (0.up_to 0) . catch . should_equal (Index_Out_Of_Bounds.Error 0 0)
data.beta.take (100.up_to 99) . should_fail_with Index_Out_Of_Bounds
beta.take (Index_Sub_Range.By_Index 0) . to_vector . should_equal ["A"]
empty_beta.take (Index_Sub_Range.By_Index 0) . should_fail_with Index_Out_Of_Bounds
beta.take (Index_Sub_Range.By_Index []) . should_equal empty_beta
beta.take (Index_Sub_Range.By_Index [-1, -1]) . to_vector . should_equal ["H", "H"]
alpha.take (Index_Sub_Range.By_Index [0, 0, 3.up_to 100]) . to_vector . should_equal [1, 1, 4, 5, 6, 7, 8]
alpha.take (0.up_to 100 . with_step 2) . to_vector . should_equal [1, 3, 5, 7]
alpha.take (Index_Sub_Range.By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . to_vector . should_equal [1, 3, 5, 7, 2, 4, 6]
alpha.take (Index_Sub_Range.By_Index [1.up_to 3, 2.up_to 5]) . to_vector . should_equal [2, 3, 3, 4, 5]
alpha.take (Index_Sub_Range.By_Index [2.up_to 5, 1.up_to 3]) . to_vector . should_equal [3, 4, 5, 2, 3]
alpha.take (Index_Sub_Range.By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds
alpha.take (Index_Sub_Range.By_Index 100) . should_fail_with Index_Out_Of_Bounds
data.alpha.drop (2.up_to 4) . to_vector . should_equal [1, 2, 5, 6, 7, 8]
data.alpha.drop (0.up_to 0) . should_equal data.alpha
data.alpha.drop (100.up_to 100) . should_fail_with Index_Out_Of_Bounds
data.alpha.drop (100.up_to 100) . catch . should_equal (Index_Out_Of_Bounds.Error 100 8)
data.alpha.drop (0.up_to 100) . should_equal data.empty_alpha
data.alpha.drop (0.up_to data.table.row_count) . should_equal data.empty_alpha
data.empty_alpha.drop (0.up_to 0) . should_fail_with Index_Out_Of_Bounds
data.empty_alpha.drop (0.up_to 0) . catch . should_equal (Index_Out_Of_Bounds.Error 0 0)
data.alpha.drop (100.up_to 99) . should_fail_with Index_Out_Of_Bounds
alpha.drop (Index_Sub_Range.By_Index 0) . to_vector . should_equal [2, 3, 4, 5, 6, 7, 8]
alpha.drop (Index_Sub_Range.By_Index []) . should_equal alpha
alpha.drop (Index_Sub_Range.By_Index [-1, -1]) . to_vector . should_equal [1, 2, 3, 4, 5, 6, 7]
alpha.drop (Index_Sub_Range.By_Index [0, 0, 3.up_to 100]) . to_vector . should_equal [2, 3]
alpha.drop (0.up_to 100 . with_step 2) . to_vector . should_equal [2, 4, 6, 8]
alpha.drop (Index_Sub_Range.By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . to_vector . should_equal [8]
alpha.drop (Index_Sub_Range.By_Index [1.up_to 3, 2.up_to 5]) . to_vector . should_equal [1, 6, 7, 8]
alpha.drop (Index_Sub_Range.By_Index [2.up_to 5, 1.up_to 3]) . to_vector . should_equal [1, 6, 7, 8]
alpha.drop (Index_Sub_Range.By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds
alpha.drop (Index_Sub_Range.By_Index 100) . should_fail_with Index_Out_Of_Bounds
data.beta.take (Index_Sub_Range.By_Index 0) . to_vector . should_equal ["A"]
data.empty_beta.take (Index_Sub_Range.By_Index 0) . should_fail_with Index_Out_Of_Bounds
data.beta.take (Index_Sub_Range.By_Index []) . should_equal data.empty_beta
data.beta.take (Index_Sub_Range.By_Index [-1, -1]) . to_vector . should_equal ["H", "H"]
data.alpha.take (Index_Sub_Range.By_Index [0, 0, 3.up_to 100]) . to_vector . should_equal [1, 1, 4, 5, 6, 7, 8]
data.alpha.take (0.up_to 100 . with_step 2) . to_vector . should_equal [1, 3, 5, 7]
data.alpha.take (Index_Sub_Range.By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . to_vector . should_equal [1, 3, 5, 7, 2, 4, 6]
data.alpha.take (Index_Sub_Range.By_Index [1.up_to 3, 2.up_to 5]) . to_vector . should_equal [2, 3, 3, 4, 5]
data.alpha.take (Index_Sub_Range.By_Index [2.up_to 5, 1.up_to 3]) . to_vector . should_equal [3, 4, 5, 2, 3]
data.alpha.take (Index_Sub_Range.By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds
data.alpha.take (Index_Sub_Range.By_Index 100) . should_fail_with Index_Out_Of_Bounds
Test.specify "should allow selecting every Nth row" <|
alpha.take (Every 1) . should_equal alpha
alpha.take (Every 3) . to_vector . should_equal [1, 4, 7]
alpha.take (Every 3 first=1) . to_vector . should_equal [2, 5, 8]
beta.take (Every 2 first=1) . to_vector . should_equal ["B", "D", "F", "H"]
alpha.take (Every 2 first=100) . to_vector . should_equal []
alpha.take (Every 200) . to_vector . should_equal [1]
empty_beta.take (Every 2) . should_equal empty_beta
beta.take (Every 0) . should_fail_with Illegal_Argument
empty_beta.take (Every 0) . should_fail_with Illegal_Argument
data.alpha.drop (Index_Sub_Range.By_Index 0) . to_vector . should_equal [2, 3, 4, 5, 6, 7, 8]
data.alpha.drop (Index_Sub_Range.By_Index []) . should_equal data.alpha
data.alpha.drop (Index_Sub_Range.By_Index [-1, -1]) . to_vector . should_equal [1, 2, 3, 4, 5, 6, 7]
data.alpha.drop (Index_Sub_Range.By_Index [0, 0, 3.up_to 100]) . to_vector . should_equal [2, 3]
data.alpha.drop (0.up_to 100 . with_step 2) . to_vector . should_equal [2, 4, 6, 8]
data.alpha.drop (Index_Sub_Range.By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . to_vector . should_equal [8]
data.alpha.drop (Index_Sub_Range.By_Index [1.up_to 3, 2.up_to 5]) . to_vector . should_equal [1, 6, 7, 8]
data.alpha.drop (Index_Sub_Range.By_Index [2.up_to 5, 1.up_to 3]) . to_vector . should_equal [1, 6, 7, 8]
data.alpha.drop (Index_Sub_Range.By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds
data.alpha.drop (Index_Sub_Range.By_Index 100) . should_fail_with Index_Out_Of_Bounds
alpha.drop (Every 1) . should_equal empty_alpha
alpha.drop (Every 3) . to_vector . should_equal [2, 3, 5, 6, 8]
alpha.drop (Every 3 first=1) . to_vector . should_equal [1, 3, 4, 6, 7]
alpha.drop (Every 2 first=1) . to_vector . should_equal [1, 3, 5, 7]
alpha.drop (Every 2 first=100) . should_equal alpha
beta.drop (Every 200) . to_vector . should_equal ["B", "C", "D", "E", "F", "G", "H"]
empty_beta.drop (Every 2) . should_equal empty_beta
beta.drop (Every 0) . should_fail_with Illegal_Argument
empty_beta.drop (Every 0) . should_fail_with Illegal_Argument
group_builder.specify "should allow selecting every Nth row" <|
data.alpha.take (Every 1) . should_equal data.alpha
data.alpha.take (Every 3) . to_vector . should_equal [1, 4, 7]
data.alpha.take (Every 3 first=1) . to_vector . should_equal [2, 5, 8]
data.beta.take (Every 2 first=1) . to_vector . should_equal ["B", "D", "F", "H"]
data.alpha.take (Every 2 first=100) . to_vector . should_equal []
data.alpha.take (Every 200) . to_vector . should_equal [1]
data.empty_beta.take (Every 2) . should_equal data.empty_beta
data.beta.take (Every 0) . should_fail_with Illegal_Argument
data.empty_beta.take (Every 0) . should_fail_with Illegal_Argument
data.alpha.drop (Every 1) . should_equal data.empty_alpha
data.alpha.drop (Every 3) . to_vector . should_equal [2, 3, 5, 6, 8]
data.alpha.drop (Every 3 first=1) . to_vector . should_equal [1, 3, 4, 6, 7]
data.alpha.drop (Every 2 first=1) . to_vector . should_equal [1, 3, 5, 7]
data.alpha.drop (Every 2 first=100) . should_equal data.alpha
data.beta.drop (Every 200) . to_vector . should_equal ["B", "C", "D", "E", "F", "G", "H"]
data.empty_beta.drop (Every 2) . should_equal data.empty_beta
data.beta.drop (Every 0) . should_fail_with Illegal_Argument
data.empty_beta.drop (Every 0) . should_fail_with Illegal_Argument
if setup.is_database.not then
Test.specify "should allow sampling rows" <|
group_builder.specify "should allow sampling rows" <|
three = table_builder [["X", ["a", "a", "a"]]] . at "X"
two = table_builder [["X", ["a", "a"]]] . at "X"
one_table = table_builder [["X", ["a"]]]
@ -361,13 +410,13 @@ spec setup =
three.drop (Sample 1) . should_equal two
three.drop (Sample 100) . should_equal empty
rnd = alpha.take (Sample 3 seed=42)
rnd = data.alpha.take (Sample 3 seed=42)
random_indices = [5, 6, 2]
sample = alpha.take (Index_Sub_Range.By_Index random_indices)
sample = data.alpha.take (Index_Sub_Range.By_Index random_indices)
rnd.should_equal sample
if setup.is_database.not then
Test.specify "should allow selecting rows as long as they satisfy a predicate" <|
group_builder.specify "should allow selecting rows as long as they satisfy a predicate" <|
col = table_builder [["X", [1, 3, 5, 6, 8, 9, 10, 11, 13]]] . at "X"
col.take (While (x-> x%2 == 1)) . to_vector . should_equal [1, 3, 5]
col.drop (While (x-> x%2 == 1)) . to_vector . should_equal [6, 8, 9, 10, 11, 13]
@ -381,7 +430,7 @@ spec setup =
three.drop (While (_ > 10)) . should_equal three
three.drop (While (_ < 10)) . should_equal empty
Test.specify "should gracefully handle missing constructor arguments" <|
group_builder.specify "should gracefully handle missing constructor arguments" <|
c = table_builder [["X", [1, 2, 3]]] . at "X"
c.take "FOO" . should_fail_with Type_Error
c.drop "FOO" . should_fail_with Type_Error
@ -398,7 +447,7 @@ spec setup =
r3.should_fail_with Illegal_Argument
r3.catch.to_display_text . should_contain "Got a Function instead of a range, is a constructor argument missing?"
Test.specify "unordered table" <|
group_builder.specify "unordered table" <|
unordered_table =
col1 = ["alpha", [1,2,3,4,5,6,7,8]]
col2 = ["beta", ["A","B","C","D","E","F","G","H"]]
@ -407,4 +456,3 @@ spec setup =
True -> unordered_table.at "alpha" . take . should_fail_with Illegal_Argument
False -> unordered_table.at "alpha" . take . to_vector . should_equal [1]
main = run_default_backend spec

View File

@ -1,21 +1,40 @@
from Standard.Base import all
from Standard.Test import Test, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Common_Table_Operations.Util import run_default_backend
main = run_default_backend spec
main = run_default_backend add_specs
spec setup =
type Data
Value ~connection
setup create_connection_fn = Data.Value <|
connection = create_connection_fn Nothing
connection
teardown self =
self.connection.close
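# `Value ~connection` suspends the field, so the connection is only opened
# when a spec first touches `data.connection`; the `group_builder.teardown`
# hook below then closes it once the whole group has finished.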
add_specs suite_builder setup =
prefix = setup.prefix
table_builder = setup.table_builder
create_connection_fn = setup.create_connection_func
Test.group prefix+"Temp column" <|
Test.specify "Can generate a temp column" <|
suite_builder.group prefix+"Temp column" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
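        # The local `table_builder` below shadows the generic one, so tables
        # created inside the specs all go through this group's shared connection.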
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "Can generate a temp column" <|
t1 = table_builder [["X", ['a', 'b']], ["Y", ['c', 'd']], ["Z", [40, 20]]]
t1.make_temp_column_name . should_equal "temp"
Test.specify "Can generate a temp column without name conflicts" <|
group_builder.specify "Can generate a temp column without name conflicts" <|
t1 = table_builder [["X", ['a', 'b']], ["Y", ['c', 'd']], ["temp", [40, 20]]]
t1.make_temp_column_name . should_equal "temp 1"

View File

@ -2,19 +2,36 @@ from Standard.Base import all
from Standard.Table.Errors import all
from Standard.Test import Test, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Common_Table_Operations.Util import run_default_backend
main = run_default_backend spec
spec setup =
type Data
Value ~connection
setup create_connection_fn = Data.Value <|
create_connection_fn Nothing
teardown self =
self.connection.close
add_specs suite_builder setup =
prefix = setup.prefix
table_builder = setup.table_builder
create_connection_fn = setup.create_connection_func
db_todo = if setup.is_database.not then Nothing else "Table.transpose is not implemented yet in Database."
Test.group prefix+"Table.transpose" pending=db_todo <|
Test.specify "should transpose all columns by default" <|
suite_builder.group prefix+"Table.transpose" pending=db_todo group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
group_builder.specify "should transpose all columns by default" <|
t = table_builder [["Key", ["x", "y", "z"]], ["Value", [1, 2, 3]], ["Another", [10, Nothing, 20]], ["Yet Another", [Nothing, "Hello", "World"]]]
t1 = t.transpose
t1.row_count . should_equal 12
@ -22,7 +39,7 @@ spec setup =
t1.at "Name" . to_vector . should_equal ["Key", "Value", "Another", "Yet Another", "Key", "Value", "Another", "Yet Another", "Key", "Value", "Another", "Yet Another"]
t1.at "Value" . to_vector . should_equal ["x", 1, 10, Nothing, "y", 2, Nothing, "Hello", "z", 3, 20, "World"]
Test.specify "should allow custom names" <|
group_builder.specify "should allow custom names" <|
t = table_builder [["Key", ["x", "y", "z"]], ["Value", [1, 2, 3]], ["Another", [10, Nothing, 20]], ["Yet Another", [Nothing, "Hello", "World"]]]
t1 = t.transpose attribute_column_name="Key" value_column_name="Object"
t1.row_count . should_equal 12
@ -30,7 +47,7 @@ spec setup =
t1.at "Key" . to_vector . should_equal ["Key", "Value", "Another", "Yet Another", "Key", "Value", "Another", "Yet Another", "Key", "Value", "Another", "Yet Another"]
t1.at "Object" . to_vector . should_equal ["x", 1, 10, Nothing, "y", 2, Nothing, "Hello", "z", 3, 20, "World"]
Test.specify "should allow id fields" <|
group_builder.specify "should allow id fields" <|
t = table_builder [["Key", ["x", "y", "z"]], ["Value", [1, 2, 3]], ["Another", [10, Nothing, 20]], ["Yet Another", [Nothing, "Hello", "World"]]]
t1 = t.transpose ["Key"]
t1.row_count . should_equal 9
@ -39,7 +56,7 @@ spec setup =
t1.at "Name" . to_vector . should_equal ["Value", "Another", "Yet Another", "Value", "Another", "Yet Another", "Value", "Another", "Yet Another"]
t1.at "Value" . to_vector . should_equal [1, 10, Nothing, 2, Nothing, "Hello", 3, 20, "World"]
Test.specify "should allow single id field" <|
group_builder.specify "should allow single id field" <|
t = table_builder [["Key", ["x", "y", "z"]], ["Value", [1, 2, 3]], ["Another", [10, Nothing, 20]], ["Yet Another", [Nothing, "Hello", "World"]]]
t1 = t.transpose "Key"
t1.row_count . should_equal 9
@ -48,7 +65,7 @@ spec setup =
t1.at "Name" . to_vector . should_equal ["Value", "Another", "Yet Another", "Value", "Another", "Yet Another", "Value", "Another", "Yet Another"]
t1.at "Value" . to_vector . should_equal [1, 10, Nothing, 2, Nothing, "Hello", 3, 20, "World"]
Test.specify "should allow fields selected by index" <|
group_builder.specify "should allow fields selected by index" <|
t = table_builder [["Key", ["x", "y", "z"]], ["Value", [1, 2, 3]], ["Another", [10, Nothing, 20]], ["Yet Another", [Nothing, "Hello", "World"]]]
t1 = t.transpose [0, -1]
t1.column_names . should_equal ["Key", "Yet Another", "Name", "Value"]
@ -58,7 +75,7 @@ spec setup =
t1.at "Name" . to_vector . should_equal ["Value", "Another", "Value", "Another", "Value", "Another"]
t1.at "Value" . to_vector . should_equal [1, 10, 2, Nothing, 3, 20]
Test.specify "should allow all current columns to become id fields, without warning" <|
group_builder.specify "should allow all current columns to become id fields, without warning" <|
t = table_builder [["Key", ["x", "y", "z"]], ["Foo", [1, 2, 3]], ["Another", [10, Nothing, 20]], ["Yet Another", [Nothing, "Hello", "World"]]]
t1 = t.transpose t.column_names
t1.row_count . should_equal 3
@ -70,7 +87,7 @@ spec setup =
Problems.assume_no_problems t1
# ToDo: Verify the warnings and error handling within transpose.
Test.specify "should handle missing columns" <|
group_builder.specify "should handle missing columns" <|
t1 = table_builder [["Key", ["x", "y", "z"]], ["Value", [1, 2, 3]], ["Another", [10, Nothing, 20]]]
err1 = t1.transpose ["Key", "Missing", "Missing 2"]
@ -93,7 +110,7 @@ spec setup =
problems2 = [Missing_Input_Columns.Error [42, -100]]
Problems.test_problem_handling action2 problems2 tester2
Test.specify "should warn on column name clashes" <|
group_builder.specify "should warn on column name clashes" <|
t1 = table_builder [["X", ["x", "y", "z"]], ["Y", [1, 2, 3]], ["Z", [10, Nothing, 20]]]
action1 = t1.transpose ["X", "Y", "Z"] attribute_column_name="Y" value_column_name="Z" on_problems=_

View File

@ -1,18 +1,56 @@
from Standard.Base import all
from Standard.Table import Table
from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all
import project.In_Memory.Common_Spec as In_Memory_Table_Spec
import project.Common_Table_Operations.Main.Test_Setup
import project.Common_Table_Operations.Main.Test_Selection
import project.Common_Table_Operations.Aggregate_Spec
expect_column_names names table =
table.columns . map .name . should_equal names frames_to_skip=2
type Dummy_Connection
Value
close self = Nothing
drop_table self table =
_ = table
Nothing
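# A no-op stand-in connection: the in-memory `create_connection_func` below
# returns it so that specs written against the database-style API can call
# `close` and `drop_table` safely without a real database.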
## These tests are parametrized by various backends and so they should be run
   in the context of a specific backend. However, for convenience we provide
   a shortcut that allows running these tests with the in-memory backend.
run_default_backend spec =
Test_Suite.run_main (In_Memory_Table_Spec.run_common_spec spec)
Arguments:
   - add_specs: A function that takes two parameters, a suite builder and a
     Test_Setup, and adds test specs to the suite builder.
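   > Example
     Run a minimal migrated spec with the in-memory backend. This is only a
     sketch for illustration; the group name and column data are hypothetical
     and not part of this PR.

         from Standard.Test_New import all
         from project.Common_Table_Operations.Util import run_default_backend

         add_specs suite_builder setup =
             suite_builder.group setup.prefix+"Example" group_builder->
                 group_builder.specify "should count rows" <|
                     t = setup.table_builder [["X", [1, 2, 3]]]
                     t.row_count . should_equal 3

         main = run_default_backend add_specs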
run_default_backend add_specs =
selection = Test_Selection.Config supports_case_sensitive_columns=True order_by=True natural_ordering=True case_insensitive_ordering=True order_by_unicode_normalization_by_default=True supports_unicode_normalization=True supports_time_duration=True supports_nanoseconds_in_time=True supports_mixed_columns=True fixed_length_text_columns=True supports_8bit_integer=True
aggregate_selection = Aggregate_Spec.Test_Selection.Config
table_fn _ = (enso_project.data / "data.csv") . read
empty_table_fn _ =
table = table_fn Nothing
table.take 0
materialize = x->x
table_builder cols connection=Nothing =
_ = connection
Table.new cols
create_connection_func _ =
Dummy_Connection.Value
setup = Test_Setup.Config "[In-Memory] " table_fn empty_table_fn table_builder materialize is_database=False test_selection=selection aggregate_test_selection=aggregate_selection create_connection_func
suite = Test.build suite_builder->
add_specs suite_builder setup
suite.run_with_filter
## Adds a clue which will display the provided table next to the failed test
description.

View File

@ -10,22 +10,40 @@ import Standard.Database.Data.Dialect
import Standard.Database.Data.SQL_Type.SQL_Type
from Standard.Database.Errors import Unsupported_Database_Operation
from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
polyglot java import java.sql.Types as Java_SQL_Types
spec =
test_connection =
type Data
Value ~data
connection self = self.data.at 0
t1 self = self.data.at 1
setup = Data.Value <|
c = Database.connect (SQLite In_Memory)
c.create_table "T1" [Column_Description.Value "A" Value_Type.Integer, Column_Description.Value "B" Value_Type.Char, Column_Description.Value "C" Value_Type.Boolean]
c.create_table "T2" [Column_Description.Value "D" Value_Type.Integer, Column_Description.Value "E" Value_Type.Integer, Column_Description.Value "F" Value_Type.Boolean]
c.create_table "T3" [Column_Description.Value "A" Value_Type.Integer, Column_Description.Value "E" Value_Type.Boolean, Column_Description.Value "F" Value_Type.Integer]
c
t1 = test_connection.query (SQL_Query.Table_Name "T1")
Test.group "[Codegen] JSON serialization" <|
Test.specify "should serialize Tables and Columns to their SQL representation" pending="ToDo: decide on how we handle ==, see https://github.com/enso-org/enso/issues/5241" <|
q1 = t1.filter (t1.at "A" == 42) . to_json
t1 = c.query (SQL_Query.Table_Name "T1")
[c, t1]
teardown self =
self.connection.close
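# `Value ~data` suspends the whole vector, so the SQLite connection and the
# `T1` query are only created when a spec first calls one of the accessors
# above; `teardown` then closes the shared connection.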
add_specs suite_builder =
suite_builder.group "[Codegen] JSON serialization" group_builder->
data = Data.setup
group_builder.teardown <|
data.teardown
group_builder.specify "should serialize Tables and Columns to their SQL representation" pending="ToDo: decide on how we handle ==, see https://github.com/enso-org/enso/issues/5241" <|
q1 = data.t1.filter (data.t1.at "A" == 42) . to_json
part1 = JS_Object.from_pairs [["sql_code", 'SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" WHERE ("T1"."A" = ']]
part2_sub = JS_Object.from_pairs [["value", 42]]
@ -34,86 +52,115 @@ spec =
expected = JS_Object.from_pairs [["query", [part1, part2, part3]]] . to_text
q1.should_equal expected
q2 = t1.at "A" . to_json
q2 = data.t1.at "A" . to_json
expected_2 = JS_Object.from_pairs [["query", [JS_Object.from_pairs [["sql_code", 'SELECT "T1"."A" AS "A" FROM "T1" AS "T1"']]]]] . to_text
q2.should_equal expected_2
Test.group "[Codegen] Basic Select" <|
Test.specify "should select columns from a table" <|
t1.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1"', []]
t2 = t1.select_columns ["C", "B", "undefined"] reorder=True error_on_missing_columns=False
suite_builder.group "[Codegen] Basic Select" group_builder->
data = Data.setup
group_builder.teardown <|
data.teardown
group_builder.specify "should select columns from a table" <|
data.t1.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1"', []]
t2 = data.t1.select_columns ["C", "B", "undefined"] reorder=True error_on_missing_columns=False
t2.to_sql.prepare . should_equal ['SELECT "T1"."C" AS "C", "T1"."B" AS "B" FROM "T1" AS "T1"', []]
foo = t1.at "A" . rename "FOO"
foo = data.t1.at "A" . rename "FOO"
foo.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "FOO" FROM "T1" AS "T1"', []]
t3 = t2.set foo new_name="bar"
t3.to_sql.prepare . should_equal ['SELECT "T1"."C" AS "C", "T1"."B" AS "B", "T1"."A" AS "bar" FROM "T1" AS "T1"', []]
Test.specify "should fail if at is called for a non-existent column" <|
t1.at "undefined" . should_fail_with No_Such_Column
group_builder.specify "should fail if at is called for a non-existent column" <|
data.t1.at "undefined" . should_fail_with No_Such_Column
Test.specify "should allow to limit the amount of returned results" <|
t2 = t1.limit 5
group_builder.specify "should allow to limit the amount of returned results" <|
t2 = data.t1.limit 5
t2.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" LIMIT 5', []]
Test.group "[Codegen] Masking Tables and Columns" <|
Test.specify "should generate a single BETWEEN expression" <|
t2 = t1.filter "A" (Filter_Condition.Between 10 20)
suite_builder.group "[Codegen] Masking Tables and Columns" group_builder->
data = Data.setup
group_builder.teardown <|
data.teardown
group_builder.specify "should generate a single BETWEEN expression" <|
t2 = data.t1.filter "A" (Filter_Condition.Between 10 20)
t2.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" WHERE ("T1"."A" BETWEEN ? AND ?)', [10, 20]]
Test.specify "should generate an IN expression" <|
t2 = t1.filter "A" (Filter_Condition.Is_In [1, 2, 'foo'])
group_builder.specify "should generate an IN expression" <|
t2 = data.t1.filter "A" (Filter_Condition.Is_In [1, 2, 'foo'])
t2.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" WHERE "T1"."A" IN (?, ?, ?)', [1, 2, "foo"]]
t3 = t1.filter "A" (Filter_Condition.Is_In [1])
t3 = data.t1.filter "A" (Filter_Condition.Is_In [1])
t3.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" WHERE "T1"."A" IN (?)', [1]]
t4 = t1.filter "A" (Filter_Condition.Is_In [])
t4 = data.t1.filter "A" (Filter_Condition.Is_In [])
t4.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" WHERE (FALSE)', []]
t5 = t1.filter "A" (Filter_Condition.Is_In [Nothing])
t5 = data.t1.filter "A" (Filter_Condition.Is_In [Nothing])
t5.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" WHERE "T1"."A" IN (?)', [Nothing]]
Test.group "[Codegen] Handling Missing Values" <|
Test.specify "fill_nothing should allow to replace missing values in a column with a constant" <|
c = t1.at "B" . fill_nothing "not-applicable"
suite_builder.group "[Codegen] Handling Missing Values" group_builder->
data = Data.setup
group_builder.teardown <|
data.teardown
group_builder.specify "fill_nothing should allow to replace missing values in a column with a constant" <|
c = data.t1.at "B" . fill_nothing "not-applicable"
c.to_sql.prepare . should_equal ['SELECT CAST(COALESCE("T1"."B", ?) AS TEXT) AS "B" FROM "T1" AS "T1"', ["not-applicable"]]
Test.specify "filter_blank_rows should drop rows that contain at least one missing column in a Table" <|
t2 = t1.filter_blank_rows when=Blank_Selector.Any_Cell
group_builder.specify "filter_blank_rows should drop rows that contain at least one missing column in a Table" <|
t2 = data.t1.filter_blank_rows when=Blank_Selector.Any_Cell
t2.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" WHERE (NOT ((("T1"."A" IS NULL) OR (("T1"."B" IS NULL) OR ("T1"."B" = \'\'))) OR ("T1"."C" IS NULL)))', []]
t3 = t1.filter_blank_rows when=Blank_Selector.All_Cells
t3 = data.t1.filter_blank_rows when=Blank_Selector.All_Cells
t3.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" WHERE (NOT ((("T1"."A" IS NULL) AND (("T1"."B" IS NULL) OR ("T1"."B" = \'\'))) AND ("T1"."C" IS NULL)))', []]
Test.group "[Codegen] Sorting" <|
Test.specify "should allow sorting by a single column name" <|
r1 = t1.order_by ([Sort_Column.Name "A"]) . at "B"
suite_builder.group "[Codegen] Sorting" group_builder->
data = Data.setup
group_builder.teardown <|
data.teardown
group_builder.specify "should allow sorting by a single column name" <|
r1 = data.t1.order_by ([Sort_Column.Name "A"]) . at "B"
r1.to_sql.prepare . should_equal ['SELECT "T1"."B" AS "B" FROM "T1" AS "T1" ORDER BY "T1"."A" ASC', []]
r2 = t1.order_by ([Sort_Column.Name "B" Sort_Direction.Descending]) . at "A"
r2 = data.t1.order_by ([Sort_Column.Name "B" Sort_Direction.Descending]) . at "A"
r2.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "A" FROM "T1" AS "T1" ORDER BY "T1"."B" DESC', []]
Test.specify 'should allow sorting by multiple column names' <|
r1 = t1.order_by ([Sort_Column.Name 'A', Sort_Column.Name 'B'])
group_builder.specify 'should allow sorting by multiple column names' <|
r1 = data.t1.order_by ([Sort_Column.Name 'A', Sort_Column.Name 'B'])
r1.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" ORDER BY "T1"."A" ASC, "T1"."B" ASC', []]
Test.specify 'should allow sorting with specific by-column rules' <|
r1 = t1.order_by ([Sort_Column.Name "A", Sort_Column.Name "B" Sort_Direction.Descending])
group_builder.specify 'should allow sorting with specific by-column rules' <|
r1 = data.t1.order_by ([Sort_Column.Name "A", Sort_Column.Name "B" Sort_Direction.Descending])
r1.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" ORDER BY "T1"."A" ASC, "T1"."B" DESC', []]
Test.specify 'should return warnings and errors when passed a non-existent column' <|
t2 = t1.order_by ([Sort_Column.Name 'foobar'])
group_builder.specify 'should return warnings and errors when passed a non-existent column' <|
t2 = data.t1.order_by ([Sort_Column.Name 'foobar'])
t2.should_fail_with Missing_Input_Columns
Test.group "[Codegen] Aggregation" <|
Test.specify "should allow to count rows" <|
code = t1.aggregate [Group_By "A" "A grp", Count "counter"] . to_sql . prepare
suite_builder.group "[Codegen] Aggregation" group_builder->
data = Data.setup
group_builder.teardown <|
data.teardown
group_builder.specify "should allow to count rows" <|
code = data.t1.aggregate [Group_By "A" "A grp", Count "counter"] . to_sql . prepare
code . should_equal ['SELECT "T1"."A grp" AS "A grp", "T1"."counter" AS "counter" FROM (SELECT "T1"."A" AS "A grp", COUNT(*) AS "counter" FROM "T1" AS "T1" GROUP BY "T1"."A") AS "T1"', []]
Test.specify "should allow to group by multiple fields" <|
code = t1.aggregate [Sum "A" "sum_a", Group_By "C", Group_By "B" "B grp"] . to_sql . prepare
group_builder.specify "should allow to group by multiple fields" <|
code = data.t1.aggregate [Sum "A" "sum_a", Group_By "C", Group_By "B" "B grp"] . to_sql . prepare
code . should_equal ['SELECT "T1"."sum_a" AS "sum_a", "T1"."C" AS "C", "T1"."B grp" AS "B grp" FROM (SELECT SUM("T1"."A") AS "sum_a", "T1"."C" AS "C", "T1"."B" AS "B grp" FROM "T1" AS "T1" GROUP BY "T1"."C", "T1"."B") AS "T1"', []]
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

View File

@ -8,8 +8,8 @@ from Standard.Table.Errors import all
from Standard.Database import all
from Standard.Database.Errors import all
from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
import Standard.Test_New.Suite.Suite_Builder
import project.Database.Common.Default_Ordering_Spec
import project.Database.Common.Names_Length_Limits_Spec
@ -17,84 +17,33 @@ import project.Database.Common.Names_Length_Limits_Spec
import project.Util
import project.Database.Helpers.Name_Generator
spec prefix connection =
tables_to_clean = Vector.new_builder
upload prefix data temporary=True =
upload connection prefix data temporary=True =
name = Name_Generator.random_name prefix
table = data.select_into_database_table connection name temporary=temporary primary_key=Nothing
tables_to_clean.append table.name
table
clean_tables =
tables_to_clean.to_vector.each name->
drop_table connection name =
Panic.catch Any (connection.drop_table name) caught_panic->
IO.println "Failed to drop table: " + name + " because of: " + caught_panic.payload.to_display_text
Panic.with_finalizer clean_tables <|
run_tests prefix connection upload
IO.println <| "Failed to drop table: " + name + " because of: " + caught_panic.payload.to_display_text
run_tests prefix connection upload =
t1 = upload "T1" (Table.new [["a", [1, 4]], ["b", [2, 5]], ["c", [3, 6]]])
Test.group prefix+"Basic Table Access" <|
Test.specify "should allow to materialize tables and columns into local memory" <|
df = t1.read
a = t1.at 'a' . read
df.at 'a' . to_vector . should_equal [1, 4]
a.to_vector . should_equal [1, 4]
Test.specify "should allow to materialize columns directly into a Vector" <|
v = t1.at 'a' . to_vector
v . should_equal [1, 4]
Test.specify "should handle bigger result sets" <|
n = 1000
original = Table.new [["a", Vector.new n ix->ix], ["b", Vector.new n ix-> ix * 3.1415926], ["c", Vector.new n ix-> ix.to_text]]
table = upload "Big" original
table.read.row_count . should_equal n
Test.specify "should not allow to set a column coming from another table" <|
t2 = upload "T2" (Table.new [["d", [100, 200]]])
t1.set (t2.at "d") . should_fail_with Integrity_Error
Test.group prefix+"Connection.query" <|
name = t1.name
Test.specify "should allow to access a Table by name" <|
t2 = connection.query (SQL_Query.Table_Name name)
t2.read . should_equal t1.read
type Basic_Data
Value ~data
Test.specify "should allow to access a Table by an SQL query" <|
t2 = connection.query (SQL_Query.Raw_SQL ('SELECT a, b FROM "' + name + '" WHERE a >= 3'))
m2 = t2.read
m2.column_names . should_equal ["a", "b"]
m2.at "a" . to_vector . should_equal [4]
m2.at "b" . to_vector . should_equal [5]
m2.at "c" . should_fail_with No_Such_Column
Test.specify "should allow to access a Table by an SQL query" <|
t2 = connection.query (SQL_Query.Raw_SQL ('SELECT a, b FROM "' + name + '" WHERE a >= 3'))
m2 = t2.read
m2.column_names . should_equal ["a", "b"]
m2.at "a" . to_vector . should_equal [4]
m2.at "b" . to_vector . should_equal [5]
m2.at "c" . should_fail_with No_Such_Column
t3 = connection.query (SQL_Query.Raw_SQL ('SELECT 1+2'))
m3 = t3.read
m3.at 0 . to_vector . should_equal [3]
Test.specify "should use labels for column names" <|
t2 = connection.query (SQL_Query.Raw_SQL ('SELECT a AS c, b FROM "' + name + '" WHERE a >= 3'))
m2 = t2.read
m2.column_names . should_equal ["c", "b"]
m2.at "c" . to_vector . should_equal [4]
m2.at "b" . to_vector . should_equal [5]
m2.at "a" . should_fail_with No_Such_Column
Test.specify "should allow a shorthand trying to deduce if the query is a table name or an SQL query" <|
t2 = connection.query name
t2.read . should_equal t1.read
t3 = connection.query ('SELECT a, b FROM "' + name + '" WHERE a >= 3')
m3 = t3.read
m3.column_names . should_equal ["a", "b"]
m3.at "a" . to_vector . should_equal [4]
connection self = self.data.at 0
t1 self = self.data.at 1
t2 self = self.data.at 2
t4 self = self.data.at 3
big_table self = self.data.at 4
big_size self = self.data.at 5
setup create_connection_fn = Basic_Data.Value <|
big_size = 1000
connection = create_connection_fn Nothing
t1 = upload connection "T1" (Table.new [["a", [1, 4]], ["b", [2, 5]], ["c", [3, 6]]])
t2 = upload connection "T2" (Table.new [["d", [100, 200]]])
## The effective name may get a deduplication prefix/suffix, so we
   need to use `t4.name` instead of the literal string. It will still
   contain the weird characters we wanted.
@ -102,166 +51,56 @@ run_tests prefix connection upload =
Also, the table name cannot be too long, as Postgres truncates it at
63 chars (and we append a 37-char uniqueness suffix), which would
break the test logic.
t4 = upload 'aSELECT "A",\'B\' FROM t;--' (Table.new [["X", ["a", "B"]], ["Y", [2, 5]]])
t5 = connection.query t4.name
m5 = t5.read
m5.column_names . should_equal ["X", "Y"]
m5.at "X" . to_vector . should_equal ["a", "B"]
m5.at "Y" . to_vector . should_equal [2, 5]
t4 = upload connection 'aSELECT "A",\'B\' FROM t;--' (Table.new [["X", ["a", "B"]], ["Y", [2, 5]]])
big = Table.new [["a", Vector.new big_size ix->ix], ["b", Vector.new big_size ix-> ix * 3.1415926], ["c", Vector.new big_size ix-> ix.to_text]]
big_table = upload connection "Big" big
[connection, t1, t2, t4, big_table, big_size]
Test.specify "should report an error depending on input SQL_Query type" <|
r2 = connection.query (SQL_Query.Table_Name "NONEXISTENT-TABLE")
r2.should_fail_with Table_Not_Found
r2.catch.name . should_equal "NONEXISTENT-TABLE"
r2.catch.to_display_text . should_equal "Table NONEXISTENT-TABLE was not found in the database."
teardown self =
drop_table self.connection self.t1.name
drop_table self.connection self.t2.name
drop_table self.connection self.t4.name
drop_table self.connection self.big_table.name
self.connection.close
r3 = connection.query (SQL_Query.Raw_SQL "MALFORMED-QUERY")
r3.should_fail_with SQL_Error
Test.specify "should not allow interpolations in raw user-built queries" <|
r = connection.query (SQL_Query.Raw_SQL "SELECT 1 + ?")
r.should_fail_with Illegal_Argument
type Sorting_Data
Value ~data
Test.specify "should make a best-effort attempt at returning a reasonable error for the short-hand" <|
r2 = connection.query "NONEXISTENT-TABLE"
r2.should_fail_with Table_Not_Found
r2.catch.name . should_equal "NONEXISTENT-TABLE"
r2.catch.treated_as_query . should_be_true
error_text = r2.catch.to_display_text
Test.with_clue "r2.catch.to_display_text = "+error_text <|
error_text.starts_with "The name NONEXISTENT-TABLE was treated as a query, but the query failed" . should_be_true
error_text.ends_with "wrap it in `SQL_Query.Table_Name`." . should_be_true
connection self = self.data.at 0
df self = self.data.at 1
ints self = self.data.at 2
reals self = self.data.at 3
bools self = self.data.at 4
texts self = self.data.at 5
t8 self = self.data.at 6
r3 = connection.query "SELECT * FROM ........"
r3.should_fail_with SQL_Error
Test.specify "will fail if the table is modified and a column gets removed" <|
name = Name_Generator.random_name "removing-column"
Problems.assume_no_problems <|
(Table.new [["a", [1, 2, 3]], ["b", [4, 5, 6]]]).select_into_database_table connection name temporary=True
t1 = connection.query name
m1 = t1.read
Problems.assume_no_problems m1
m1.at "a" . to_vector . should_equal [1, 2, 3]
m1.at "b" . to_vector . should_equal [4, 5, 6]
Problems.assume_no_problems <| connection.drop_table name
Problems.assume_no_problems <|
(Table.new [["a", [100, 200]]]).select_into_database_table connection name temporary=True
# Reading a column that was kept will work OK
t1.at "a" . to_vector . should_equal [100, 200]
# But reading the whole table will fail on the missing column:
m2 = t1.read
m2.should_fail_with SQL_Error
Test.specify "will not fail if the table is modified and a column gets added" <|
name = Name_Generator.random_name "adding-column"
Problems.assume_no_problems <|
(Table.new [["a", [1, 2, 3]], ["b", [4, 5, 6]]]).select_into_database_table connection name temporary=True
t1 = connection.query name
m1 = t1.read
Problems.assume_no_problems m1
m1.at "a" . to_vector . should_equal [1, 2, 3]
m1.at "b" . to_vector . should_equal [4, 5, 6]
Problems.assume_no_problems <| connection.drop_table name
Problems.assume_no_problems <|
(Table.new [["a", [100, 200]], ["b", [300, 400]], ["c", [500, 600]]]).select_into_database_table connection name temporary=True
m2 = t1.read
Problems.assume_no_problems m2
m2.column_names . should_equal ["a", "b"]
m2.at "a" . to_vector . should_equal [100, 200]
m2.at "b" . to_vector . should_equal [300, 400]
t1.at "c" . should_fail_with No_Such_Column
t2 = connection.query name
t2.column_names . should_equal ["a", "b", "c"]
Test.group prefix+"Masking Tables" <|
Test.specify "should allow to select rows from a table or column based on an expression" <|
t2 = t1.filter (t1.at "a" == 1)
df = t2.read
df.at "a" . to_vector . should_equal [1]
df.at "b" . to_vector . should_equal [2]
df.at "c" . to_vector . should_equal [3]
t2.at "a" . to_vector . should_equal [1]
t2.at "b" . to_vector . should_equal [2]
t2.at "c" . to_vector . should_equal [3]
Test.group prefix+"Missing Values" <|
t4 = upload "T4" <|
Table.new [["a", [0, 1, Nothing, 42, Nothing]], ["b", [True, Nothing, True, False, Nothing]], ["c", ["", "foo", "bar", Nothing, Nothing]]]
Test.specify "fill_nothing should replace nulls" <|
t4.at 'a' . fill_nothing 10 . to_vector . should_equal [0, 1, 10, 42, 10]
t4.at 'b' . fill_nothing False . to_vector . should_equal [True, False, True, False, False]
t4.at 'c' . fill_nothing "NA" . to_vector . should_equal ["", "foo", "bar", "NA", "NA"]
Test.specify "should correctly be counted" <|
t4.row_count . should_equal 5
col = t4.at 'a'
col.length . should_equal 5
col.count . should_equal 3
col.count_nothing . should_equal 2
Test.group prefix+"Sorting" <|
df = upload "clothes" <|
Table.new [["id", [1,2,3,4,5,6]], ["name", ["shoes","trousers","dress","skirt","blouse","t-shirt"]], ["quantity", [20,10,20,10,30,30]], ["rating", [3.0,Nothing,7.3,3.0,2.2,Nothing]], ["price", [37.2,42.1,64.1,87.4,13.5,64.2]]]
Test.specify "should allow sorting by a single column name" <|
r_1 = df.order_by ([Sort_Column.Name 'quantity'])
r_1.at 'id' . to_vector . should_equal [2,4,1,3,5,6]
r_3 = df.order_by ([Sort_Column.Name 'rating' Sort_Direction.Descending])
r_3.at 'id' . to_vector . should_equal [3,1,4,5,2,6]
Test.specify 'should allow sorting by multiple column names' <|
r_1 = df.order_by ([Sort_Column.Name 'quantity', Sort_Column.Name 'rating'])
r_1.at 'id' . to_vector . should_equal [2,4,1,3,6,5]
r_2 = df.order_by ([Sort_Column.Name 'rating' Sort_Direction.Descending, Sort_Column.Name 'quantity' Sort_Direction.Descending])
r_2.at 'id' . to_vector . should_equal [3,1,4,5,6,2]
Test.specify 'should allow sorting with specific by-column rules' <|
r_1 = df.order_by ([Sort_Column.Name "quantity", Sort_Column.Name "price" Sort_Direction.Descending])
r_1.at 'id' . to_vector . should_equal [4,2,3,1,6,5]
Test.specify 'should correctly reorder all kinds of columns and leave the original columns untouched' <|
setup create_connection_fn = Sorting_Data.Value <|
connection = create_connection_fn Nothing
ints = [1, 2, 3, 4, 5]
reals = [1.3, 4.6, 3.2, 5.2, 1.6]
bools = [False, False, True, True, False]
texts = ["foo", "foo", "bar", "baz", "spam"]
df = upload "T8" <|
df = upload connection "clothes" <|
Table.new [["id", [1,2,3,4,5,6]], ["name", ["shoes","trousers","dress","skirt","blouse","t-shirt"]], ["quantity", [20,10,20,10,30,30]], ["rating", [3.0,Nothing,7.3,3.0,2.2,Nothing]], ["price", [37.2,42.1,64.1,87.4,13.5,64.2]]]
t8 = upload connection "T8" <|
Table.new [["ord", [0,3,2,4,1]], ["ints", ints], ["reals", reals], ["bools", bools], ["texts", texts]]
r = df.order_by ([Sort_Column.Name 'ord'])
[connection, df, ints, reals, bools, texts, t8]
r.at 'ints' . to_vector . should_equal [1, 5, 3, 2, 4]
df.at 'ints' . to_vector . should_equal ints
teardown self =
drop_table self.connection self.df.name
drop_table self.connection self.t8.name
self.connection.close
r.at 'reals' . to_vector . should_equal [1.3, 1.6, 3.2, 4.6, 5.2]
df.at 'reals' . to_vector . should_equal reals
r.at 'bools' . to_vector . should_equal [False, False, True, False, True]
df.at 'bools' . to_vector . should_equal bools
type Aggregation_Data
Value ~data
r.at 'texts' . to_vector . should_equal ['foo', 'spam', 'bar', 'foo', 'baz']
df.at 'texts' . to_vector . should_equal texts
connection self = self.data.first
t9 self = self.data.second
Test.specify 'should sort columns with specified ordering and missing placement' <|
c = df.at 'rating'
r_1 = c.sort
r_1.to_vector.should_equal [Nothing, Nothing, 2.2, 3.0, 3.0, 7.3]
r_2 = c.sort Sort_Direction.Descending
r_2.to_vector.should_equal [7.3, 3.0, 3.0, 2.2, Nothing, Nothing]
Test.group prefix+"Aggregation" <|
setup create_connection_fn = Aggregation_Data.Value <|
connection = create_connection_fn Nothing
builders = [Vector.new_builder,Vector.new_builder,Vector.new_builder]
insert v =
builders.zip v .append
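        # `insert` zips the three builders with the elements of `v`, appending each value to the matching builder - the calls below thus build three parallel columns (name, price, quantity).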
@@ -281,8 +120,287 @@ run_tests prefix connection upload =
insert ["zzzz", 1, 0]
insert ["zzzz", 0, 0]
insert ["zzzz", Nothing, Nothing]
t = upload "T9" <|
t9 = upload connection "T9" <|
Table.new [["name", builders.at 0 . to_vector], ["price", builders.at 1 . to_vector], ["quantity", builders.at 2 . to_vector]]
[connection, t9]
teardown self =
drop_table self.connection self.t9.name
self.connection.close
type Missing_Values_Data
Value ~data
connection self = self.data.first
t4 self = self.data.second
setup create_connection_fn = Missing_Values_Data.Value <|
connection = create_connection_fn Nothing
t4 = upload connection "T4" <|
Table.new [["a", [0, 1, Nothing, 42, Nothing]], ["b", [True, Nothing, True, False, Nothing]], ["c", ["", "foo", "bar", Nothing, Nothing]]]
[connection, t4]
teardown self =
drop_table self.connection self.t4.name
self.connection.close
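
## The `*_Data` types above all follow the same pattern: a single lazy `~data`
   constructor field, so the (potentially expensive) database setup only runs
   once the first test of a group forces it; positional accessors; a `setup`
   that builds the fixture; and a `teardown` that drops the uploaded tables
   and closes the connection. Below is a minimal sketch of that pattern and of
   wiring it into the builder API - `Example_Data`, the group name and the
   spec are illustrative only, reusing this file's `upload` and `drop_table`
   helpers:

type Example_Data
    Value ~data

    connection self = self.data.first
    table self = self.data.second

    setup create_connection_fn = Example_Data.Value <|
        connection = create_connection_fn Nothing
        table = upload connection "example" <|
            Table.new [["x", [1, 2, 3]]]
        [connection, table]

    teardown self =
        drop_table self.connection self.table.name
        self.connection.close

add_example_specs suite_builder prefix create_connection_fn =
    suite_builder.group (prefix + "Example") group_builder->
        data = Example_Data.setup create_connection_fn
        group_builder.teardown <|
            data.teardown
        group_builder.specify "should see the uploaded rows" <|
            data.table.at "x" . to_vector . should_equal [1, 2, 3]
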
## Adds common database test specs to the suite builder.
Arguments:
- create_connection_fn: A function that creates an appropriate Connection to the database backend.
add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn : (Nothing -> Any)) =
Default_Ordering_Spec.add_specs suite_builder prefix create_connection_fn
Names_Length_Limits_Spec.add_specs suite_builder prefix create_connection_fn
suite_builder.group (prefix + "Basic Table Access") group_builder->
data = Basic_Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
group_builder.specify "should allow to materialize tables and columns into local memory" <|
df = data.t1.read
a = data.t1.at 'a' . read
df.at 'a' . to_vector . should_equal [1, 4]
a.to_vector . should_equal [1, 4]
group_builder.specify "should allow to materialize columns directly into a Vector" <|
v = data.t1.at 'a' . to_vector
v . should_equal [1, 4]
group_builder.specify "should handle bigger result sets" <|
data.big_table.read.row_count . should_equal data.big_size
group_builder.specify "should not allow to set a column coming from another table" <|
data.t1.set (data.t2.at "d") . should_fail_with Integrity_Error
suite_builder.group (prefix + "Connection.query") group_builder->
data = Basic_Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
group_builder.specify "should allow to access a Table by name" <|
name = data.t1.name
tmp = data.connection.query (SQL_Query.Table_Name name)
tmp.read . should_equal data.t1.read
group_builder.specify "should allow to access a Table by an SQL query" <|
name = data.t1.name
t2 = data.connection.query (SQL_Query.Raw_SQL ('SELECT a, b FROM "' + name + '" WHERE a >= 3'))
m2 = t2.read
m2.column_names . should_equal ["a", "b"]
m2.at "a" . to_vector . should_equal [4]
m2.at "b" . to_vector . should_equal [5]
m2.at "c" . should_fail_with No_Such_Column
group_builder.specify "should allow to access a Table by an SQL query" <|
name = data.t1.name
t2 = data.connection.query (SQL_Query.Raw_SQL ('SELECT a, b FROM "' + name + '" WHERE a >= 3'))
m2 = t2.read
m2.column_names . should_equal ["a", "b"]
m2.at "a" . to_vector . should_equal [4]
m2.at "b" . to_vector . should_equal [5]
m2.at "c" . should_fail_with No_Such_Column
t3 = data.connection.query (SQL_Query.Raw_SQL ('SELECT 1+2'))
m3 = t3.read
m3.at 0 . to_vector . should_equal [3]
group_builder.specify "should use labels for column names" <|
name = data.t1.name
t2 = data.connection.query (SQL_Query.Raw_SQL ('SELECT a AS c, b FROM "' + name + '" WHERE a >= 3'))
m2 = t2.read
m2.column_names . should_equal ["c", "b"]
m2.at "c" . to_vector . should_equal [4]
m2.at "b" . to_vector . should_equal [5]
m2.at "a" . should_fail_with No_Such_Column
group_builder.specify "should allow a shorthand trying to deduce if the query is a table name or an SQL query" <|
name = data.t1.name
t2 = data.connection.query name
t2.read . should_equal data.t1.read
t3 = data.connection.query ('SELECT a, b FROM "' + name + '" WHERE a >= 3')
m3 = t3.read
m3.column_names . should_equal ["a", "b"]
m3.at "a" . to_vector . should_equal [4]
t5 = data.connection.query data.t4.name
m5 = t5.read
m5.column_names . should_equal ["X", "Y"]
m5.at "X" . to_vector . should_equal ["a", "B"]
m5.at "Y" . to_vector . should_equal [2, 5]
group_builder.specify "should report an error depending on input SQL_Query type" <|
r2 = data.connection.query (SQL_Query.Table_Name "NONEXISTENT-TABLE")
r2.should_fail_with Table_Not_Found
r2.catch.name . should_equal "NONEXISTENT-TABLE"
r2.catch.to_display_text . should_equal "Table NONEXISTENT-TABLE was not found in the database."
r3 = data.connection.query (SQL_Query.Raw_SQL "MALFORMED-QUERY")
r3.should_fail_with SQL_Error
group_builder.specify "should not allow interpolations in raw user-built queries" <|
r = data.connection.query (SQL_Query.Raw_SQL "SELECT 1 + ?")
r.should_fail_with Illegal_Argument
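            # A `Raw_SQL` query takes no interpolation values, so a dangling `?` placeholder cannot be bound and is rejected as an `Illegal_Argument`.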
group_builder.specify "should make a best-effort attempt at returning a reasonable error for the short-hand" <|
r2 = data.connection.query "NONEXISTENT-TABLE"
r2.should_fail_with Table_Not_Found
r2.catch.name . should_equal "NONEXISTENT-TABLE"
r2.catch.treated_as_query . should_be_true
error_text = r2.catch.to_display_text
Test.with_clue "r2.catch.to_display_text = "+error_text <|
error_text.starts_with "The name NONEXISTENT-TABLE was treated as a query, but the query failed" . should_be_true
error_text.ends_with "wrap it in `SQL_Query.Table_Name`." . should_be_true
r3 = data.connection.query "SELECT * FROM ........"
r3.should_fail_with SQL_Error
group_builder.specify "will fail if the table is modified and a column gets removed" <|
name = Name_Generator.random_name "removing-column"
Problems.assume_no_problems <|
(Table.new [["a", [1, 2, 3]], ["b", [4, 5, 6]]]).select_into_database_table data.connection name temporary=True
t1 = data.connection.query name
m1 = t1.read
Problems.assume_no_problems m1
m1.at "a" . to_vector . should_equal [1, 2, 3]
m1.at "b" . to_vector . should_equal [4, 5, 6]
Problems.assume_no_problems <| data.connection.drop_table name
Problems.assume_no_problems <|
(Table.new [["a", [100, 200]]]).select_into_database_table data.connection name temporary=True
# Reading a column that was kept will work OK
t1.at "a" . to_vector . should_equal [100, 200]
# But reading the whole table will fail on the missing column:
m2 = t1.read
m2.should_fail_with SQL_Error
group_builder.specify "will not fail if the table is modified and a column gets added" <|
name = Name_Generator.random_name "adding-column"
Problems.assume_no_problems <|
(Table.new [["a", [1, 2, 3]], ["b", [4, 5, 6]]]).select_into_database_table data.connection name temporary=True
t1 = data.connection.query name
m1 = t1.read
Problems.assume_no_problems m1
m1.at "a" . to_vector . should_equal [1, 2, 3]
m1.at "b" . to_vector . should_equal [4, 5, 6]
Problems.assume_no_problems <| data.connection.drop_table name
Problems.assume_no_problems <|
(Table.new [["a", [100, 200]], ["b", [300, 400]], ["c", [500, 600]]]).select_into_database_table data.connection name temporary=True
m2 = t1.read
Problems.assume_no_problems m2
m2.column_names . should_equal ["a", "b"]
m2.at "a" . to_vector . should_equal [100, 200]
m2.at "b" . to_vector . should_equal [300, 400]
t1.at "c" . should_fail_with No_Such_Column
t2 = data.connection.query name
t2.column_names . should_equal ["a", "b", "c"]
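            # Note: the existing `t1` handle keeps the column set captured when it was created; only re-querying the table (`t2`) picks up the newly added column "c".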
suite_builder.group (prefix + "Masking Tables") group_builder->
data = Basic_Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
group_builder.specify "should allow to select rows from a table or column based on an expression" <|
t2 = data.t1.filter (data.t1.at "a" == 1)
df = t2.read
df.at "a" . to_vector . should_equal [1]
df.at "b" . to_vector . should_equal [2]
df.at "c" . to_vector . should_equal [3]
t2.at "a" . to_vector . should_equal [1]
t2.at "b" . to_vector . should_equal [2]
t2.at "c" . to_vector . should_equal [3]
suite_builder.group (prefix + "Missing Values") group_builder->
data = Missing_Values_Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
group_builder.specify "fill_nothing should replace nulls" <|
data.t4.at 'a' . fill_nothing 10 . to_vector . should_equal [0, 1, 10, 42, 10]
data.t4.at 'b' . fill_nothing False . to_vector . should_equal [True, False, True, False, False]
data.t4.at 'c' . fill_nothing "NA" . to_vector . should_equal ["", "foo", "bar", "NA", "NA"]
group_builder.specify "should correctly be counted" <|
data.t4.row_count . should_equal 5
col = data.t4.at 'a'
col.length . should_equal 5
col.count . should_equal 3
col.count_nothing . should_equal 2
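            # `count` returns the number of non-Nothing values and `count_nothing` the number of Nothings; the two always sum to `length`.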
suite_builder.group (prefix + "Sorting") group_builder->
data = Sorting_Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
group_builder.specify "should allow sorting by a single column name" <|
r_1 = data.df.order_by ([Sort_Column.Name 'quantity'])
r_1.at 'id' . to_vector . should_equal [2,4,1,3,5,6]
r_3 = data.df.order_by ([Sort_Column.Name 'rating' Sort_Direction.Descending])
r_3.at 'id' . to_vector . should_equal [3,1,4,5,2,6]
group_builder.specify 'should allow sorting by multiple column names' <|
r_1 = data.df.order_by ([Sort_Column.Name 'quantity', Sort_Column.Name 'rating'])
r_1.at 'id' . to_vector . should_equal [2,4,1,3,6,5]
r_2 = data.df.order_by ([Sort_Column.Name 'rating' Sort_Direction.Descending, Sort_Column.Name 'quantity' Sort_Direction.Descending])
r_2.at 'id' . to_vector . should_equal [3,1,4,5,6,2]
group_builder.specify 'should allow sorting with specific by-column rules' <|
r_1 = data.df.order_by ([Sort_Column.Name "quantity", Sort_Column.Name "price" Sort_Direction.Descending])
r_1.at 'id' . to_vector . should_equal [4,2,3,1,6,5]
group_builder.specify 'should correctly reorder all kinds of columns and leave the original columns untouched' <|
r = data.t8.order_by ([Sort_Column.Name 'ord'])
r.at 'ints' . to_vector . should_equal [1, 5, 3, 2, 4]
data.t8.at 'ints' . to_vector . should_equal data.ints
r.at 'reals' . to_vector . should_equal [1.3, 1.6, 3.2, 4.6, 5.2]
data.t8.at 'reals' . to_vector . should_equal data.reals
r.at 'bools' . to_vector . should_equal [False, False, True, False, True]
data.t8.at 'bools' . to_vector . should_equal data.bools
r.at 'texts' . to_vector . should_equal ['foo', 'spam', 'bar', 'foo', 'baz']
data.t8.at 'texts' . to_vector . should_equal data.texts
group_builder.specify 'should sort columns with specified ordering and missing placement' <|
c = data.df.at 'rating'
r_1 = c.sort
r_1.to_vector.should_equal [Nothing, Nothing, 2.2, 3.0, 3.0, 7.3]
r_2 = c.sort Sort_Direction.Descending
r_2.to_vector.should_equal [7.3, 3.0, 3.0, 2.2, Nothing, Nothing]
suite_builder.group prefix+"Aggregation" group_builder->
data = Aggregation_Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
## A helper which makes sure that the groups in a materialized
(InMemory) table are ordered according to a specified column or list
@@ -290,45 +408,45 @@ run_tests prefix connection upload =
determinize_by order_column table =
table.order_by ([Sort_Column.Name order_column])
Test.specify "should allow counting group sizes and elements" <|
group_builder.specify "should allow counting group sizes and elements" <|
            ## Names are set to lower case to avoid an issue with Redshift, where columns are
returned in lower case.
aggregates = [Count "count", Count_Not_Nothing "price" "count not nothing price", Count_Nothing "price" "count nothing price"]
t1 = determinize_by "name" (t.aggregate ([Group_By "name"] + aggregates) . read)
t1 = determinize_by "name" (data.t9.aggregate ([Group_By "name"] + aggregates) . read)
t1.at "name" . to_vector . should_equal ["bar", "baz", "foo", "quux", "zzzz"]
t1.at "count" . to_vector . should_equal [2, 1, 5, 1, 7]
t1.at "count not nothing price" . to_vector . should_equal [2, 1, 3, 0, 5]
t1.at "count nothing price" . to_vector . should_equal [0, 0, 2, 1, 2]
t2 = t.aggregate aggregates . read
t2 = data.t9.aggregate aggregates . read
t2.at "count" . to_vector . should_equal [16]
t2.at "count not nothing price" . to_vector . should_equal [11]
t2.at "count nothing price" . to_vector . should_equal [5]
Test.specify "should allow simple arithmetic aggregations" <|
group_builder.specify "should allow simple arithmetic aggregations" <|
            ## Names are set to lower case to avoid an issue with Redshift, where columns are
returned in lower case.
aggregates = [Sum "price" "sum price", Sum "quantity" "sum quantity", Average "price" "avg price"]
## TODO can check the datatypes
t1 = determinize_by "name" (t.aggregate ([Group_By "name"] + aggregates) . read)
t1 = determinize_by "name" (data.t9.aggregate ([Group_By "name"] + aggregates) . read)
t1.at "name" . to_vector . should_equal ["bar", "baz", "foo", "quux", "zzzz"]
t1.at "sum price" . to_vector . should_equal [100.5, 6.7, 1, Nothing, 2]
t1.at "sum quantity" . to_vector . should_equal [80, 40, 120, 70, 2]
t1.at "avg price" . to_vector . should_equal [50.25, 6.7, (1/3), Nothing, (2/5)]
t2 = t.aggregate aggregates . read
t2 = data.t9.aggregate aggregates . read
t2.at "sum price" . to_vector . should_equal [110.2]
t2.at "sum quantity" . to_vector . should_equal [312]
t2.at "avg price" . to_vector . should_equal [(110.2 / 11)]
Test.group prefix+"Table.filter" <|
Test.specify "report error when trying to filter by a custom predicate" <|
t1.filter "a" (x -> x % 2 == 0) . should_fail_with Unsupported_Database_Operation
suite_builder.group prefix+"Table.filter" group_builder->
data = Basic_Data.setup create_connection_fn
Default_Ordering_Spec.spec prefix connection
Names_Length_Limits_Spec.spec prefix connection
group_builder.teardown <|
data.teardown
group_builder.specify "report error when trying to filter by a custom predicate" <|
data.t1.filter "a" (x -> x % 2 == 0) . should_fail_with Unsupported_Database_Operation
main = Test_Suite.run_main <|
spec "[SQLite] " (Database.connect (SQLite In_Memory))

View File

@@ -1,5 +1,6 @@
from Standard.Base import all
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
from Standard.Base.Runtime import assert
from Standard.Table import Table, Sort_Column, Aggregate_Column
from Standard.Table.Errors import all
@@ -7,48 +8,78 @@ from Standard.Table.Errors import all
from Standard.Database import all
from Standard.Database.Errors import all
from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
import Standard.Test_New.Suite.Suite_Builder
import project.Util
import project.Database.Helpers.Name_Generator
spec prefix connection =
Test.group prefix+"Table.default_ordering" <|
main =
suite = Test.build suite_builder->
add_specs suite_builder "[SQLite]" (_-> Database.connect (SQLite In_Memory))
suite.run_with_filter
type Data
Value ~data
connection self = self.data.at 0
db_table_without_key self = self.data.at 1
db_table_with_key self = self.data.at 2
setup create_connection_func = Data.Value <|
connection = create_connection_func Nothing
src_table = Table.new [["X", [1, 2, 3]], ["Y", [30, 20, 10]]]
db_table_without_key = src_table.select_into_database_table connection (Name_Generator.random_name "default-ordering-1") temporary=True primary_key=Nothing
db_table_with_key = src_table.select_into_database_table connection (Name_Generator.random_name "default-ordering-1") temporary=True primary_key=["X"]
[connection, db_table_without_key, db_table_with_key]
Test.specify "will return Nothing if no primary key is defined" <|
db_table_without_key.default_ordering . should_equal Nothing
teardown self = self.connection.close
Test.specify "will return the key for a table with a primary key" <|
v1 = db_table_with_key.default_ordering
## Adds test specifications for default ordering to the given `suite_builder`,
   as a group whose name is the given `prefix` followed by "Table.default_ordering".
Arguments:
- suite_builder: A Suite_Builder in which a new group will be created
- create_connection_fn: A function that creates an appropriate Connection to the database backend.
add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn : Any) =
group_name = prefix + "Table.default_ordering"
suite_builder.group group_name group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
group_builder.specify "will return Nothing if no primary key is defined" <|
data.db_table_without_key.default_ordering . should_equal Nothing
group_builder.specify "will return the key for a table with a primary key" <|
v1 = data.db_table_with_key.default_ordering
v1.length . should_equal 1
v1.first.expression.name . should_equal "X"
v1.first.direction . should_equal Sort_Direction.Ascending
t2 = db_table_with_key.set "10 - [X]" "X"
t2 = data.db_table_with_key.set "10 - [X]" "X"
v2 = t2.default_ordering
v2.length . should_equal 1
v2.first.expression.name . should_equal "X"
Test.specify "will return Nothing for composite tables (join, aggregate)"
db_table_with_key.join db_table_with_key . default_ordering . should_equal Nothing
db_table_with_key.aggregate [Aggregate_Column.Group_By "X"] . default_ordering . should_equal Nothing
group_builder.specify "will return Nothing for composite tables (join, aggregate)"
data.db_table_with_key.join data.db_table_with_key . default_ordering . should_equal Nothing
data.db_table_with_key.aggregate [Aggregate_Column.Group_By "X"] . default_ordering . should_equal Nothing
Test.specify "will return the ordering determined by order_by" <|
v1 = db_table_with_key.order_by ["Y", Sort_Column.Name "X" Sort_Direction.Descending] . default_ordering
group_builder.specify "will return the ordering determined by order_by" <|
v1 = data.db_table_with_key.order_by ["Y", Sort_Column.Name "X" Sort_Direction.Descending] . default_ordering
v1.length . should_equal 2
v1.first.expression.name . should_equal "Y"
v1.first.direction . should_equal Sort_Direction.Ascending
v1.second.expression.name . should_equal "X"
v1.second.direction . should_equal Sort_Direction.Descending
v2 = db_table_without_key.order_by ["Y"] . default_ordering
v2 = data.db_table_without_key.order_by ["Y"] . default_ordering
v2.length . should_equal 1
v2.first.expression.name . should_equal "Y"
v2.first.direction . should_equal Sort_Direction.Ascending
main = Test_Suite.run_main <|
spec "[SQLite] " (Database.connect (SQLite In_Memory))

View File

@@ -6,16 +6,30 @@ from Standard.Table.Errors import No_Such_Column, Name_Too_Long, Truncated_Colum
from Standard.Database import all
from Standard.Database.Errors import Table_Not_Found, Unsupported_Database_Operation
import Standard.Test.Extensions
from Standard.Test import Test, Test_Suite, Problems
from Standard.Test.Execution_Context_Helpers import run_with_and_without_output
from Standard.Test_New import all
from Standard.Test_New.Execution_Context_Helpers import run_with_and_without_output
import project.Util
import project.Database.Helpers.Name_Generator
spec prefix connection =
Test.group prefix+"Support for Long Column/Table names" <|
entity_naming_properties = connection.base_connection.entity_naming_properties
type Data
Value ~connection
setup create_connection_func =
Data.Value (create_connection_func Nothing)
teardown self = self.connection.close
add_specs suite_builder prefix create_connection_func =
suite_builder.group prefix+"Support for Long Column/Table names" group_builder->
data = Data.setup create_connection_func
group_builder.teardown <|
data.teardown
entity_naming_properties = data.connection.base_connection.entity_naming_properties
max_table_name_length = entity_naming_properties.for_table_names.size_limit
max_column_name_length = entity_naming_properties.for_column_names.size_limit
has_maximum_table_name_length = max_table_name_length.is_nothing.not
@@ -23,34 +37,34 @@ spec prefix connection =
big_name_length = 10000
if has_maximum_table_name_length.not then
Test.specify "should allow to create tables with very long names" <|
group_builder.specify "should allow to create tables with very long names" <|
name = "a" * big_name_length
src = Table.new [["X", [1, 2, 3]]]
dest = src.select_into_database_table connection name temporary=True
dest = src.select_into_database_table data.connection name temporary=True
Problems.assume_no_problems dest
dest_fetched = connection.query name
dest_fetched = data.connection.query name
dest_fetched.at "X" . to_vector . should_equal [1, 2, 3]
if has_maximum_table_name_length then
Test.specify "should not allow to create a table with a name that is too long" <|
group_builder.specify "should not allow to create a table with a name that is too long" <|
name = "a" * (max_table_name_length + 1)
src = Table.new [["X", [1, 2, 3]]]
run_with_and_without_output <|
r = src.select_into_database_table connection name temporary=True
r = src.select_into_database_table data.connection name temporary=True
r.should_fail_with Name_Too_Long
r.catch.entity_kind . should_equal "table"
r.catch.name . should_equal name
r.catch.to_display_text . should_contain "The table name"
r.catch.to_display_text . should_contain "is too long"
r2 = connection.create_table name [Column_Description.Value "X" Value_Type.Integer] temporary=True
r2 = data.connection.create_table name [Column_Description.Value "X" Value_Type.Integer] temporary=True
r2.should_fail_with Name_Too_Long
connection.query (SQL_Query.Table_Name name) . should_fail_with Table_Not_Found
data.connection.query (SQL_Query.Table_Name name) . should_fail_with Table_Not_Found
Test.specify "should ensure length is measured in small units, even if grapheme length is lower" <|
group_builder.specify "should ensure length is measured in small units, even if grapheme length is lower" <|
big_grapheme = '\u{1F926}\u{1F3FC}\u200D\u2642\uFE0F'
big_grapheme.length . should_equal 1
big_grapheme.char_vector . length . should_equal 7
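                # A single emoji grapheme cluster (facepalm + skin-tone modifier joined with ZWJ): one grapheme, but 5 codepoints and 7 UTF-16 code units, hence `length` and `char_vector.length` disagree.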
@@ -60,7 +74,7 @@ spec prefix connection =
name_ok = "a" * max_table_name_length
src = Table.new [["X", [1, 2, 3]]]
t1 = src.select_into_database_table connection name_ok temporary=True
t1 = src.select_into_database_table data.connection name_ok temporary=True
Problems.assume_no_problems t1
## This is a name that has Enso-length equal to maximum.
@@ -69,7 +83,7 @@ spec prefix connection =
grapheme clusters.
name_large = big_grapheme * max_table_name_length
t2 = src.select_into_database_table connection name_large temporary=True
t2 = src.select_into_database_table data.connection name_large temporary=True
t2.should_fail_with Name_Too_Long
t2.catch.entity_kind . should_equal "table"
t2.catch.name . should_equal name_large
@@ -86,25 +100,25 @@ spec prefix connection =
For example it seems to work fine on MySQL.
name_medium = "ą" * max_table_name_length
name_medium_encoded = entity_naming_properties.for_table_names.encoded_size name_medium
t3 = src.select_into_database_table connection name_medium temporary=True
t3 = src.select_into_database_table data.connection name_medium temporary=True
case name_medium_encoded > max_table_name_length of
True ->
t3.should_fail_with Name_Too_Long
False ->
Problems.assume_no_problems t3
Test.specify "should not mangle a too long name when accessing tables, returning a different table" <|
group_builder.specify "should not mangle a too long name when accessing tables, returning a different table" <|
long_name = "z" * max_table_name_length
src = Table.new [["X", [1, 2, 3]]]
t1 = src.select_into_database_table connection long_name temporary=True
t1 = src.select_into_database_table data.connection long_name temporary=True
Problems.assume_no_problems t1
connection.query long_name . at "X" . to_vector . should_equal [1, 2, 3]
data.connection.query long_name . at "X" . to_vector . should_equal [1, 2, 3]
longer_name_with_same_prefix = long_name + ("z" * 10)
connection.query longer_name_with_same_prefix . should_fail_with Table_Not_Found
connection.query (SQL_Query.Table_Name longer_name_with_same_prefix) . should_fail_with Table_Not_Found
data.connection.query longer_name_with_same_prefix . should_fail_with Table_Not_Found
data.connection.query (SQL_Query.Table_Name longer_name_with_same_prefix) . should_fail_with Table_Not_Found
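                # If over-long names were silently truncated, `longer_name_with_same_prefix` would collide with `long_name` and wrongly return its table - hence the explicit `Table_Not_Found` expectations.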
Test.specify "should be fine joining tables with long names" <|
group_builder.specify "should be fine joining tables with long names" <|
## If we know the maximum length, we choose a length that will be
just short enough to fit in the limit, but long enough that after
concatenating two of such names, the result will exceed the limit.
@@ -116,9 +130,9 @@ spec prefix connection =
name_1 = ("x" * (name_length - 1)) + "1"
name_2 = ("x" * (name_length - 1)) + "2"
name_3 = ("x" * (name_length - 1)) + "3"
t1 = (Table.new [["X", [1, 2]]]).select_into_database_table connection name_1 temporary=True
t2 = (Table.new [["X", [1, 2]]]).select_into_database_table connection name_2 temporary=True
t3 = (Table.new [["X", [1, 2]]]).select_into_database_table connection name_3 temporary=True
t1 = (Table.new [["X", [1, 2]]]).select_into_database_table data.connection name_1 temporary=True
t2 = (Table.new [["X", [1, 2]]]).select_into_database_table data.connection name_2 temporary=True
t3 = (Table.new [["X", [1, 2]]]).select_into_database_table data.connection name_3 temporary=True
Test.with_clue "join: " <|
t12 = t1.join t2
@@ -160,7 +174,7 @@ spec prefix connection =
t11.at "X" . to_vector . should_contain_the_same_elements_as [1, 2]
Problems.assume_no_problems t11
Test.specify "should be fine operating on columns with long names" <|
group_builder.specify "should be fine operating on columns with long names" <|
## If we know the maximum length, we choose a length that will be
just short enough to fit in the limit, but long enough that after
concatenating two of such names, the result will exceed the limit.
@@ -172,7 +186,7 @@ spec prefix connection =
name_1 = ("x" * (name_length - 1)) + "1"
name_2 = ("x" * (name_length - 1)) + "2"
src = Table.new [[name_1, [1, 2, 3]], [name_2, [4, 5, 6]]]
t1 = src.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True
t1 = src.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True
            # We create 2 Maximum columns that, if wrongly truncated, would have the same name, introducing possible ambiguity into further queries.
t2 = t1.aggregate [Aggregate_Column.Group_By name_1, Aggregate_Column.Maximum name_2, Aggregate_Column.Maximum name_1]
@@ -189,9 +203,9 @@ spec prefix connection =
last_column.to_vector . should_contain_the_same_elements_as [4, 10, 18]
if has_maximum_column_name_length.not then
Test.specify "should allow to create very long column names" <|
group_builder.specify "should allow to create very long column names" <|
src = Table.new [["X", [1, 2, 3]]]
db_table = src.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True
db_table = src.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True
c = db_table.at "X"
long_name = "a" * big_name_length
c2 = c.rename long_name
@@ -214,9 +228,9 @@ spec prefix connection =
v3 . should_equal [1, 4, 9]
Problems.assume_no_problems v3
Test.specify "should allow to use long names in complex queries" <|
group_builder.specify "should allow to use long names in complex queries" <|
src = Table.new [["X", [1, 2, 3]]]
db_table = src.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True
db_table = src.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True
c = db_table.at "X"
long_name = "a" * big_name_length
c2 = c.rename long_name
@@ -229,23 +243,23 @@ spec prefix connection =
t3.at long_name . to_vector . should_equal [1, 4, 9]
t3.at ("Right_" + long_name) . to_vector . should_equal [1, 4, 9]
Test.specify "should allow to upload tables with very long column names" <|
group_builder.specify "should allow to upload tables with very long column names" <|
name_a = "x" * big_name_length + "a"
name_b = "x" * big_name_length + "b"
src = Table.new [[name_a, [1, 2, 3]], [name_b, [4, 5, 6]]]
db_table = src.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True
db_table = src.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True
Problems.assume_no_problems db_table
db_table.at name_a . to_vector . should_equal [1, 2, 3]
db_table.at name_b . to_vector . should_equal [4, 5, 6]
db_table_2 = connection.create_table (Name_Generator.random_name "long-column-names") [Column_Description.Value name_a Value_Type.Integer] temporary=True
db_table_2 = data.connection.create_table (Name_Generator.random_name "long-column-names") [Column_Description.Value name_a Value_Type.Integer] temporary=True
Problems.assume_no_problems db_table_2
db_table_2.row_count . should_equal 0
if has_maximum_column_name_length then
Test.specify "should raise an error when renaming a column to a name that is too long" <|
group_builder.specify "should raise an error when renaming a column to a name that is too long" <|
src = Table.new [["X", [1, 2, 3]], ["Y", [4, 5, 6]]]
db_table = src.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True
db_table = src.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True
c = db_table.at "X"
long_name = "a" * (max_column_name_length + 1)
c2 = c.rename long_name
@@ -262,7 +276,7 @@ spec prefix connection =
Problems.assume_no_problems <| db_table.set "[X] + [Y] * 10" "Z"
db_table.set "[X] + [Y] * 10" long_name . should_fail_with Name_Too_Long
Test.specify "should prevent upload if column names are too long" <|
group_builder.specify "should prevent upload if column names are too long" <|
name_a = "a" * (max_column_name_length + 1)
name_b = "b" * (max_column_name_length + 1)
@@ -270,17 +284,17 @@ spec prefix connection =
src = Table.new [[name_a, [1, 2, 3]], [name_b, [4, 5, 6]]]
run_with_and_without_output <|
r1 = src.select_into_database_table connection table_name temporary=True
r1 = src.select_into_database_table data.connection table_name temporary=True
r1.should_fail_with Name_Too_Long
r1.catch.entity_kind . should_equal "column"
[name_a, name_b].should_contain r1.catch.name
r2 = connection.create_table table_name [Column_Description.Value name_a Value_Type.Integer] temporary=True
r2 = data.connection.create_table table_name [Column_Description.Value name_a Value_Type.Integer] temporary=True
r2.should_fail_with Name_Too_Long
connection.query (SQL_Query.Table_Name table_name) . should_fail_with Table_Not_Found
data.connection.query (SQL_Query.Table_Name table_name) . should_fail_with Table_Not_Found
Test.specify "should ensure length is measured in code units, even if grapheme length is lower" <|
group_builder.specify "should ensure length is measured in code units, even if grapheme length is lower" <|
big_grapheme = '\u{1F926}\u{1F3FC}\u200D\u2642\uFE0F'
big_grapheme.length . should_equal 1
big_grapheme.char_vector . length . should_equal 7
@@ -289,7 +303,7 @@ spec prefix connection =
ASCII names at max length should always work.
name_ok = "a" * max_column_name_length
src1 = Table.new [["X", [1, 2, 3]]]
db_table1 = src1.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True
db_table1 = src1.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True
Problems.assume_no_problems <|
db_table1.at "X" . rename name_ok
@@ -306,7 +320,7 @@ spec prefix connection =
r1.catch.to_display_text.should_contain "too long"
src2 = Table.new [[name_ok, [1, 2, 3]], [name_large, [4, 5, 6]]]
r2 = src2.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True
r2 = src2.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True
r2.should_fail_with Name_Too_Long
r2.catch.name . should_equal name_large
@@ -315,7 +329,7 @@ spec prefix connection =
name_medium_encoded = entity_naming_properties.for_table_names.encoded_size name_medium
r3 = db_table1.at "X" . rename name_medium
src3 = Table.new [[name_ok, [1, 2, 3]], [name_medium, [4, 5, 6]]]
r4 = src3.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True
r4 = src3.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True
case name_medium_encoded > max_column_name_length of
True ->
r3.should_fail_with Name_Too_Long
@@ -324,11 +338,11 @@ spec prefix connection =
Problems.assume_no_problems r3
Problems.assume_no_problems r4
Test.specify "should truncate the column name if the resulting operation-generated name is too long, without warnings" <|
group_builder.specify "should truncate the column name if the resulting operation-generated name is too long, without warnings" <|
name_a = "a" * max_column_name_length
name_b = "b" * max_column_name_length
src = Table.new [[name_a, [1]], [name_b, [2]]]
db_table = src.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True
db_table = src.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True
a = db_table.at name_a
b = db_table.at name_b
@@ -354,20 +368,20 @@ spec prefix connection =
c3.name.should_contain "..., "
c3.name.should_contain ")"
Test.specify "raise an error if name provided by the user in aggregate is too long" <|
group_builder.specify "raise an error if name provided by the user in aggregate is too long" <|
src = Table.new [["X", [1, 2, 3]]]
db_table = src.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True
db_table = src.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True
long_name = "a" * (max_column_name_length + 1)
r = db_table.aggregate [Aggregate_Column.Maximum "X" new_name=long_name]
r.should_fail_with Name_Too_Long
r.catch.entity_kind . should_equal "column"
r.catch.name . should_equal long_name
Test.specify "should truncate and deduplicate autogenerated names in aggregate, and issue a warning" <|
group_builder.specify "should truncate and deduplicate autogenerated names in aggregate, and issue a warning" <|
name_a = "x" * (max_column_name_length - 1) + "A"
name_b = "x" * (max_column_name_length - 1) + "B"
src = Table.new [[name_a, [1, 2, 3]], [name_b, [4, 5, 6]]]
db_table = src.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True
db_table = src.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True
t2 = db_table.aggregate [Aggregate_Column.Maximum name_a, Aggregate_Column.Maximum name_b]
w1 = Problems.expect_warning Truncated_Column_Names t2
@@ -381,7 +395,7 @@ spec prefix connection =
letter = Text.from_codepoints [65 + i]
"x" * (max_column_name_length - 1) + letter
src2 = Table.new (names.map_with_index i-> name-> [name, [100 + i, 200 + i]])
db_table2 = src2.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True
db_table2 = src2.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True
Problems.assume_no_problems db_table2
t3 = db_table2.aggregate (names.map name-> Aggregate_Column.Maximum name)
w2 = Problems.expect_warning Truncated_Column_Names t3
@@ -390,11 +404,11 @@ spec prefix connection =
(0.up_to 15).each i->
t3.at (w2.find_truncated_name ("Maximum " + names.at i)) . to_vector . should_equal [200 + i]
Test.specify "should truncate and deduplicate the names in join/cross_join, and issue a warning" <|
group_builder.specify "should truncate and deduplicate the names in join/cross_join, and issue a warning" <|
name_a = "x" * (max_column_name_length - 1) + "A"
name_b = "x" * (max_column_name_length - 1) + "B"
src = Table.new [[name_a, [1, 2, 3]], [name_b, [4, 5, 6]]]
db_table = src.select_into_database_table connection (Name_Generator.random_name "long-column-names") temporary=True
db_table = src.select_into_database_table data.connection (Name_Generator.random_name "long-column-names") temporary=True
Test.with_clue "join: " <|
t2 = db_table.join db_table join_kind=Join_Kind.Left_Outer on=name_a right_prefix="RIGHT_"
@@ -422,13 +436,13 @@ spec prefix connection =
m2.at (w.truncated_names.at 0) . to_vector . should_contain_the_same_elements_as [1, 2, 3]
m2.at (w.truncated_names.at 1) . to_vector . should_contain_the_same_elements_as [4, 5, 6]
Test.specify "should truncate new column names in other operations" <|
group_builder.specify "should truncate new column names in other operations" <|
name_a = "x" * (max_column_name_length - 1) + "A"
name_b = "x" * (max_column_name_length - 1) + "B"
src_a = Table.new [[name_a, ["1", "2", "3"]]]
src_b = Table.new [[name_b, [4, 5, 6]]]
db_a = src_a.select_into_database_table connection (Name_Generator.random_name "long-column-names-a") temporary=True
db_b = src_b.select_into_database_table connection (Name_Generator.random_name "long-column-names-b") temporary=True
db_a = src_a.select_into_database_table data.connection (Name_Generator.random_name "long-column-names-a") temporary=True
db_b = src_b.select_into_database_table data.connection (Name_Generator.random_name "long-column-names-b") temporary=True
Test.with_clue "zip test will have to be amended once it is implemented: " <|
db_a.zip db_b . should_fail_with Unsupported_Database_Operation
@@ -446,14 +460,14 @@ spec prefix connection =
name_1 = "x" * (max_column_name_length + 1) + "A"
name_2 = "x" * (max_column_name_length + 1) + "D"
src = Table.new [["X", [name_1, "B", "C"]], ["Y", [name_2, "E", "F"]]]
db_table = src.select_into_database_table connection (Name_Generator.random_name "first-row-as-names") temporary=True primary_key=[]
db_table = src.select_into_database_table data.connection (Name_Generator.random_name "first-row-as-names") temporary=True primary_key=[]
db_table.use_first_row_as_names . should_fail_with Unsupported_Database_Operation
Test.specify "should fail other operations that create new columns if the column names are too long" <|
group_builder.specify "should fail other operations that create new columns if the column names are too long" <|
name_a = "x" * (max_column_name_length + 1) + "A"
name_b = "x" * (max_column_name_length + 1) + "B"
src = Table.new [["X", [name_a, name_b, name_a]], ["Y", [4, 5, 6]]]
db_table = src.select_into_database_table connection (Name_Generator.random_name "cross-tab") temporary=True primary_key=[]
db_table = src.select_into_database_table data.connection (Name_Generator.random_name "cross-tab") temporary=True primary_key=[]
Test.with_clue "cross_tab test will have to be amended once it is implemented: " <|
db_table.cross_tab group_by=[] name_column="X" . should_fail_with Unsupported_Database_Operation
@@ -462,5 +476,8 @@ spec prefix connection =
db_table.transpose attribute_column_name=name_a . should_fail_with Unsupported_Database_Operation
db_table.transpose value_column_name=name_a . should_fail_with Unsupported_Database_Operation
main = Test_Suite.run_main <|
spec "[SQLite] " (Database.connect (SQLite In_Memory))
main =
suite = Test.build suite_builder->
add_specs suite_builder "[SQLite]" (_-> Database.connect (SQLite In_Memory))
suite.run_with_filter

View File

@@ -1,16 +1,19 @@
from Standard.Base import all
from Standard.Test import Test_Suite
from Standard.Test_New import Test
import project.Database.Codegen_Spec
import project.Database.SQLite_Spec
import project.Database.Postgres_Spec
import project.Database.Redshift_Spec
spec =
Codegen_Spec.spec
SQLite_Spec.spec
Postgres_Spec.spec
Redshift_Spec.spec
add_specs suite_builder =
Codegen_Spec.add_specs suite_builder
SQLite_Spec.add_specs suite_builder
Postgres_Spec.add_specs suite_builder
Redshift_Spec.add_specs suite_builder
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

View File

@@ -16,9 +16,8 @@ import Standard.Database.Internal.Replace_Params.Replace_Params
from Standard.Database import all
from Standard.Database.Errors import all
from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
import Standard.Test.Test_Environment
from Standard.Test_New import all
import Standard.Test_New.Test_Environment
import project.Database.Common.Common_Spec
import project.Database.Transaction_Spec
@@ -32,34 +31,27 @@ from project.Database.Types.Postgres_Type_Mapping_Spec import default_text
import enso_dev.Base_Tests.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup
from enso_dev.Base_Tests.Network.Enso_Cloud.Cloud_Tests_Setup import with_retries
postgres_specific_spec connection db_name setup =
Test.group "[PostgreSQL] Schemas and Databases" <|
Test.specify "should be able to get current database and list databases" <|
connection.database . should_equal db_name
connection.databases.length . should_not_equal 0
connection.databases.contains db_name . should_be_true
Meta.is_same_object connection (connection.set_database db_name) . should_be_true
Test.specify "should be able to get current schema and list schemas" <|
connection.schema . should_equal "public"
connection.schemas.length . should_not_equal 0
connection.schemas.contains "public" . should_be_true
connection.schemas.contains "information_schema" . should_be_true
Meta.is_same_object connection (connection.set_schema "public") . should_be_true
type Basic_Test_Data
Value ~connection
Test.specify "should allow changing schema" <|
new_connection = connection.set_schema "information_schema"
new_schema = new_connection.read (SQL_Query.Raw_SQL "SELECT current_schema()") . at 0 . to_vector . first
new_schema . should_equal "information_schema"
setup create_connection_fn =
Basic_Test_Data.Value (create_connection_fn Nothing)
databases = connection.databases.filter d->((d!=db_name) && (d!='rdsadmin'))
pending_database = if databases.length != 0 then Nothing else "Cannot test changing database unless two databases defined."
Test.specify "should allow changing database" pending=pending_database <|
new_connection = connection.set_database databases.first
new_database = new_connection.read (SQL_Query.Raw_SQL "SELECT current_database()") . at 0 . to_vector . first
new_database . should_equal databases.first
teardown self =
self.connection.close
Test.group "[PostgreSQL] Tables and Table Types" <|
type Postgres_Tables_Data
Value ~data
connection self = self.data.at 0
tinfo self = self.data.at 1
vinfo self = self.data.at 2
temporary_table self = self.data.at 3
setup create_connection_fn = Postgres_Tables_Data.Value <|
connection = create_connection_fn Nothing
tinfo = Name_Generator.random_name "TestTable"
connection.execute_update 'CREATE TABLE "'+tinfo+'" ("A" VARCHAR)'
@@ -68,48 +60,141 @@ postgres_specific_spec connection db_name setup =
temporary_table = Name_Generator.random_name "TemporaryTable"
(Table.new [["X", [1, 2, 3]]]).select_into_database_table connection temporary_table temporary=True
[connection, tinfo, vinfo, temporary_table]
Test.specify "should be able to list table types" <|
table_types = connection.table_types
teardown self =
self.connection.execute_update 'DROP VIEW "'+self.vinfo+'";'
self.connection.execute_update 'DROP TABLE "'+self.tinfo+'";'
self.connection.close
type Postgres_Info_Data
Value ~data
connection self = self.data.at 0
tinfo self = self.data.at 1
t self = self.data.at 2
setup create_connection_fn = Postgres_Info_Data.Value <|
connection = create_connection_fn Nothing
tinfo = Name_Generator.random_name "Tinfo"
connection.execute_update 'CREATE TEMPORARY TABLE "'+tinfo+'" ("strs" VARCHAR, "ints" BIGINT, "bools" BOOLEAN, "doubles" DOUBLE PRECISION)'
t = connection.query (SQL_Query.Table_Name tinfo)
row1 = ["a", Nothing, False, 1.2]
row2 = ["abc", Nothing, Nothing, 1.3]
row3 = ["def", 42, True, 1.4]
Panic.rethrow <|
t.update_rows (Table.from_rows ["strs", "ints", "bools", "doubles"] [row1, row2, row3]) update_action=Update_Action.Insert
[connection, tinfo, t]
teardown self =
self.connection.execute_update 'DROP TABLE "'+self.tinfo+'"'
self.connection.close
type Postgres_Aggregate_Data
Value ~data
connection self = self.data.at 0
name self = self.data.at 1
t self = self.data.at 2
setup create_connection_fn = Postgres_Aggregate_Data.Value <|
connection = create_connection_fn Nothing
name = Name_Generator.random_name "Ttypes"
connection.execute_update 'CREATE TEMPORARY TABLE "'+name+'" ("txt" VARCHAR, "i1" SMALLINT, "i2" INT, "i3" BIGINT, "i4" NUMERIC, "r1" REAL, "r2" DOUBLE PRECISION, "bools" BOOLEAN)'
t = connection.query (SQL_Query.Table_Name name)
[connection, name, t]
teardown self =
self.connection.execute_update 'DROP TABLE "'+self.name+'"'
self.connection.close
postgres_specific_spec suite_builder create_connection_fn db_name setup =
table_builder = setup.table_builder
materialize = setup.materialize
suite_builder.group "[PostgreSQL] Schemas and Databases" group_builder->
data = Basic_Test_Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
group_builder.specify "should be able to get current database and list databases" <|
data.connection.database . should_equal db_name
data.connection.databases.length . should_not_equal 0
data.connection.databases.contains db_name . should_be_true
Meta.is_same_object data.connection (data.connection.set_database db_name) . should_be_true
group_builder.specify "should be able to get current schema and list schemas" <|
data.connection.schema . should_equal "public"
data.connection.schemas.length . should_not_equal 0
data.connection.schemas.contains "public" . should_be_true
data.connection.schemas.contains "information_schema" . should_be_true
Meta.is_same_object data.connection (data.connection.set_schema "public") . should_be_true
group_builder.specify "should allow changing schema" <|
new_connection = data.connection.set_schema "information_schema"
new_schema = new_connection.read (SQL_Query.Raw_SQL "SELECT current_schema()") . at 0 . to_vector . first
new_schema . should_equal "information_schema"
group_builder.specify "should allow changing database" <|
databases = data.connection.databases.filter d->((d!=db_name) && (d!='rdsadmin'))
            pending_database = if databases.length != 0 then Nothing else "Cannot test changing database unless two databases are defined."
case pending_database of
Nothing ->
new_connection = data.connection.set_database databases.first
new_database = new_connection.read (SQL_Query.Raw_SQL "SELECT current_database()") . at 0 . to_vector . first
new_database . should_equal databases.first
                # No-op - skip the test.
_ -> Nothing
suite_builder.group "[PostgreSQL] Tables and Table Types" group_builder->
data = Postgres_Tables_Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
group_builder.specify "should be able to list table types" <|
table_types = data.connection.table_types
table_types.length . should_not_equal 0
table_types.contains "TABLE" . should_be_true
table_types.contains "VIEW" . should_be_true
Test.specify "should be able to list tables" <|
tables = connection.tables
group_builder.specify "should be able to list tables" <|
tables = data.connection.tables
tables.row_count . should_not_equal 0
tables.columns.map .name . should_equal ["Database", "Schema", "Name", "Type", "Description"]
table_names = tables.at "Name" . to_vector
table_names.should_contain tinfo
table_names.should_contain vinfo
table_names.should_contain temporary_table
table_names.should_contain data.tinfo
table_names.should_contain data.vinfo
table_names.should_contain data.temporary_table
Test.specify "should be able to filter tables by name" <|
tables = connection.tables tinfo
group_builder.specify "should be able to filter tables by name" <|
tables = data.connection.tables data.tinfo
tables.row_count . should_equal 1
## The database check is disabled as the Postgres JDBC driver does not return the database name.
## tables.at "Database" . to_vector . at 0 . should_equal db_name
tables.at "Schema" . to_vector . at 0 . should_equal "public"
tables.at "Name" . to_vector . at 0 . should_equal tinfo
tables.at "Name" . to_vector . at 0 . should_equal data.tinfo
tables.at "Type" . to_vector . at 0 . should_equal "TABLE"
connection.tables "TestT_ble%" . row_count . should_equal 1
connection.tables "Temporary%ble%" . row_count . should_equal 1
connection.tables "Temporary%ble%" . at "Type" . to_vector . should_equal ["TEMPORARY TABLE"]
connection.tables "N_nexistent%" . row_count . should_equal 0
data.connection.tables "TestT_ble%" . row_count . should_equal 1
data.connection.tables "Temporary%ble%" . row_count . should_equal 1
data.connection.tables "Temporary%ble%" . at "Type" . to_vector . should_equal ["TEMPORARY TABLE"]
data.connection.tables "N_nexistent%" . row_count . should_equal 0
Test.specify "should be able to filter tables by type" <|
tables = connection.tables types=["VIEW"]
group_builder.specify "should be able to filter tables by type" <|
tables = data.connection.tables types=["VIEW"]
tables.row_count . should_not_equal 0
tables.at "Name" . to_vector . contains tinfo . should_be_false
tables.at "Name" . to_vector . contains vinfo . should_be_true
tables.at "Name" . to_vector . contains data.tinfo . should_be_false
tables.at "Name" . to_vector . contains data.vinfo . should_be_true
connection.execute_update 'DROP VIEW "'+vinfo+'";'
connection.execute_update 'DROP TABLE "'+tinfo+'";'
Test.group "[PostgreSQL] Database Encoding" <|
Test.specify "connector should support all known Postgres encodings" <|
suite_builder.group "[PostgreSQL] Database Encoding" group_builder->
group_builder.specify "connector should support all known Postgres encodings" <|
known_encodings = (enso_project.data / "postgres_known_encodings.txt") . read . lines
known_encodings.length . should_equal 41
@@ -133,40 +218,35 @@ postgres_specific_spec connection db_name setup =
False ->
Problems.assume_no_problems encoding
suite_builder.group "[PostgreSQL] Info" group_builder->
data = Postgres_Info_Data.setup create_connection_fn
tinfo = Name_Generator.random_name "Tinfo"
connection.execute_update 'CREATE TEMPORARY TABLE "'+tinfo+'" ("strs" VARCHAR, "ints" BIGINT, "bools" BOOLEAN, "doubles" DOUBLE PRECISION)'
Test.group "[PostgreSQL] Info" <|
t = connection.query (SQL_Query.Table_Name tinfo)
row1 = ["a", Nothing, False, 1.2]
row2 = ["abc", Nothing, Nothing, 1.3]
row3 = ["def", 42, True, 1.4]
Panic.rethrow <|
t.update_rows (Table.from_rows ["strs", "ints", "bools", "doubles"] [row1, row2, row3]) update_action=Update_Action.Insert
group_builder.teardown <|
data.teardown
Test.specify "should return Table information" <|
i = t.info
group_builder.specify "should return Table information" <|
i = data.t.info
i.at "Column" . to_vector . should_equal ["strs", "ints", "bools", "doubles"]
i.at "Items Count" . to_vector . should_equal [3, 1, 2, 3]
i.at "Value Type" . to_vector . should_equal [default_text, Value_Type.Integer, Value_Type.Boolean, Value_Type.Float]
Test.specify "should return Table information, also for aggregated results" <|
i = t.aggregate [Concatenate "strs", Sum "ints", Count_Distinct "bools"] . info
group_builder.specify "should return Table information, also for aggregated results" <|
i = data.t.aggregate [Concatenate "strs", Sum "ints", Count_Distinct "bools"] . info
i.at "Column" . to_vector . should_equal ["Concatenate strs", "Sum ints", "Count Distinct bools"]
i.at "Items Count" . to_vector . should_equal [1, 1, 1]
i.at "Value Type" . to_vector . should_equal [default_text, Value_Type.Decimal, Value_Type.Integer]
Test.specify "should infer standard types correctly" <|
t.at "strs" . value_type . is_text . should_be_true
t.at "ints" . value_type . is_integer . should_be_true
t.at "bools" . value_type . is_boolean . should_be_true
t.at "doubles" . value_type . is_floating_point . should_be_true
group_builder.specify "should infer standard types correctly" <|
data.t.at "strs" . value_type . is_text . should_be_true
data.t.at "ints" . value_type . is_integer . should_be_true
data.t.at "bools" . value_type . is_boolean . should_be_true
data.t.at "doubles" . value_type . is_floating_point . should_be_true
Test.specify "should preserve Postgres types when table is materialized, where possible" <|
group_builder.specify "should preserve Postgres types when table is materialized, where possible" <|
name = Name_Generator.random_name "types-test"
Problems.assume_no_problems <|
connection.execute_update 'CREATE TEMPORARY TABLE "'+name+'" ("int4" int4, "int2" int2, "txt-limited" varchar(10), "txt-fixed" char(3))'
t1 = connection.query (SQL_Query.Table_Name name)
data.connection.execute_update 'CREATE TEMPORARY TABLE "'+name+'" ("int4" int4, "int2" int2, "txt-limited" varchar(10), "txt-fixed" char(3))'
t1 = data.connection.query (SQL_Query.Table_Name name)
t1.at "int4" . value_type . should_equal (Value_Type.Integer Bits.Bits_32)
t1.at "int2" . value_type . should_equal (Value_Type.Integer Bits.Bits_16)
t1.at "txt-limited" . value_type . should_equal (Value_Type.Char size=10 variable_length=True)
@ -178,32 +258,38 @@ postgres_specific_spec connection db_name setup =
in_memory.at "txt-limited" . value_type . should_equal (Value_Type.Char size=10 variable_length=True)
in_memory.at "txt-fixed" . value_type . should_equal (Value_Type.Char size=3 variable_length=False)
Test.group "[PostgreSQL] Dialect-specific codegen" <|
Test.specify "should generate queries for the Distinct operation" <|
t = connection.query (SQL_Query.Table_Name tinfo)
code_template = 'SELECT "{Tinfo}"."strs" AS "strs", "{Tinfo}"."ints" AS "ints", "{Tinfo}"."bools" AS "bools", "{Tinfo}"."doubles" AS "doubles" FROM (SELECT DISTINCT ON ("{Tinfo}_inner"."strs") "{Tinfo}_inner"."strs" AS "strs", "{Tinfo}_inner"."ints" AS "ints", "{Tinfo}_inner"."bools" AS "bools", "{Tinfo}_inner"."doubles" AS "doubles" FROM (SELECT "{Tinfo}"."strs" AS "strs", "{Tinfo}"."ints" AS "ints", "{Tinfo}"."bools" AS "bools", "{Tinfo}"."doubles" AS "doubles" FROM "{Tinfo}" AS "{Tinfo}") AS "{Tinfo}_inner") AS "{Tinfo}"'
expected_code = code_template.replace "{Tinfo}" tinfo
t.distinct ["strs"] . to_sql . prepare . should_equal [expected_code, []]
connection.execute_update 'DROP TABLE "'+tinfo+'"'
suite_builder.group "[PostgreSQL] Dialect-specific codegen" group_builder->
data = Postgres_Info_Data.setup create_connection_fn
Test.group "[PostgreSQL] Table.aggregate should correctly infer result types" <|
name = Name_Generator.random_name "Ttypes"
connection.execute_update 'CREATE TEMPORARY TABLE "'+name+'" ("txt" VARCHAR, "i1" SMALLINT, "i2" INT, "i3" BIGINT, "i4" NUMERIC, "r1" REAL, "r2" DOUBLE PRECISION, "bools" BOOLEAN)'
t = connection.query (SQL_Query.Table_Name name)
Test.specify "Concatenate, Shortest and Longest" <|
r = t.aggregate [Concatenate "txt", Shortest "txt", Longest "txt"]
group_builder.teardown <|
data.teardown
group_builder.specify "should generate queries for the Distinct operation" <|
t = data.connection.query (SQL_Query.Table_Name data.tinfo)
code_template = 'SELECT "{Tinfo}"."strs" AS "strs", "{Tinfo}"."ints" AS "ints", "{Tinfo}"."bools" AS "bools", "{Tinfo}"."doubles" AS "doubles" FROM (SELECT DISTINCT ON ("{Tinfo}_inner"."strs") "{Tinfo}_inner"."strs" AS "strs", "{Tinfo}_inner"."ints" AS "ints", "{Tinfo}_inner"."bools" AS "bools", "{Tinfo}_inner"."doubles" AS "doubles" FROM (SELECT "{Tinfo}"."strs" AS "strs", "{Tinfo}"."ints" AS "ints", "{Tinfo}"."bools" AS "bools", "{Tinfo}"."doubles" AS "doubles" FROM "{Tinfo}" AS "{Tinfo}") AS "{Tinfo}_inner") AS "{Tinfo}"'
expected_code = code_template.replace "{Tinfo}" data.tinfo
t.distinct ["strs"] . to_sql . prepare . should_equal [expected_code, []]
suite_builder.group "[PostgreSQL] Table.aggregate should correctly infer result types" group_builder->
data = Postgres_Aggregate_Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
group_builder.specify "Concatenate, Shortest and Longest" <|
r = data.t.aggregate [Concatenate "txt", Shortest "txt", Longest "txt"]
r.columns.at 0 . value_type . should_equal default_text
r.columns.at 1 . value_type . should_equal default_text
r.columns.at 2 . value_type . should_equal default_text
Test.specify "Counts" <|
r = t.aggregate [Count, Count_Empty "txt", Count_Not_Empty "txt", Count_Distinct "i1", Count_Not_Nothing "i2", Count_Nothing "i3"]
group_builder.specify "Counts" <|
r = data.t.aggregate [Count, Count_Empty "txt", Count_Not_Empty "txt", Count_Distinct "i1", Count_Not_Nothing "i2", Count_Nothing "i3"]
r.column_count . should_equal 6
r.columns.each column->
column.value_type . should_equal Value_Type.Integer
Test.specify "Sum" <|
r = t.aggregate [Sum "i1", Sum "i2", Sum "i3", Sum "i4", Sum "r1", Sum "r2"]
group_builder.specify "Sum" <|
r = data.t.aggregate [Sum "i1", Sum "i2", Sum "i3", Sum "i4", Sum "r1", Sum "r2"]
r.columns.at 0 . value_type . should_equal Value_Type.Integer
r.columns.at 1 . value_type . should_equal Value_Type.Integer
r.columns.at 2 . value_type . should_equal Value_Type.Decimal
@ -211,8 +297,8 @@ postgres_specific_spec connection db_name setup =
r.columns.at 4 . value_type . should_equal (Value_Type.Float Bits.Bits_32)
r.columns.at 5 . value_type . should_equal (Value_Type.Float Bits.Bits_64)
Test.specify "Average" <|
r = t.aggregate [Average "i1", Average "i2", Average "i3", Average "i4", Average "r1", Average "r2"]
group_builder.specify "Average" <|
r = data.t.aggregate [Average "i1", Average "i2", Average "i3", Average "i4", Average "r1", Average "r2"]
r.columns.at 0 . value_type . should_equal Value_Type.Decimal
r.columns.at 1 . value_type . should_equal Value_Type.Decimal
r.columns.at 2 . value_type . should_equal Value_Type.Decimal
@ -220,51 +306,58 @@ postgres_specific_spec connection db_name setup =
r.columns.at 4 . value_type . should_equal Value_Type.Float
r.columns.at 5 . value_type . should_equal Value_Type.Float
connection.execute_update 'DROP TABLE "'+name+'"'
Test.group "[PostgreSQL] Warning/Error handling" <|
Test.specify "query warnings should be propagated" <|
suite_builder.group "[PostgreSQL] Warning/Error handling" group_builder->
data = Basic_Test_Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
group_builder.specify "query warnings should be propagated" <|
long_name = (Name_Generator.random_name "T") + ("a" * 100)
r = connection.execute_update 'CREATE TEMPORARY TABLE "'+long_name+'" ("A" VARCHAR)'
r = data.connection.execute_update 'CREATE TEMPORARY TABLE "'+long_name+'" ("A" VARCHAR)'
w1 = Problems.expect_only_warning SQL_Warning r
# The display text may itself be truncated, so we just check the first words.
w1.to_display_text . should_contain "identifier"
# And check the full message for words that could be truncated in the short message.
w1.message . should_contain "truncated to"
table = connection.query (SQL_Query.Raw_SQL 'SELECT 1 AS "'+long_name+'"')
table = data.connection.query (SQL_Query.Raw_SQL 'SELECT 1 AS "'+long_name+'"')
w2 = Problems.expect_only_warning SQL_Warning table
w2.message . should_contain "truncated"
effective_name = table.column_names . at 0
effective_name . should_not_equal long_name
long_name.should_contain effective_name
Test.specify "is capable of handling weird tables" <|
connection.execute_update 'CREATE TEMPORARY TABLE "empty-column-name" ("" VARCHAR)' . should_fail_with SQL_Error
group_builder.specify "is capable of handling weird tables" <|
data.connection.execute_update 'CREATE TEMPORARY TABLE "empty-column-name" ("" VARCHAR)' . should_fail_with SQL_Error
Problems.assume_no_problems <|
connection.execute_update 'CREATE TEMPORARY TABLE "clashing-unicode-names" ("ś" VARCHAR, "s\u0301" INTEGER)'
data.connection.execute_update 'CREATE TEMPORARY TABLE "clashing-unicode-names" ("ś" VARCHAR, "s\u0301" INTEGER)'
Problems.assume_no_problems <|
connection.execute_update 'INSERT INTO "clashing-unicode-names" VALUES (\'A\', 2)'
t2 = connection.query (SQL_Query.Table_Name "clashing-unicode-names")
data.connection.execute_update 'INSERT INTO "clashing-unicode-names" VALUES (\'A\', 2)'
t2 = data.connection.query (SQL_Query.Table_Name "clashing-unicode-names")
Problems.expect_only_warning Duplicate_Output_Column_Names t2
t2.column_names . should_equal ["ś", "ś 1"]
m2 = t2.read
m2.at "ś" . to_vector . should_equal ["A"]
m2.at "ś 1" . to_vector . should_equal [2]
r3 = connection.query 'SELECT 1 AS "A", 2 AS "A"'
r3 = data.connection.query 'SELECT 1 AS "A", 2 AS "A"'
r3.should_fail_with Illegal_Argument
r3.catch.cause . should_be_a Duplicate_Output_Column_Names
r4 = connection.query 'SELECT 1 AS ""'
r4 = data.connection.query 'SELECT 1 AS ""'
r4.should_fail_with SQL_Error
table_builder = setup.table_builder
materialize = setup.materialize
Test.group "[PostgreSQL] Edge Cases" <|
Test.specify "materialize should respect the overridden type" <|
t0 = table_builder [["x", [False, True, False]], ["A", ["a", "b", "c"]], ["B", ["xyz", "abc", "def"]]]
suite_builder.group "[PostgreSQL] Edge Cases" group_builder->
data = Basic_Test_Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
group_builder.specify "materialize should respect the overridden type" <|
t0 = table_builder [["x", [False, True, False]], ["A", ["a", "b", "c"]], ["B", ["xyz", "abc", "def"]]] connection=data.connection
t1 = t0 . cast "A" (Value_Type.Char size=1 variable_length=False) . cast "B" (Value_Type.Char size=3 variable_length=False)
x = t1.at "x"
@ -283,12 +376,12 @@ postgres_specific_spec connection db_name setup =
Test.with_clue "d.value_type="+d.value_type.to_display_text+": " <|
d.value_type.variable_length.should_be_true
Test.specify "should be able to round-trip a BigInteger column" <|
group_builder.specify "should be able to round-trip a BigInteger column" <|
x = 2^70
m1 = Table.new [["X", [10, x]]]
m1.at "X" . value_type . should_be_a (Value_Type.Decimal ...)
t1 = m1.select_into_database_table connection (Name_Generator.random_name "BigInteger") primary_key=[] temporary=True
t1 = m1.select_into_database_table data.connection (Name_Generator.random_name "BigInteger") primary_key=[] temporary=True
t1.at "X" . value_type . should_be_a (Value_Type.Decimal ...)
t1.at "X" . value_type . scale . should_equal 0
# If we want to enforce the scale, Postgres requires us to enforce a precision too, so we use the biggest one we can:
@ -325,7 +418,7 @@ postgres_specific_spec connection db_name setup =
super_large = 11^2000
m3 = Table.new [["X", [super_large]]]
m3.at "X" . value_type . should_be_a (Value_Type.Decimal ...)
t3 = m3.select_into_database_table connection (Name_Generator.random_name "BigInteger2") primary_key=[] temporary=True
t3 = m3.select_into_database_table data.connection (Name_Generator.random_name "BigInteger2") primary_key=[] temporary=True
t3 . at "X" . value_type . should_be_a (Value_Type.Decimal ...)
# If we exceed the 1000-digit precision limit, we can no longer enforce either scale or precision.
t3 . at "X" . value_type . precision . should_equal Nothing
@ -344,9 +437,9 @@ postgres_specific_spec connection db_name setup =
w4.requested_type . should_equal (Value_Type.Decimal precision=Nothing scale=Nothing)
w4.actual_type . should_equal Value_Type.Float
Test.specify "should round-trip timestamptz column, preserving instant but converting to UTC" <|
group_builder.specify "should round-trip timestamptz column, preserving instant but converting to UTC" <|
table_name = Name_Generator.random_name "TimestampTZ"
table = connection.create_table table_name [Column_Description.Value "A" (Value_Type.Date_Time with_timezone=True)] primary_key=[]
table = data.connection.create_table table_name [Column_Description.Value "A" (Value_Type.Date_Time with_timezone=True)] primary_key=[]
dt1 = Date_Time.new 2022 05 04 15 30
dt2 = Date_Time.new 2022 05 04 15 30 zone=(Time_Zone.utc)
@ -377,9 +470,9 @@ postgres_specific_spec connection db_name setup =
t2.row_count . should_equal local_equals.length
t2.at "A" . to_vector . should_equal_tz_agnostic local_equals
Test.specify "will round-trip timestamp column without timezone by converting it to UTC" <|
group_builder.specify "will round-trip timestamp column without timezone by converting it to UTC" <|
table_name = Name_Generator.random_name "Timestamp"
table = connection.create_table table_name [Column_Description.Value "A" (Value_Type.Date_Time with_timezone=False)] primary_key=[]
table = data.connection.create_table table_name [Column_Description.Value "A" (Value_Type.Date_Time with_timezone=False)] primary_key=[]
Problems.assume_no_problems table
dt1 = Date_Time.new 2022 05 04 15 30
@ -439,9 +532,14 @@ postgres_specific_spec connection db_name setup =
t2.row_count . should_equal 0
t2.at "A" . to_vector . should_equal []
Test.group "[PostgreSQL] math functions" <|
Test.specify "round, trunc, ceil, floor" <|
col = table_builder [["x", [0.1, 0.9, 3.1, 3.9, -0.1, -0.9, -3.1, -3.9]]] . at "x"
suite_builder.group "[PostgreSQL] math functions" group_builder->
data = Basic_Test_Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
group_builder.specify "round, trunc, ceil, floor" <|
col = (table_builder [["x", [0.1, 0.9, 3.1, 3.9, -0.1, -0.9, -3.1, -3.9]]] connection=data.connection) . at "x"
col . cast Value_Type.Integer . ceil . value_type . should_equal Value_Type.Float
col . cast Value_Type.Float . round . value_type . should_equal Value_Type.Float
@ -468,70 +566,77 @@ postgres_specific_spec connection db_name setup =
col . cast Value_Type.Integer . truncate . value_type . should_equal Value_Type.Float
col . cast Value_Type.Decimal . truncate . value_type . should_equal Value_Type.Decimal
do_op n op =
table = table_builder [["x", [n]]]
do_op data n op =
table = table_builder [["x", [n]]] connection=data.connection
result = table.at "x" |> op
result.to_vector.at 0
do_round n dp=0 use_bankers=False = do_op n (_.round dp use_bankers)
Test.specify "Can round correctly near the precision limit" <|
do_round 1.2222222222222225 15 . should_equal 1.222222222222223
do_round -1.2222222222222225 15 . should_equal -1.222222222222223
do_round 1.2222222222222235 15 . should_equal 1.222222222222224
do_round -1.2222222222222235 15 . should_equal -1.222222222222224
do_round data n dp=0 use_bankers=False = do_op data n (_.round dp use_bankers)
Test.specify "Can round correctly near the precision limit, using banker's rounding" <|
do_round 1.2222222222222225 15 use_bankers=True . should_equal 1.222222222222222
do_round -1.2222222222222225 15 use_bankers=True . should_equal -1.222222222222222
do_round 1.2222222222222235 15 use_bankers=True . should_equal 1.222222222222224
do_round -1.2222222222222235 15 use_bankers=True . should_equal -1.222222222222224
group_builder.specify "Can round correctly near the precision limit" <|
do_round data 1.2222222222222225 15 . should_equal 1.222222222222223
do_round data -1.2222222222222225 15 . should_equal -1.222222222222223
do_round data 1.2222222222222235 15 . should_equal 1.222222222222224
do_round data -1.2222222222222235 15 . should_equal -1.222222222222224
Test.specify "Can handle NaN/Infinity" <|
group_builder.specify "Can round correctly near the precision limit, using banker's rounding" <|
do_round data 1.2222222222222225 15 use_bankers=True . should_equal 1.222222222222222
do_round data -1.2222222222222225 15 use_bankers=True . should_equal -1.222222222222222
do_round data 1.2222222222222235 15 use_bankers=True . should_equal 1.222222222222224
do_round data -1.2222222222222235 15 use_bankers=True . should_equal -1.222222222222224
group_builder.specify "Can handle NaN/Infinity" <|
nan_result = if setup.test_selection.is_nan_and_nothing_distinct then Number.nan else Nothing
ops = [.round, .truncate, .ceil, .floor]
ops.each op->
do_op Number.nan op . should_equal nan_result
do_op Number.positive_infinity op . should_equal Number.positive_infinity
do_op Number.negative_infinity op . should_equal Number.negative_infinity
do_op data Number.nan op . should_equal nan_result
do_op data Number.positive_infinity op . should_equal Number.positive_infinity
do_op data Number.negative_infinity op . should_equal Number.negative_infinity
Test.specify "round returns the correct type" <|
do_round 231.2 1 . should_be_a Float
do_round 231.2 0 . should_be_a Float
do_round 231.2 . should_be_a Float
do_round 231.2 -1 . should_be_a Float
group_builder.specify "round returns the correct type" <|
do_round data 231.2 1 . should_be_a Float
do_round data 231.2 0 . should_be_a Float
do_round data 231.2 . should_be_a Float
do_round data 231.2 -1 . should_be_a Float
Test.specify "round returns the correct type" <|
do_round 231 1 . should_be_a Float
do_round 231 0 . should_be_a Float
do_round 231 . should_be_a Float
do_round 231 -1 . should_be_a Float
group_builder.specify "round returns the correct type" <|
do_round data 231 1 . should_be_a Float
do_round data 231 0 . should_be_a Float
do_round data 231 . should_be_a Float
do_round data 231 -1 . should_be_a Float
run_tests connection db_name =
add_postgres_specs suite_builder create_connection_fn db_name =
prefix = "[PostgreSQL] "
name_counter = Ref.new 0
tables = Vector.new_builder
table_builder columns =
table_builder columns connection=(create_connection_fn Nothing) =
ix = name_counter.get
name_counter . put ix+1
name = Name_Generator.random_name "table_"+ix.to_text
in_mem_table = Table.new columns
in_mem_table.select_into_database_table connection name primary_key=Nothing temporary=True
materialize = .read
Common_Spec.spec prefix connection
Common_Spec.add_specs suite_builder prefix create_connection_fn
common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by_unicode_normalization_by_default=True allows_mixed_type_comparisons=False fixed_length_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=True supports_decimal_type=True supported_replace_params=supported_replace_params
aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last_row_order=False aggregation_problems=False
agg_in_memory_table = (enso_project.data / "data.csv") . read
agg_table = agg_in_memory_table.select_into_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True
tables.append agg_table.name
empty_agg_table = (agg_in_memory_table.take (First 0)).select_into_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True
tables.append empty_agg_table.name
setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table empty_agg_table table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection connection=connection
postgres_specific_spec connection db_name setup
Common_Table_Operations.Main.spec setup
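# The aggregate test tables are now created on demand, each obtaining its
# own connection, instead of being pre-created on a single shared connection.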
agg_table_fn = _->
connection = create_connection_fn Nothing
agg_in_memory_table.select_into_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True
empty_agg_table_fn = _->
connection = create_connection_fn Nothing
(agg_in_memory_table.take (First 0)).select_into_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True
setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_fn
postgres_specific_spec suite_builder create_connection_fn db_name setup
Common_Table_Operations.Main.add_specs suite_builder setup
## PRIVATE
supported_replace_params : Set Replace_Params
@ -543,7 +648,7 @@ supported_replace_params =
e4 = [Replace_Params.Value Column Case_Sensitivity.Default False, Replace_Params.Value Column Case_Sensitivity.Sensitive False]
Set.from_vector <| e0 + e1 + e2 + e3 + e4
table_spec =
add_table_specs suite_builder =
db_name = Environment.get "ENSO_DATABASE_TEST_DB_NAME"
db_host_port = (Environment.get "ENSO_DATABASE_TEST_HOST").if_nothing "localhost" . split ':'
db_host = db_host_port.at 0
@ -553,14 +658,15 @@ table_spec =
ca_cert_file = Environment.get "ENSO_DATABASE_TEST_CA_CERT_FILE"
ssl_pending = if ca_cert_file.is_nothing then "PostgreSQL SSL test not configured." else Nothing
Test.group "[PostgreSQL] SSL connectivity tests" pending=ssl_pending <|
Test.specify "should connect without ssl parameter" <|
suite_builder.group "[PostgreSQL] SSL connectivity tests" pending=ssl_pending group_builder->
group_builder.specify "should connect without ssl parameter" <|
Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password)) . should_succeed
Test.specify "should connect, requiring SSL" <|
group_builder.specify "should connect, requiring SSL" <|
Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=SSL_Mode.Require) . should_succeed
Test.specify "should connect be able to verify the certificate" <|
group_builder.specify "should connect be able to verify the certificate" <|
Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=(SSL_Mode.Verify_CA ca_cert_file)) . should_succeed
## The default certificate should not accept the self-signed certificate.
@ -568,13 +674,13 @@ table_spec =
ca_fail.is_error . should_equal True
ca_fail.catch SQL_Error . is_a SQL_Error . should_equal True
Test.specify "should connect be able to verify the host name against the certificate" <|
group_builder.specify "should connect be able to verify the host name against the certificate" <|
Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=(SSL_Mode.Full_Verification ca_cert_file)) . should_succeed
alternate_host = Environment.get "ENSO_DATABASE_TEST_ALTERNATE_HOST" . if_nothing <|
if db_host == "127.0.0.1" then "localhost" else Nothing
pending_alternate = if alternate_host.is_nothing then "Alternative host name not configured." else Nothing
Test.specify "should fail to connect with alternate host name not valid in certificate" pending=pending_alternate <|
group_builder.specify "should fail to connect with alternate host name not valid in certificate" pending=pending_alternate <|
ca_fail = Database.connect (Postgres alternate_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=(SSL_Mode.Full_Verification ca_cert_file))
ca_fail.is_error . should_equal True
ca_fail.catch SQL_Error . is_a SQL_Error . should_equal True
@ -582,18 +688,18 @@ table_spec =
case create_connection_builder of
Nothing ->
message = "PostgreSQL test database is not configured. See README.md for instructions."
Test.group "[PostgreSQL] Database tests" pending=message Nothing
suite_builder.group "[PostgreSQL] Database tests" pending=message (_-> Nothing)
connection_builder ->
connection = connection_builder Nothing
run_tests connection db_name
Postgres_Type_Mapping_Spec.spec connection
add_postgres_specs suite_builder connection_builder db_name
Postgres_Type_Mapping_Spec.add_specs suite_builder connection_builder
Transaction_Spec.spec connection "[PostgreSQL] "
Upload_Spec.spec connection_builder "[PostgreSQL] "
Transaction_Spec.add_specs suite_builder connection_builder "[PostgreSQL] "
Upload_Spec.add_specs suite_builder connection_builder "[PostgreSQL] "
suite_builder.group "[PostgreSQL] Secrets in connection settings" group_builder->
cloud_setup = Cloud_Tests_Setup.prepare
cloud_setup.with_prepared_environment <| Test.group "[PostgreSQL] Secrets in connection settings" pending=cloud_setup.pending <|
Test.specify "should allow to set up a connection with the password passed as a secret" <|
group_builder.specify "should allow to set up a connection with the password passed as a secret" pending=cloud_setup.pending <|
cloud_setup.with_prepared_environment <|
with_secret "my_postgres_username" db_user username_secret-> with_secret "my_postgres_password" db_password password_secret->
my_secret_name = "Enso Test: My Secret App NAME " + (Random.uuid.take 5)
with_secret "my_postgres_app_name" my_secret_name app_name_secret-> with_retries <|
@ -625,14 +731,15 @@ create_connection_builder =
True -> Nothing
False ->
_ -> Panic.rethrow <|
# TODO: Ensure that this returns a function and does not initialize the connection
Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password))
pgpass_file = enso_project.data / "pgpass.conf"
pgpass_spec = Test.group "[PostgreSQL] .pgpass" <|
add_pgpass_specs suite_builder = suite_builder.group "[PostgreSQL] .pgpass" group_builder->
make_pair username password =
[Pair.new "user" username, Pair.new "password" password]
Test.specify "should correctly parse the file, including escapes, blank lines and comments" <|
group_builder.specify "should correctly parse the file, including escapes, blank lines and comments" <|
result = Pgpass.parse_file pgpass_file
result.length . should_equal 12
e1 = Pgpass.Pgpass_Entry.Value "localhost" "5432" "postgres" "postgres" "postgres"
@ -651,7 +758,7 @@ pgpass_spec = Test.group "[PostgreSQL] .pgpass" <|
result.should_equal entries
if Platform.is_unix then
Test.specify "should only accept the .pgpass file if it has correct permissions" <|
group_builder.specify "should only accept the .pgpass file if it has correct permissions" <|
Process.run "chmod" ["0777", pgpass_file.absolute.path] . exit_code . should_equal Exit_Code.Success
Test_Environment.unsafe_with_environment_override "PGPASSFILE" (pgpass_file.absolute.path) <|
Pgpass.verify pgpass_file . should_equal False
@ -662,7 +769,7 @@ pgpass_spec = Test.group "[PostgreSQL] .pgpass" <|
Pgpass.verify pgpass_file . should_equal True
Pgpass.read "passwords should preserve leading space" "1" "some database name that is really : weird" . should_equal (make_pair "*" " pass")
Test.specify "should correctly match wildcards and use the first matching entry" <|
group_builder.specify "should correctly match wildcards and use the first matching entry" <|
Test_Environment.unsafe_with_environment_override "PGPASSFILE" (pgpass_file.absolute.path) <|
Pgpass.read "localhost" 5432 "postgres" . should_equal (make_pair "postgres" "postgres")
Pgpass.read "192.168.4.0" "1234" "foo" . should_equal (make_pair "bar" "baz")
@ -673,8 +780,8 @@ pgpass_spec = Test.group "[PostgreSQL] .pgpass" <|
Pgpass.read "\:" "1234" "blah" . should_equal (make_pair "*" "\:")
Pgpass.read ":" ":" ":" . should_equal (make_pair ":" ":")
connection_setup_spec = Test.group "[PostgreSQL] Connection setup" <|
Test.specify "should use environment variables as host, port and database defaults and fall back to hardcoded defaults" <|
add_connection_setup_specs suite_builder = suite_builder.group "[PostgreSQL] Connection setup" group_builder->
group_builder.specify "should use environment variables as host, port and database defaults and fall back to hardcoded defaults" <|
c1 = Postgres "example.com" 12345 "my_db"
c2 = Postgres
c3 = Test_Environment.unsafe_with_environment_override "PGHOST" "192.168.0.1" <|
@ -707,12 +814,12 @@ connection_setup_spec = Test.group "[PostgreSQL] Connection setup" <|
c4.jdbc_url . should_equal "jdbc:postgresql://localhost:5432/postgres"
add_ssl props = props+[Pair.new 'sslmode' 'prefer']
Test.specify "should use the given credentials" <|
group_builder.specify "should use the given credentials" <|
c = Postgres credentials=(Credentials.Username_And_Password "myuser" "mypass")
c.jdbc_url . should_equal "jdbc:postgresql://localhost:5432/postgres"
c.jdbc_properties . should_equal <| add_ssl [Pair.new "user" "myuser", Pair.new "password" "mypass"]
Test.specify "should fallback to environment variables and fill-out missing information based on the PGPASS file (if available)" <|
group_builder.specify "should fallback to environment variables and fill-out missing information based on the PGPASS file (if available)" <|
c1 = Postgres
c1.jdbc_url . should_equal "jdbc:postgresql://localhost:5432/postgres"
@ -751,9 +858,13 @@ connection_setup_spec = Test.group "[PostgreSQL] Connection setup" <|
[c2, c3, c4].each c->
c.jdbc_properties . should_equal <| add_ssl [Pair.new "user" "other user", Pair.new "password" "other password"]
spec =
table_spec
pgpass_spec
connection_setup_spec
add_specs suite_builder =
add_table_specs suite_builder
add_pgpass_specs suite_builder
add_connection_setup_specs suite_builder
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
main = Test_Suite.run_main spec

View File

@ -29,6 +29,16 @@ If the database server is remote, `ENSO_DATABASE_TEST_HOST` may be set to tell
the test suite where to connect. If that variable is not set, the test suite
will attempt to connect to a local server.
### Setup via Docker
The following commands download the latest Postgres Docker image and run it
with the default configuration, setting the superuser password to "pwd":
```sh
docker pull postgres:latest
docker run -it -e POSTGRES_PASSWORD=pwd -p 5432:5432 postgres
```
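
The test suite can then be pointed at this container. A minimal sketch, using
only the variables described in this document (`postgres` is the default
database created by the image; credentials may need to be configured through
further variables):

```sh
export ENSO_DATABASE_TEST_DB_NAME=postgres
export ENSO_DATABASE_TEST_HOST=localhost:5432
```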
### Testing SSL connectivity
The SSL connection can be tested by providing a root certificate file. The path to this is

View File

@ -8,15 +8,20 @@ from Standard.Database import all
from Standard.AWS import Redshift_Details, AWS_Credential
from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all
import project.Database.Common.Common_Spec
import project.Database.Helpers.Name_Generator
import project.Common_Table_Operations
redshift_specific_spec connection =
Test.group "[Redshift] Info" <|
type Data
Value ~data
connection self = self.data.at 0
t self = self.data.at 1
setup create_connection_fn = Data.Value <|
connection = create_connection_fn Nothing
tinfo = Name_Generator.random_name "Tinfo"
connection.execute_update 'CREATE TEMPORARY TABLE "'+tinfo+'" ("strs" VARCHAR, "ints" INTEGER, "bools" BOOLEAN, "reals" REAL)'
t = connection.query (SQL_Query.Table_Name tinfo)
@ -25,45 +30,59 @@ redshift_specific_spec connection =
row3 = ["def", 42, True, 1.4]
Panic.rethrow <|
t.update_rows (Table.from_rows ["strs", "ints", "bools", "reals"] [row1, row2, row3]) update_action=Update_Action.Insert
[connection, t]
Test.specify "should return Table information" <|
i = t.info
teardown self = self.connection.close
add_redshift_specific_specs suite_builder create_connection_fn =
suite_builder.group "[Redshift] Info" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
group_builder.specify "should return Table information" <|
i = data.t.info
i.at "Column" . to_vector . should_equal ["strs", "ints", "bools", "reals"]
i.at "Items Count" . to_vector . should_equal [3, 1, 2, 3]
i.at "Value Type" . to_vector . should_equal [Value_Type.Char, Value_Type.Integer Bits.Bits_32, Value_Type.Boolean, Value_Type.Float Bits.Bits_32]
Test.specify "should infer standard types correctly" <|
t.at "strs" . value_type . is_text . should_be_true
t.at "ints" . value_type . is_integer . should_be_true
t.at "bools" . value_type . is_boolean . should_be_true
t.at "reals" . value_type . is_floating_point . should_be_true
connection.execute_update 'DROP TABLE "'+tinfo+'"'
run_tests connection =
group_builder.specify "should infer standard types correctly" <|
data.t.at "strs" . value_type . is_text . should_be_true
data.t.at "ints" . value_type . is_integer . should_be_true
data.t.at "bools" . value_type . is_boolean . should_be_true
data.t.at "reals" . value_type . is_floating_point . should_be_true
add_database_specs suite_builder create_connection_fn =
prefix = "[Redshift] "
name_counter = Ref.new 0
tables = Vector.new_builder
table_builder columns =
table_builder columns connection=(create_connection_fn Nothing) =
ix = name_counter.get
name_counter . put ix+1
name = Name_Generator.random_name "table_"+ix.to_text
in_mem_table = Table.new columns
in_mem_table.select_into_database_table connection name primary_key=Nothing temporary=True
materialize = .read
Common_Spec.spec prefix connection
redshift_specific_spec connection
Common_Spec.add_specs suite_builder prefix create_connection_fn
add_redshift_specific_specs suite_builder create_connection_fn
common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by_unicode_normalization_by_default=True allows_mixed_type_comparisons=False supports_decimal_type=True
aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last_row_order=False aggregation_problems=False date_support=False
agg_in_memory_table = (enso_project.data / "data.csv") . read
agg_table = agg_in_memory_table.select_into_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True
tables.append agg_table.name
empty_agg_table = (agg_in_memory_table.take (First 0)).select_into_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True
tables.append empty_agg_table.name
agg_table_fn = _->
connection = create_connection_fn Nothing
agg_in_memory_table.select_into_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True
setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table empty_agg_table table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection connection=connection
Common_Table_Operations.Main.spec setup
empty_agg_table_fn = _->
connection = create_connection_fn Nothing
(agg_in_memory_table.take (First 0)).select_into_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True
setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_fn
Common_Table_Operations.Main.add_specs suite_builder setup
connect_via_json_config =
credentials = enso_project.data / 'redshift_credentials.json'
@ -107,16 +126,20 @@ uri_parse uri =
db_name = if host_db_split.length == 1 then '' else host_db_split.at 1
[db_host, db_port, db_name]
spec =
add_specs suite_builder =
db_host_port = Environment.get "ENSO_REDSHIFT_URI"
connection_details = if db_host_port.is_nothing then connect_via_json_config else
connect_via_aws_environment db_host_port
case connection_details of
_ : Text ->
Test.group "[Redshift] Database tests" pending=connection_details Nothing
suite_builder.group "[Redshift] Database tests" pending=connection_details (_-> Nothing)
_ ->
connection = Database.connect connection_details
run_tests connection
create_connection_fn = _->
Database.connect connection_details
add_database_specs suite_builder create_connection_fn
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

View File

@ -1,8 +1,10 @@
from Standard.Base import all
import Standard.Base.Runtime.Ref.Ref
from Standard.Base.Runtime import assert
import Standard.Base.Errors.File_Error.File_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Table.Data.Type.Value_Type.Bits
from Standard.Table import Table, Value_Type
from Standard.Table.Errors import Invalid_Column_Names, Duplicate_Output_Column_Names
@ -12,8 +14,7 @@ import Standard.Database.Internal.Replace_Params.Replace_Params
from Standard.Database import all
from Standard.Database.Errors import SQL_Error, Unsupported_Database_Operation
from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
import project.Database.Common.Common_Spec
import project.Database.Transaction_Spec
@ -22,25 +23,51 @@ import project.Database.Types.SQLite_Type_Mapping_Spec
import project.Database.Helpers.Name_Generator
import project.Common_Table_Operations
sqlite_specific_spec prefix connection setup =
table_builder = setup.table_builder
type Test_Data
Value ~connection
Test.group prefix+"Schemas and Databases" <|
Test.specify "should be able to get current database and list databases" <|
connection.database . should_equal Nothing
connection.databases . should_equal [Nothing]
Meta.is_same_object connection (connection.set_database Nothing) . should_be_true
setup create_connection_func =
connection = create_connection_func Nothing
Test_Data.Value connection
Test.specify "should be able to get current schema and list schemas" <|
connection.schema . should_equal Nothing
connection.schemas . should_equal [Nothing]
Meta.is_same_object connection (connection.set_schema Nothing) . should_be_true
teardown self =
self.connection.close
Test.specify "does not allow changing schema or database" <|
connection.set_schema "foo" . should_fail_with SQL_Error
connection.set_database "foo" . should_fail_with SQL_Error
Test.group prefix+"Tables and Table Types" <|
type Metadata_Data
Value ~data
connection self = self.data.at 0
tinfo self = self.data.at 1
t self = self.data.at 2
setup create_connection_func =
connection = create_connection_func Nothing
tinfo = Name_Generator.random_name "Tinfo"
connection.execute_update 'CREATE TABLE "'+tinfo+'" ("strs" VARCHAR, "ints" INTEGER, "bools" BOOLEAN, "reals" REAL)'
t = connection.query (SQL_Query.Table_Name tinfo)
row1 = ["a", Nothing, False, 1.2]
row2 = ["abc", Nothing, Nothing, 1.3]
row3 = ["def", 42, True, 1.4]
Panic.rethrow <|
t.update_rows (Table.from_rows ["strs", "ints", "bools", "reals"] [row1, row2, row3]) update_action=Update_Action.Insert
Metadata_Data.Value [connection, tinfo, t]
teardown self =
self.connection.drop_table self.t.name
self.connection.close
type Tables_And_Table_Types_Data
Value ~data
connection self = self.data.at 0
tinfo self = self.data.at 1
vinfo self = self.data.at 2
temporary_table self = self.data.at 3
setup create_connection_func = Tables_And_Table_Types_Data.Value <|
connection = create_connection_func Nothing
tinfo = Name_Generator.random_name "TestTable"
connection.execute_update 'CREATE TABLE "'+tinfo+'" ("A" VARCHAR)'
@ -50,114 +77,160 @@ sqlite_specific_spec prefix connection setup =
temporary_table = Name_Generator.random_name "TemporaryTable"
(Table.new [["X", [1, 2, 3]]]).select_into_database_table connection temporary_table temporary=True
Test.specify "should be able to list table types" <|
table_types = connection.table_types
[connection, tinfo, vinfo, temporary_table]
teardown self =
self.connection.close
sqlite_specific_spec suite_builder prefix create_connection_func setup =
table_builder = setup.table_builder
suite_builder.group prefix+"Schemas and Databases" group_builder->
data = Test_Data.setup create_connection_func
group_builder.teardown <|
data.teardown
group_builder.specify "should be able to get current database and list databases" <|
data.connection.database . should_equal Nothing
data.connection.databases . should_equal [Nothing]
Meta.is_same_object data.connection (data.connection.set_database Nothing) . should_be_true
group_builder.specify "should be able to get current schema and list schemas" <|
data.connection.schema . should_equal Nothing
data.connection.schemas . should_equal [Nothing]
Meta.is_same_object data.connection (data.connection.set_schema Nothing) . should_be_true
group_builder.specify "does not allow changing schema or database" <|
data.connection.set_schema "foo" . should_fail_with SQL_Error
data.connection.set_database "foo" . should_fail_with SQL_Error
suite_builder.group prefix+"Tables and Table Types" group_builder->
data = Tables_And_Table_Types_Data.setup create_connection_func
group_builder.teardown <|
data.teardown
group_builder.specify "should be able to list table types" <|
table_types = data.connection.table_types
table_types.length . should_not_equal 0
table_types.contains "TABLE" . should_be_true
table_types.contains "VIEW" . should_be_true
Test.specify "should be able to list tables" <|
tables = connection.tables
group_builder.specify "should be able to list tables" <|
tables = data.connection.tables
tables.row_count . should_not_equal 0
tables.columns.map .name . should_equal ["Database", "Schema", "Name", "Type", "Description"]
table_names = tables.at "Name" . to_vector
table_names.should_contain tinfo
table_names.should_contain vinfo
table_names.should_contain temporary_table
table_names.should_contain data.tinfo
table_names.should_contain data.vinfo
table_names.should_contain data.temporary_table
Test.specify "should be able to filter tables by name" <|
tables = connection.tables tinfo
group_builder.specify "should be able to filter tables by name" <|
tables = data.connection.tables data.tinfo
tables.row_count . should_equal 1
tables.at "Database" . to_vector . at 0 . should_equal Nothing
tables.at "Schema" . to_vector . at 0 . should_equal Nothing
tables.at "Name" . to_vector . at 0 . should_equal tinfo
tables.at "Name" . to_vector . at 0 . should_equal data.tinfo
tables.at "Type" . to_vector . at 0 . should_equal "TABLE"
connection.tables "TestT_ble%" . row_count . should_equal 1
connection.tables "Temporary%ble%" . row_count . should_equal 1
connection.tables "Temporary%ble%" . at "Type" . to_vector . should_equal ["GLOBAL TEMPORARY"]
connection.tables "N_nexistent%" . row_count . should_equal 0
data.connection.tables "TestT_ble%" . row_count . should_equal 1
data.connection.tables "Temporary%ble%" . row_count . should_equal 1
data.connection.tables "Temporary%ble%" . at "Type" . to_vector . should_equal ["GLOBAL TEMPORARY"]
data.connection.tables "N_nexistent%" . row_count . should_equal 0
Test.specify "should be able to filter tables by type" <|
tables = connection.tables types=["VIEW"]
group_builder.specify "should be able to filter tables by type" <|
tables = data.connection.tables types=["VIEW"]
tables.row_count . should_not_equal 0
tables.at "Name" . to_vector . contains tinfo . should_be_false
tables.at "Name" . to_vector . contains vinfo . should_be_true
tables.at "Name" . to_vector . contains data.tinfo . should_be_false
tables.at "Name" . to_vector . contains data.vinfo . should_be_true
Test.group prefix+"Error Handling" <|
Test.specify "should wrap errors" <|
connection.read (SQL_Query.Raw_SQL "foobar") . should_fail_with SQL_Error
connection.execute_update "foobar" . should_fail_with SQL_Error
suite_builder.group prefix+"Error Handling" group_builder->
data = Test_Data.setup create_connection_func
action = connection.read (SQL_Query.Raw_SQL "SELECT A FROM undefined_table")
group_builder.teardown <|
data.teardown
group_builder.specify "should wrap errors" <|
data.connection.read (SQL_Query.Raw_SQL "foobar") . should_fail_with SQL_Error
data.connection.execute_update "foobar" . should_fail_with SQL_Error
action = data.connection.read (SQL_Query.Raw_SQL "SELECT A FROM undefined_table")
action . should_fail_with SQL_Error
action.catch.to_text . should_equal "There was an SQL error: [SQLITE_ERROR] SQL error or missing database (no such table: undefined_table). [Query was: SELECT A FROM undefined_table]"
Test.specify "is capable of handling weird tables" <|
group_builder.specify "is capable of handling weird tables" <|
Problems.assume_no_problems <|
connection.execute_update 'CREATE TEMPORARY TABLE "empty-column-name" ("" VARCHAR)'
t1 = connection.query (SQL_Query.Table_Name "empty-column-name")
data.connection.execute_update 'CREATE TEMPORARY TABLE "empty-column-name" ("" VARCHAR)'
t1 = data.connection.query (SQL_Query.Table_Name "empty-column-name")
Problems.expect_only_warning Invalid_Column_Names t1
t1.column_names . should_equal ["Column 1"]
m1 = t1.read
m1.at "Column 1" . to_vector . should_equal []
Problems.assume_no_problems <|
connection.execute_update 'CREATE TEMPORARY TABLE "clashing-unicode-names" ("ś" VARCHAR, "s\u0301" INTEGER)'
data.connection.execute_update 'CREATE TEMPORARY TABLE "clashing-unicode-names" ("ś" VARCHAR, "s\u0301" INTEGER)'
Problems.assume_no_problems <|
connection.execute_update 'INSERT INTO "clashing-unicode-names" VALUES (\'A\', 2)'
t2 = connection.query (SQL_Query.Table_Name "clashing-unicode-names")
data.connection.execute_update 'INSERT INTO "clashing-unicode-names" VALUES (\'A\', 2)'
t2 = data.connection.query (SQL_Query.Table_Name "clashing-unicode-names")
Problems.expect_only_warning Duplicate_Output_Column_Names t2
t2.column_names . should_equal ["ś", "ś 1"]
m2 = t2.read
m2.at "ś" . to_vector . should_equal ["A"]
m2.at "ś 1" . to_vector . should_equal [2]
r3 = connection.query 'SELECT 1 AS "A", 2 AS "A"'
r3 = data.connection.query 'SELECT 1 AS "A", 2 AS "A"'
r3.should_fail_with Illegal_Argument
r3.catch.cause . should_be_a Duplicate_Output_Column_Names
r4 = connection.query 'SELECT 1 AS ""'
r4 = data.connection.query 'SELECT 1 AS ""'
r4.should_fail_with Illegal_Argument
r4.catch.cause . should_be_a Invalid_Column_Names
tinfo = Name_Generator.random_name "Tinfo"
connection.execute_update 'CREATE TABLE "'+tinfo+'" ("strs" VARCHAR, "ints" INTEGER, "bools" BOOLEAN, "reals" REAL)'
Test.group prefix+"Metadata" <|
t = connection.query (SQL_Query.Table_Name tinfo)
row1 = ["a", Nothing, False, 1.2]
row2 = ["abc", Nothing, Nothing, 1.3]
row3 = ["def", 42, True, 1.4]
Panic.rethrow <|
t.update_rows (Table.from_rows ["strs", "ints", "bools", "reals"] [row1, row2, row3]) update_action=Update_Action.Insert
suite_builder.group prefix+"Metadata" group_builder->
data = Metadata_Data.setup create_connection_func
Test.specify "should return Table information" <|
i = t.info
group_builder.teardown <|
data.teardown
group_builder.specify "should return Table information" <|
i = data.t.info
i.at "Column" . to_vector . should_equal ["strs", "ints", "bools", "reals"]
i.at "Items Count" . to_vector . should_equal [3, 1, 2, 3]
i.at "Value Type" . to_vector . should_equal [Value_Type.Char, Value_Type.Integer, Value_Type.Boolean, Value_Type.Float]
Test.specify "should infer standard types correctly" <|
t.at "strs" . value_type . is_text . should_be_true
t.at "ints" . value_type . is_integer . should_be_true
t.at "bools" . value_type . is_boolean . should_be_true
t.at "reals" . value_type . is_floating_point . should_be_true
group_builder.specify "should infer standard types correctly" <|
data.t.at "strs" . value_type . is_text . should_be_true
data.t.at "ints" . value_type . is_integer . should_be_true
data.t.at "bools" . value_type . is_boolean . should_be_true
data.t.at "reals" . value_type . is_floating_point . should_be_true
t.at "ints" . value_type . is_text . should_be_false
t.at "strs" . value_type . is_integer . should_be_false
t.at "reals" . value_type . is_boolean . should_be_false
t.at "bools" . value_type . is_floating_point . should_be_false
data.t.at "ints" . value_type . is_text . should_be_false
data.t.at "strs" . value_type . is_integer . should_be_false
data.t.at "reals" . value_type . is_boolean . should_be_false
data.t.at "bools" . value_type . is_floating_point . should_be_false
Test.group prefix+"Dialect-specific codegen" <|
Test.specify "should generate queries for the Distinct operation" <|
t = connection.query (SQL_Query.Table_Name tinfo)
suite_builder.group prefix+"Dialect-specific codegen" group_builder->
data = Metadata_Data.setup create_connection_func
group_builder.teardown <|
data.teardown
group_builder.specify "should generate queries for the Distinct operation" <|
t = data.connection.query (SQL_Query.Table_Name data.tinfo)
code_template = 'SELECT "{Tinfo}"."strs" AS "strs", "{Tinfo}"."ints" AS "ints", "{Tinfo}"."bools" AS "bools", "{Tinfo}"."reals" AS "reals" FROM (SELECT "{Tinfo}_inner"."strs" AS "strs", "{Tinfo}_inner"."ints" AS "ints", "{Tinfo}_inner"."bools" AS "bools", "{Tinfo}_inner"."reals" AS "reals" FROM (SELECT "{Tinfo}"."strs" AS "strs", "{Tinfo}"."ints" AS "ints", "{Tinfo}"."bools" AS "bools", "{Tinfo}"."reals" AS "reals" FROM "{Tinfo}" AS "{Tinfo}") AS "{Tinfo}_inner" GROUP BY "{Tinfo}_inner"."strs") AS "{Tinfo}"'
expected_code = code_template.replace "{Tinfo}" tinfo
expected_code = code_template.replace "{Tinfo}" data.tinfo
t.distinct ["strs"] . to_sql . prepare . should_equal [expected_code, []]
Test.group prefix+"math functions" <|
Test.specify "round, trunc, ceil, floor" <|
col = table_builder [["x", [0.1, 0.9, 3.1, 3.9, -0.1, -0.9, -3.1, -3.9]]] . at "x"
suite_builder.group prefix+"math functions" group_builder->
data = Test_Data.setup create_connection_func
group_builder.teardown <|
data.teardown
group_builder.specify "round, trunc, ceil, floor" <|
col = (table_builder [["x", [0.1, 0.9, 3.1, 3.9, -0.1, -0.9, -3.1, -3.9]]] connection=data.connection) . at "x"
col . cast Value_Type.Float . round . value_type . should_equal Value_Type.Float
col . cast Value_Type.Integer . round . value_type . should_equal Value_Type.Float
@ -183,53 +256,63 @@ sqlite_specific_spec prefix connection setup =
col . cast Value_Type.Integer . truncate . value_type . should_equal Value_Type.Integer
col . cast Value_Type.Decimal . truncate . value_type . should_equal Value_Type.Float
do_op n op =
table = table_builder [["x", [n]]]
do_op data n op =
table = table_builder [["x", [n]]] connection=data.connection
result = table.at "x" |> op
result.to_vector.at 0
do_round n dp=0 use_bankers=False = do_op n (_.round dp use_bankers)
Test.specify "Can round correctly near the precision limit" <|
do_round data n dp=0 use_bankers=False = do_op data n (_.round dp use_bankers)
group_builder.specify "Can round correctly near the precision limit" <|
# This value varies depending on the version of SQLite.
do_round 1.2222222222222225 15 . should_equal 1.222222222222223 0.000000000000002
do_round -1.2222222222222225 15 . should_equal -1.222222222222223 0.000000000000002
do_round 1.2222222222222235 15 . should_equal 1.222222222222223
do_round -1.2222222222222235 15 . should_equal -1.222222222222223
do_round data 1.2222222222222225 15 . should_equal 1.222222222222223 0.000000000000002
do_round data -1.2222222222222225 15 . should_equal -1.222222222222223 0.000000000000002
do_round data 1.2222222222222235 15 . should_equal 1.222222222222223
do_round data -1.2222222222222235 15 . should_equal -1.222222222222223
Test.specify "Can round correctly near the precision limit, using banker's rounding" <|
do_round 1.2222222222222225 15 use_bankers=True . should_equal 1.222222222222222
do_round -1.2222222222222225 15 use_bankers=True . should_equal -1.222222222222222
do_round 1.2222222222222235 15 use_bankers=True . should_equal 1.222222222222224
do_round -1.2222222222222235 15 use_bankers=True . should_equal -1.222222222222224
group_builder.specify "Can round correctly near the precision limit, using banker's rounding" <|
do_round data 1.2222222222222225 15 use_bankers=True . should_equal 1.222222222222222
do_round data -1.2222222222222225 15 use_bankers=True . should_equal -1.222222222222222
do_round data 1.2222222222222235 15 use_bankers=True . should_equal 1.222222222222224
do_round data -1.2222222222222235 15 use_bankers=True . should_equal -1.222222222222224
Test.specify "Can handle NaN/Infinity" <|
group_builder.specify "Can handle NaN/Infinity" <|
nan_result = if setup.test_selection.is_nan_and_nothing_distinct then Number.nan else Nothing
ops = [.round, .truncate, .ceil, .floor]
ops.each op->
do_op Number.nan op . should_equal nan_result
do_op Number.positive_infinity op . should_equal Number.positive_infinity
do_op Number.negative_infinity op . should_equal Number.negative_infinity
do_op data Number.nan op . should_equal nan_result
do_op data Number.positive_infinity op . should_equal Number.positive_infinity
do_op data Number.negative_infinity op . should_equal Number.negative_infinity
Test.specify "round returns the correct type" <|
do_round 231.2 1 . should_be_a Float
do_round 231.2 0 . should_be_a Float
do_round 231.2 . should_be_a Float
do_round 231.2 -1 . should_be_a Float
group_builder.specify "round returns the correct type" <|
do_round data 231.2 1 . should_be_a Float
do_round data 231.2 0 . should_be_a Float
do_round data 231.2 . should_be_a Float
do_round data 231.2 -1 . should_be_a Float
Test.specify "round returns the correct type" <|
do_round 231 1 . should_be_a Float
do_round 231 0 . should_be_a Float
do_round 231 . should_be_a Float
do_round 231 -1 . should_be_a Float
group_builder.specify "round returns the correct type" <|
do_round data 231 1 . should_be_a Float
do_round data 231 0 . should_be_a Float
do_round data 231 . should_be_a Float
do_round data 231 -1 . should_be_a Float
Test.group prefix+"Column.const" <|
Test.specify "Does not support making a constant column from a Date" <|
t = table_builder [["x", ["1", "2", "3"]]]
suite_builder.group prefix+"Column.const" group_builder->
data = Test_Data.setup create_connection_func
group_builder.teardown <|
data.teardown
group_builder.specify "Does not support making a constant column from a Date" <|
t = table_builder [["x", ["1", "2", "3"]]] connection=data.connection
t.at "x" . const (Date.new 12 4 12) . should_fail_with Unsupported_Database_Operation
sqlite_spec connection prefix =
sqlite_spec suite_builder prefix create_connection_func =
name_counter = Ref.new 0
table_builder columns =
# The default `connection` parameter always creates a new connection.
# In some tests, for example when joining tables, we have to pass
# exactly the same connection to both tables.
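# A hypothetical usage sketch:
#     shared = create_connection_func Nothing
#     t1 = table_builder [["A", [1, 2]]] connection=shared
#     t2 = table_builder [["B", [3, 4]]] connection=shared
#     t1.join t2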
table_builder columns connection=(create_connection_func Nothing) =
ix = name_counter.get
name_counter . put ix+1
name = Name_Generator.random_name "table_"+ix.to_text
@ -238,7 +321,7 @@ sqlite_spec connection prefix =
in_mem_table.select_into_database_table connection name primary_key=Nothing
materialize = .read
Common_Spec.spec prefix connection
Common_Spec.add_specs suite_builder prefix create_connection_func
common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=False order_by=True natural_ordering=False case_insensitive_ordering=True case_insensitive_ascii_only=True is_nan_and_nothing_distinct=False date_time=False supported_replace_params=supported_replace_params different_size_integer_types=False length_restricted_text_columns=False
@ -252,14 +335,19 @@ sqlite_spec connection prefix =
the missing statistics.
aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config advanced_stats=False text_shortest_longest=False first_last=False first_last_row_order=False multi_distinct=False aggregation_problems=False nan=False date_support=False
agg_in_memory_table = (enso_project.data / "data.csv") . read
agg_table = agg_in_memory_table.select_into_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True
empty_agg_table = (agg_in_memory_table.take (First 0)).select_into_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True
setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table empty_agg_table table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection connection=connection
sqlite_specific_spec prefix connection setup
Common_Table_Operations.Main.spec setup
agg_table_fn = _ ->
connection = create_connection_func Nothing
agg_in_memory_table.select_into_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True
empty_agg_table_fn = _ ->
connection = create_connection_func Nothing
(agg_in_memory_table.take (First 0)).select_into_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True
setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_func
sqlite_specific_spec suite_builder prefix create_connection_func setup
Common_Table_Operations.Main.add_specs suite_builder setup
connection.close
## PRIVATE
supported_replace_params : Set Replace_Params
@ -267,38 +355,66 @@ supported_replace_params =
e = [Replace_Params.Value Text Case_Sensitivity.Default False, Replace_Params.Value Text Case_Sensitivity.Sensitive False, Replace_Params.Value Text Case_Sensitivity.Default True, Replace_Params.Value Text Case_Sensitivity.Sensitive True, Replace_Params.Value Text Case_Sensitivity.Insensitive True]
Set.from_vector e
spec =
enso_project.data.create_directory
file = enso_project.data / "transient" / "sqlite_test.db"
file.delete_if_exists
in_file_prefix = "[SQLite File] "
sqlite_spec (Database.connect (SQLite file)) in_file_prefix
Transaction_Spec.spec (Database.connect (SQLite file)) in_file_prefix
Upload_Spec.spec (_ -> Database.connect (SQLite file)) in_file_prefix
file.delete
backing_file =
transient_dir = enso_project.data / "transient"
assert transient_dir.exists ("There should be .gitignore file in the " + transient_dir.path + " directory")
transient_dir / "sqlite_test.db"
in_memory_prefix = "[SQLite In-Memory] "
sqlite_spec (Database.connect (SQLite In_Memory)) in_memory_prefix
Transaction_Spec.spec (Database.connect (SQLite In_Memory)) in_memory_prefix
Upload_Spec.spec (_ -> Database.connect (SQLite In_Memory)) in_memory_prefix persistent_connector=False
SQLite_Type_Mapping_Spec.spec
create_inmem_connection =
Database.connect (SQLite In_Memory)
Test.group "SQLite_Format should allow connecting to SQLite files" <|
file.delete_if_exists
create_file_connection file =
connection = Database.connect (SQLite file)
connection.execute_update 'CREATE TABLE "Dummy" ("strs" VARCHAR, "ints" INTEGER, "bools" BOOLEAN, "reals" REAL)'
connection.close
connection
Test.specify "should recognise a SQLite database file" <|
Auto_Detect.get_reading_format file . should_be_a SQLite_Format
Test.specify "should recognise a sqlite file by extension for writing" <|
type File_Connection
Value ~file
setup = File_Connection.Value <|
tmp_file = backing_file
con = create_file_connection backing_file
con.close
assert tmp_file.exists
tmp_file
teardown self =
assert self.file.exists
self.file.delete
add_specs suite_builder =
in_file_prefix = "[SQLite File] "
sqlite_spec suite_builder in_file_prefix (_ -> create_file_connection backing_file)
Transaction_Spec.add_specs suite_builder (_ -> create_file_connection backing_file) in_file_prefix
Upload_Spec.add_specs suite_builder (_ -> create_file_connection backing_file) in_file_prefix
in_memory_prefix = "[SQLite In-Memory] "
sqlite_spec suite_builder in_memory_prefix (_ -> create_inmem_connection)
Transaction_Spec.add_specs suite_builder (_ -> create_inmem_connection) in_memory_prefix
Upload_Spec.add_specs suite_builder (_ -> create_inmem_connection) in_memory_prefix persistent_connector=False
SQLite_Type_Mapping_Spec.add_specs suite_builder
suite_builder.group "SQLite_Format should allow connecting to SQLite files" group_builder->
data = File_Connection.setup
group_builder.teardown <|
data.teardown
group_builder.specify "should recognise a SQLite database file" <|
Auto_Detect.get_reading_format data.file . should_be_a SQLite_Format
group_builder.specify "should recognise a sqlite file by extension for writing" <|
Auto_Detect.get_writing_format (enso_project.data / "nonexistent-data.db") . should_be_a SQLite_Format
Auto_Detect.get_writing_format (enso_project.data / "nonexistent-data.sqlite") . should_be_a SQLite_Format
Test.specify "should not recognise nonexistent or empty files for reading" <|
group_builder.specify "should not recognise nonexistent or empty files for reading" <|
r1 = Data.read (enso_project.data / "nonexistent-data.db")
r1.should_fail_with File_Error
r1.catch . should_be_a File_Error.Not_Found
@ -317,19 +433,20 @@ spec =
r3.catch . should_be_a File_Error.Unsupported_Type
broken.delete_if_exists
Test.specify "should connect to a db file" <|
connection = Data.read file
group_builder.specify "should connect to a db file" <|
connection = Data.read data.file
tables = connection.tables
tables.row_count . should_not_equal 0
connection.close
file.delete_if_exists
Test.specify 'should not duplicate warnings' <|
group_builder.specify 'should not duplicate warnings' <|
c = Database.connect (SQLite In_Memory)
t0 = Table.new [["X", ["a", "bc", "def"]]]
t1 = t0.select_into_database_table c "Tabela"
t2 = t1.cast "X" (Value_Type.Char size=1)
Warning.get_all t2 . length . should_equal 1
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
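
Taken together, the migrated runner above shows the whole builder-API surface a test file needs. A minimal self-contained sketch (the group name and the assertion are illustrative placeholders, not code from this PR):

    from Standard.Base import all
    from Standard.Test_New import all

    add_specs suite_builder =
        suite_builder.group "Example group" group_builder->
            group_builder.specify "should pass a trivial check" <|
                (1 + 1) . should_equal 2

    main =
        suite = Test.build suite_builder->
            add_specs suite_builder
        suite.run_with_filter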

View File

@ -7,65 +7,80 @@ from Standard.Database.Errors import all
import Standard.Database.Internal.IR.Query.Query
import Standard.Database.Internal.IR.SQL_Expression.SQL_Expression
from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
import project.Database.Helpers.Name_Generator
type My_Error
Error
main = Test_Suite.run_main <|
spec (Database.connect (SQLite In_Memory)) "[SQLite] "
type Data
Value ~data
spec connection prefix =
Test.group prefix+"Transaction Support" <|
connection self = self.data.at 0
simple_table_structure self = self.data.at 1
setup create_connection_func = Data.Value <|
connection = create_connection_func Nothing
simple_table_structure = [Column_Description.Value "X" Value_Type.Integer]
[connection, simple_table_structure]
teardown self =
self.connection.close
add_specs suite_builder create_connection_func prefix =
suite_builder.group prefix+"Transaction Support" group_builder->
data = Data.setup create_connection_func
group_builder.teardown <|
data.teardown
insert_value table_name x =
pairs = [["X", SQL_Expression.Constant x]]
sql = connection.dialect.generate_sql <| Query.Insert table_name pairs
connection.execute_update sql . should_succeed
Test.specify "should commit the changes after the transaction returns a regular value" <|
sql = data.connection.dialect.generate_sql <| Query.Insert table_name pairs
data.connection.execute_update sql . should_succeed
group_builder.specify "should commit the changes after the transaction returns a regular value" <|
table_name = Name_Generator.random_name "transaction-test-1"
t1 = connection.create_table table_name=table_name structure=simple_table_structure temporary=True
t1 = data.connection.create_table table_name=table_name structure=data.simple_table_structure temporary=True
t1.should_succeed
r1 = connection.jdbc_connection.run_within_transaction <|
r1 = data.connection.jdbc_connection.run_within_transaction <|
insert_value table_name 1
42
r1.should_equal 42
t1.at "X" . to_vector . should_equal [1]
connection.drop_table table_name
data.connection.drop_table table_name
Test.specify "should rollback the changes after the inner action panics" <|
group_builder.specify "should rollback the changes after the inner action panics" <|
table_name = Name_Generator.random_name "transaction-test-2"
t1 = connection.create_table table_name=table_name structure=simple_table_structure temporary=True
t1 = data.connection.create_table table_name=table_name structure=data.simple_table_structure temporary=True
t1.should_succeed
Test.expect_panic_with matcher=My_Error <|
connection.jdbc_connection.run_within_transaction <|
data.connection.jdbc_connection.run_within_transaction <|
insert_value table_name 1
Panic.throw My_Error.Error
t1.at "X" . to_vector . should_equal []
connection.drop_table table_name
data.connection.drop_table table_name
Test.specify "should rollback the changes if the inner action returns a dataflow error" <|
group_builder.specify "should rollback the changes if the inner action returns a dataflow error" <|
table_name = Name_Generator.random_name "transaction-test-3"
t1 = connection.create_table table_name=table_name structure=simple_table_structure temporary=True
t1 = data.connection.create_table table_name=table_name structure=data.simple_table_structure temporary=True
t1.should_succeed
r1 = connection.jdbc_connection.run_within_transaction <|
r1 = data.connection.jdbc_connection.run_within_transaction <|
insert_value table_name 1
Error.throw My_Error.Error
r1.should_fail_with My_Error
t1.at "X" . to_vector . should_equal []
connection.drop_table table_name
data.connection.drop_table table_name
Test.specify "should commit the changes even if the inner action return value has warnings attached" <|
group_builder.specify "should commit the changes even if the inner action return value has warnings attached" <|
table_name = Name_Generator.random_name "transaction-test-4"
t1 = connection.create_table table_name=table_name structure=simple_table_structure temporary=True
t1 = data.connection.create_table table_name=table_name structure=data.simple_table_structure temporary=True
t1.should_succeed
r1 = connection.jdbc_connection.run_within_transaction <|
r1 = data.connection.jdbc_connection.run_within_transaction <|
insert_value table_name 1
result = 43
with_warnings = Warning.attach My_Error.Error result
@ -74,4 +89,4 @@ spec connection prefix =
Problems.expect_only_warning My_Error r1
t1.at "X" . to_vector . should_equal [1]
connection.drop_table table_name
data.connection.drop_table table_name
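
The `Data` pattern above leans on Enso's suspended (`~`) constructor fields: `Data.setup` only stores a thunk, so the connection is opened lazily on first access from a spec, and `group_builder.teardown` closes it once the group finishes. A condensed sketch of the same shape (the `IO.println` is only there to make the laziness visible; it is not in the PR):

    type Data
        Value ~data  # suspended: the setup block runs on first access, not at `setup` time

        connection self = self.data.at 0

        setup create_connection_func = Data.Value <|
            IO.println "opening connection"  # illustrative side effect
            [create_connection_func Nothing]

        teardown self = self.connection.close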

View File

@ -7,24 +7,39 @@ from Standard.Table.Errors import Inexact_Type_Coercion
from Standard.Database import all
from Standard.Test import Problems, Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all
import project.Database.Helpers.Name_Generator
from project.Database.Postgres_Spec import create_connection_builder
spec connection =
make_table prefix columns =
type Data
Value ~connection
setup create_connection_fn =
Data.Value (create_connection_fn Nothing)
make_table self prefix columns =
name = Name_Generator.random_name prefix
column_exprs = columns.map col_def->
col_def.first + " " + col_def.second
stmt = 'CREATE TEMPORARY TABLE "'+name+'" ('+(column_exprs.join ', ')+');'
Problems.assume_no_problems <| connection.execute_update stmt
connection.query (SQL_Query.Table_Name name)
Problems.assume_no_problems <| self.connection.execute_update stmt
self.connection.query (SQL_Query.Table_Name name)
Test.group "[PostgreSQL] Type Mapping" <|
Test.specify "numeric" <|
t = make_table "ints" [["a", "smallint"], ["b", "int2"], ["c", "int"], ["d", "integer"], ["e", "int4"], ["f", "bigint"], ["g", "int8"]]
teardown self =
self.connection.close
add_specs suite_builder create_connection_fn =
suite_builder.group "[PostgreSQL] Type Mapping" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
group_builder.specify "numeric" <|
t = data.make_table "ints" [["a", "smallint"], ["b", "int2"], ["c", "int"], ["d", "integer"], ["e", "int4"], ["f", "bigint"], ["g", "int8"]]
t.at "a" . value_type . should_equal (Value_Type.Integer Bits.Bits_16)
t.at "b" . value_type . should_equal (Value_Type.Integer Bits.Bits_16)
t.at "c" . value_type . should_equal (Value_Type.Integer Bits.Bits_32)
@ -33,13 +48,13 @@ spec connection =
t.at "f" . value_type . should_equal (Value_Type.Integer Bits.Bits_64)
t.at "g" . value_type . should_equal (Value_Type.Integer Bits.Bits_64)
t2 = make_table "floats" [["a", "real"], ["b", "float4"], ["c", "double precision"], ["d", "float8"]]
t2 = data.make_table "floats" [["a", "real"], ["b", "float4"], ["c", "double precision"], ["d", "float8"]]
t2.at "a" . value_type . should_equal (Value_Type.Float Bits.Bits_32)
t2.at "b" . value_type . should_equal (Value_Type.Float Bits.Bits_32)
t2.at "c" . value_type . should_equal (Value_Type.Float Bits.Bits_64)
t2.at "d" . value_type . should_equal (Value_Type.Float Bits.Bits_64)
t3 = make_table "decimals" [["a", "decimal"], ["b", "numeric"], ["c", "decimal(10, 2)"], ["d", "numeric(20, 4)"], ["e", "decimal(10)"], ["f", "numeric(20)"]]
t3 = data.make_table "decimals" [["a", "decimal"], ["b", "numeric"], ["c", "decimal(10, 2)"], ["d", "numeric(20, 4)"], ["e", "decimal(10)"], ["f", "numeric(20)"]]
t3.at "a" . value_type . should_equal Value_Type.Decimal
t3.at "b" . value_type . should_equal Value_Type.Decimal
t3.at "c" . value_type . should_equal (Value_Type.Decimal precision=10 scale=2)
@ -47,41 +62,41 @@ spec connection =
t3.at "e" . value_type . should_equal (Value_Type.Decimal precision=10 scale=0)
t3.at "f" . value_type . should_equal (Value_Type.Decimal precision=20 scale=0)
Test.specify "text" <|
t = make_table "texts" [["a", "char(10)"], ["b", "varchar"], ["c", "varchar(20)"], ["d", "text"]]
group_builder.specify "text" <|
t = data.make_table "texts" [["a", "char(10)"], ["b", "varchar"], ["c", "varchar(20)"], ["d", "text"]]
t.at "a" . value_type . should_equal (Value_Type.Char size=10 variable_length=False)
t.at "b" . value_type . should_equal default_text
t.at "c" . value_type . should_equal (Value_Type.Char size=20 variable_length=True)
t.at "d" . value_type . should_equal default_text
Test.specify "binary" <|
group_builder.specify "binary" <|
# Postgres does not support a BLOB type; it has `bytea` instead.
t = make_table "binaries" [["a", "bytea"]]
t = data.make_table "binaries" [["a", "bytea"]]
t.at "a" . value_type . should_equal (Value_Type.Binary size=max_int4 variable_length=True)
Test.specify "datetime" <|
t = make_table "dates" [["a", "date"]]
group_builder.specify "datetime" <|
t = data.make_table "dates" [["a", "date"]]
t.at "a" . value_type . should_equal Value_Type.Date
t2 = make_table "times" [["a", "time"], ["b", "timetz"], ["c", "time without time zone"], ["d", "time with time zone"]]
t2 = data.make_table "times" [["a", "time"], ["b", "timetz"], ["c", "time without time zone"], ["d", "time with time zone"]]
t2.at "a" . value_type . should_equal Value_Type.Time
t2.at "b" . value_type . should_equal Value_Type.Time
t2.at "c" . value_type . should_equal Value_Type.Time
t2.at "d" . value_type . should_equal Value_Type.Time
t3 = make_table "timestamps" [["a", "timestamp"], ["b", "timestamptz"], ["c", "timestamp without time zone"], ["d", "timestamp with time zone"]]
t3 = data.make_table "timestamps" [["a", "timestamp"], ["b", "timestamptz"], ["c", "timestamp without time zone"], ["d", "timestamp with time zone"]]
t3.at "a" . value_type . should_equal (Value_Type.Date_Time with_timezone=False)
t3.at "b" . value_type . should_equal (Value_Type.Date_Time with_timezone=True)
t3.at "c" . value_type . should_equal (Value_Type.Date_Time with_timezone=False)
t3.at "d" . value_type . should_equal (Value_Type.Date_Time with_timezone=True)
Test.specify "boolean" <|
t = make_table "bools" [["a", "boolean"], ["b", "bool"]]
group_builder.specify "boolean" <|
t = data.make_table "bools" [["a", "boolean"], ["b", "bool"]]
t.at "a" . value_type . should_equal Value_Type.Boolean
t.at "b" . value_type . should_equal Value_Type.Boolean
Test.specify "should correctly handle types through operations" <|
t = make_table "foo" [["a", "int2"], ["b", "text"], ["c", "boolean"], ["d", "double precision"], ["e", "int4"]]
group_builder.specify "should correctly handle types through operations" <|
t = data.make_table "foo" [["a", "int2"], ["b", "text"], ["c", "boolean"], ["d", "double precision"], ["e", "int4"]]
t.evaluate_expression 'starts_with([b], "1")' . value_type . should_equal Value_Type.Boolean
t.evaluate_expression '[a] * [d]' . value_type . should_equal (Value_Type.Float Bits.Bits_64)
@ -106,33 +121,38 @@ spec connection =
t2.at "First c" . value_type . should_equal Value_Type.Boolean
# Postgres does not try to be clever: the concatenation of two fixed-length columns is promoted to a varying-length column.
t3 = make_table "foo2" [["a", "char(5)"], ["b", "char(8)"]]
t3 = data.make_table "foo2" [["a", "char(5)"], ["b", "char(8)"]]
t3.evaluate_expression '[a] + [b]' . value_type . should_equal default_text
Test.specify "other types" <|
t = make_table "other" [["a", "box"], ["b", "polygon"]]
group_builder.specify "other types" <|
t = data.make_table "other" [["a", "box"], ["b", "polygon"]]
t.at "a" . value_type . should_be_a (Value_Type.Unsupported_Data_Type ...)
t.at "a" . value_type . type_name . should_equal "box"
t.at "b" . value_type . should_be_a (Value_Type.Unsupported_Data_Type ...)
t.at "b" . value_type . type_name . should_equal "polygon"
# Bit strings are not supported by Enso tables at the moment.
t2 = make_table "bit strings" [["a", "bit(2)"], ["b", "bit varying"], ["c", "bit varying(10)"]]
t2 = data.make_table "bit strings" [["a", "bit(2)"], ["b", "bit varying"], ["c", "bit varying(10)"]]
t2.at "a" . value_type . should_be_a (Value_Type.Unsupported_Data_Type ...)
t2.at "a" . value_type . type_name . should_equal "bit"
t2.at "b" . value_type . should_be_a (Value_Type.Unsupported_Data_Type ...)
t2.at "c" . value_type . should_be_a (Value_Type.Unsupported_Data_Type ...)
Test.specify "should approximate types to the closest supported one" <|
t = make_table "T" [["b", "INT"]]
group_builder.specify "should approximate types to the closest supported one" <|
t = data.make_table "T" [["b", "INT"]]
t2 = t.cast "b" Value_Type.Byte
t2.at "b" . value_type . should_equal (Value_Type.Integer Bits.Bits_16)
Problems.expect_warning Inexact_Type_Coercion t2
Test.group "[PostgreSQL] Type Edge Cases" <|
Test.specify "will fail to read a BigDecimal column and suggest to cast it to Float" <|
suite_builder.group "[PostgreSQL] Type Edge Cases" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
group_builder.specify "will fail to read a BigDecimal column and suggest to cast it to Float" <|
table_name = Name_Generator.random_name "BigDecimal"
table = connection.create_table table_name [Column_Description.Value "B" (Value_Type.Decimal precision=100 scale=5)] primary_key=[]
table = data.connection.create_table table_name [Column_Description.Value "B" (Value_Type.Decimal precision=100 scale=5)] primary_key=[]
Problems.assume_no_problems table
Problems.expect_only_warning Inexact_Type_Coercion <|
@ -147,20 +167,20 @@ spec connection =
w2.requested_type . should_equal (Value_Type.Decimal precision=100 scale=5)
w2.actual_type . should_equal Value_Type.Float
Test.specify "should warn when fetching a Binary column and coercing it to Mixed because in-memory does not support Binary" <|
group_builder.specify "should warn when fetching a Binary column and coercing it to Mixed because in-memory does not support Binary" <|
table_name = Name_Generator.random_name "Bin"
table = connection.create_table table_name [Column_Description.Value "B" (Value_Type.Binary size=10)] primary_key=[]
table = data.connection.create_table table_name [Column_Description.Value "B" (Value_Type.Binary size=10)] primary_key=[]
w0 = Problems.expect_only_warning Inexact_Type_Coercion table
w0.requested_type . should_equal (Value_Type.Binary size=10)
w0.actual_type . should_equal (Value_Type.Binary variable_length=True size=2147483647)
table_clean = table.remove_warnings
Problems.assume_no_problems <|
table_clean.update_rows (connection.query 'SELECT decode(\'ffff\', \'hex\') AS "B"') update_action=Update_Action.Insert
table_clean.update_rows (data.connection.query 'SELECT decode(\'ffff\', \'hex\') AS "B"') update_action=Update_Action.Insert
Problems.assume_no_problems <|
table_clean.update_rows (connection.query 'SELECT decode(\'caffee\', \'hex\') AS "B"') update_action=Update_Action.Insert
table_clean.update_rows (data.connection.query 'SELECT decode(\'caffee\', \'hex\') AS "B"') update_action=Update_Action.Insert
Problems.assume_no_problems <|
table_clean.update_rows (connection.query 'SELECT decode(\'beef\', \'hex\') AS "B"') update_action=Update_Action.Insert
table_clean.update_rows (data.connection.query 'SELECT decode(\'beef\', \'hex\') AS "B"') update_action=Update_Action.Insert
materialized_table = table_clean.read
materialized_table.at "B" . value_type . should_equal Value_Type.Mixed
@ -174,9 +194,11 @@ spec connection =
beef = [-66, -17]
materialized_table.at "B" . to_vector . should_equal [ffff, caffee, beef]
main = Test_Suite.run_main <|
connection = create_connection_builder Nothing
spec connection
main =
create_connection_fn = _ -> create_connection_builder Nothing
suite = Test.build suite_builder->
add_specs suite_builder create_connection_fn
suite.run_with_filter
max_int4 = 2147483647
default_text = Value_Type.Char size=Nothing variable_length=True
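
For reference, `data.make_table` above issues a `CREATE TEMPORARY TABLE` and queries it back, so each spec boils down to mapping column definitions to expected `Value_Type`s. A sketch relying on the helper and the constants defined in this file (column names are illustrative):

    t = data.make_table "example" [["a", "int4"], ["b", "text"]]
    t.at "a" . value_type . should_equal (Value_Type.Integer Bits.Bits_32)
    t.at "b" . value_type . should_equal default_text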

View File

@ -11,23 +11,36 @@ import Standard.Database.Internal.SQLite.SQLite_Type_Mapping
from Standard.Database import Database, SQLite, In_Memory, SQL_Query
from Standard.Database.Errors import Unsupported_Database_Operation
from Standard.Test import Problems, Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all
import project.Database.Helpers.Name_Generator
spec =
type Data
Value ~connection
setup =
connection = Database.connect (SQLite In_Memory)
Data.Value connection
teardown self = self.connection.close
add_specs suite_builder =
suite_builder.group "[SQLite] Type Mapping" group_builder->
data = Data.setup
group_builder.teardown <|
data.teardown
make_table prefix columns =
name = prefix
column_exprs = columns.map col_def->
col_def.first + " " + col_def.second
stmt = 'CREATE TABLE "'+name+'" ('+(column_exprs.join ', ')+');'
Problems.assume_no_problems <| connection.execute_update stmt
connection.query (SQL_Query.Table_Name name)
Problems.assume_no_problems <| data.connection.execute_update stmt
data.connection.query (SQL_Query.Table_Name name)
Test.group "[SQLite] Type Mapping" <|
Test.specify "should map its basic affinities" <|
group_builder.specify "should map its basic affinities" <|
t = make_table "basics" [["int", "INTEGER"], ["real", "REAL"], ["text", "TEXT"], ["blob", "BLOB"], ["numeric", "NUMERIC"]]
t.at "int" . value_type . should_equal (Value_Type.Integer Bits.Bits_64)
t.at "real" . value_type . should_equal (Value_Type.Float Bits.Bits_64)
@ -36,7 +49,7 @@ spec =
# We treat numeric as a float, since that is what really sits in SQLite under the hood.
t.at "numeric" . value_type . should_equal (Value_Type.Float Bits.Bits_64)
Test.specify "should map complex types to their closest approximation" <|
group_builder.specify "should map complex types to their closest approximation" <|
t = make_table "complex" [["a", "VARCHAR(15)"], ["b", "CHAR(10)"], ["c", "BINARY(10)"], ["d", "BIGINT"], ["e", "SMALLINT"], ["f", "TINYINT"], ["g", "FLOAT"], ["h", "DOUBLE"]]
t.at "a" . value_type . should_equal Value_Type.Char
t.at "b" . value_type . should_equal Value_Type.Char
@ -47,7 +60,7 @@ spec =
t.at "g" . value_type . should_equal (Value_Type.Float Bits.Bits_64)
t.at "h" . value_type . should_equal (Value_Type.Float Bits.Bits_64)
Test.specify "should allow for special handling of booleans to support the Boolean type that does not have a builtin affinity" <|
group_builder.specify "should allow for special handling of booleans to support the Boolean type that does not have a builtin affinity" <|
t = make_table "bools" [["a", "BOOLEAN"], ["b", "BOOLEAN"]]
t.at "b" . value_type . should_equal Value_Type.Boolean
@ -60,7 +73,7 @@ spec =
c3 = t.evaluate_expression "[a] && 10"
c3.should_fail_with Invalid_Value_Type
Test.specify "should correctly handle types through operations" <|
group_builder.specify "should correctly handle types through operations" <|
t = make_table "foo" [["a", "int"], ["b", "text"], ["c", "boolean"], ["d", "double precision"]]
t.evaluate_expression 'starts_with([b], "1")' . value_type . should_equal Value_Type.Boolean
@ -86,7 +99,7 @@ spec =
# First is not currently implemented in SQLite
# t2.at "First c" . value_type . should_equal Value_Type.Boolean
Test.specify "should approximate types to the closest supported one" <|
group_builder.specify "should approximate types to the closest supported one" <|
t = make_table "approx-table" [["a", "BINARY"], ["b", "INT"]]
t1 = t.cast "a" (Value_Type.Binary 10 variable_length=True)
@ -97,23 +110,22 @@ spec =
t2.at "b" . value_type . should_equal Value_Type.Integer
Problems.expect_warning Inexact_Type_Coercion t2
Test.specify "will coerce date/time columns to Text in existing tables" <|
group_builder.specify "will coerce date/time columns to Text in existing tables" <|
t = make_table "datetime-table" [["a", "DATE"], ["b", "TIME"], ["c", "DATETIME"], ["d", "TIMESTAMP"]]
t.at "a" . value_type . should_equal Value_Type.Char
t.at "b" . value_type . should_equal Value_Type.Char
t.at "c" . value_type . should_equal Value_Type.Char
t.at "d" . value_type . should_equal Value_Type.Char
Test.specify "does not support creating tables with date/time values" <|
group_builder.specify "does not support creating tables with date/time values" <|
t = Table.new [["a", [Date.today]], ["b", [Time_Of_Day.now]], ["c", [Date_Time.now]]]
r1 = t.select_into_database_table connection table_name=(Name_Generator.random_name "date-time-table") temporary=True
r1 = t.select_into_database_table data.connection table_name=(Name_Generator.random_name "date-time-table") temporary=True
r1.should_fail_with Unsupported_Database_Operation
Test.specify "should be able to infer types for all supported operations" <|
group_builder.specify "should be able to infer types for all supported operations" <|
dialect = Dialect.sqlite
internal_mapping = dialect.internal_generator_dialect.operation_map
operation_type_mapping = SQLite_Type_Mapping.operations_map
operation_type_mapping.keys.sort . should_equal internal_mapping.keys.sort
main = Test_Suite.run_main spec
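
The mechanical core of the migration visible in this hunk is a rename plus one extra layer of nesting; side by side, with a placeholder assertion for the body:

    # Old Test library:
    spec =
        Test.group "[SQLite] Type Mapping" <|
            Test.specify "maps an affinity" <|
                (2 + 2) . should_equal 4

    # New Test_New builder API:
    add_specs suite_builder =
        suite_builder.group "[SQLite] Type Mapping" group_builder->
            group_builder.specify "maps an affinity" <|
                (2 + 2) . should_equal 4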

File diff suppressed because it is too large

View File

@ -6,8 +6,7 @@ from Standard.Base.Data.Time.Errors import Date_Time_Format_Parse_Error, Suspici
from Standard.Table import Table, Column, Data_Formatter, Quote_Style, Value_Type
from Standard.Table.Errors import all
from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
type Custom_Type
Value field
@ -30,9 +29,9 @@ type Custom_Type_With_Panic
to_text : Text
to_text self = Panic.throw (Illegal_State.Error "foo_panic")
spec =
Test.group "DataFormatter.parse" <|
Test.specify "should parse numbers" <|
add_specs suite_builder =
suite_builder.group "DataFormatter.parse" group_builder->
group_builder.specify "should parse numbers" <|
formatter = Data_Formatter.Value
formatter.parse "123" . should_equal 123
formatter.parse "1000000" . should_equal 1000000
@ -47,7 +46,7 @@ spec =
formatter.parse "-Infinity" . should_equal (Number.negative_infinity)
formatter.parse "NaN" . is_nan . should_be_true
Test.specify "should prefer the US decimal point in auto mode" <|
group_builder.specify "should prefer the US decimal point in auto mode" <|
formatter = Data_Formatter.Value
formatter.parse "1.5" . should_equal 1.5
formatter.parse "1.25" . should_equal 1.25
@ -61,7 +60,7 @@ spec =
formatter.parse "1'000" . should_equal 1000
formatter.parse "1.000.000" . should_equal 1000000
Test.specify "should allow customizing the decimal point and thousand separator" <|
group_builder.specify "should allow customizing the decimal point and thousand separator" <|
formatter = Data_Formatter.Value thousand_separator="_" decimal_point=","
formatter.parse "123" . should_equal 123
formatter.parse "1_000_000" . should_equal 1000000
@ -73,7 +72,7 @@ spec =
formatter.parse "-1,0" . should_equal -1.0
formatter.parse "1,0001" . should_equal 1.0001
Test.specify "should never infer thousand separator to be equal to decimal point" <|
group_builder.specify "should never infer thousand separator to be equal to decimal point" <|
f1 = Data_Formatter.Value decimal_point="."
f1.parse "1.0" . should_equal 1.0
f1.parse "1.000" . should_equal 1.0
@ -103,7 +102,7 @@ spec =
r6 = Data_Formatter.Value decimal_point="." thousand_separator="."
r6.parse "1.000" . should_fail_with Illegal_Argument
Test.specify "should support exponential notation, but only if explicitly enabled" <|
group_builder.specify "should support exponential notation, but only if explicitly enabled" <|
plain_formatter = Data_Formatter.Value
exponential_formatter = Data_Formatter.Value allow_exponential_notation=True
plain_formatter.parse "1E3" . should_equal "1E3"
@ -121,7 +120,7 @@ spec =
exponential_formatter.parse "1.2E-3" . should_equal 0.0012
exponential_formatter.parse "1.2E-3" Value_Type.Float . should_equal 0.0012
Test.specify "handle leading zeros, only if enabled" <|
group_builder.specify "handle leading zeros, only if enabled" <|
Data_Formatter.Value.parse "0100" . should_equal "0100"
Data_Formatter.Value.parse "000" . should_equal "000"
Data_Formatter.Value.parse "000.0" . should_equal "000.0"
@ -130,18 +129,18 @@ spec =
formatter.parse "000" . should_equal 0
formatter.parse "000.0" . should_equal 0.0
Test.specify "should parse booleans" <|
group_builder.specify "should parse booleans" <|
formatter = Data_Formatter.Value
formatter.parse "True" . should_equal True
formatter.parse "False" . should_equal False
Test.specify "should allow custom boolean formats" <|
group_builder.specify "should allow custom boolean formats" <|
formatter = Data_Formatter.Value true_values=["YES", "1", "true"] false_values=["NO", "0", "false"]
formatter.parse "YES" . should_equal True
formatter.parse "NO" . should_equal False
(Data_Formatter.Value true_values=[] false_values=[]).parse "True" type=Value_Type.Boolean . should_equal Nothing
Test.specify "should parse dates" <|
group_builder.specify "should parse dates" <|
formatter = Data_Formatter.Value
formatter.parse "2022-01-01" . should_equal (Date.new 2022)
formatter.parse "2020-05-07" type=Value_Type.Date . should_equal (Date.new 2020 5 7)
@ -168,7 +167,7 @@ spec =
formatter.parse "1999-01-01 00:00" type=Value_Type.Date . should_equal Nothing
formatter.parse "30:00:65" . should_equal "30:00:65"
Test.specify "should report the warnings when parsing dates with suspicious format" <|
group_builder.specify "should report the warnings when parsing dates with suspicious format" <|
c1 = Column.from_vector "strs" ["31.12", "01.01"]
c2 = c1.parse Value_Type.Date "dd.MM"
current_year = Date.today.year
@ -188,7 +187,7 @@ spec =
c6.to_vector . should_equal ["25.12", "31.07"]
Problems.assume_no_problems c6
Test.specify "should fallback to Text" <|
group_builder.specify "should fallback to Text" <|
formatter = Data_Formatter.Value
formatter.parse "Text" . should_equal "Text"
complex_text = """
@ -196,7 +195,7 @@ spec =
And newlines toO!
formatter.parse complex_text . should_equal complex_text
Test.specify "should report Invalid_Format errors" <|
group_builder.specify "should report Invalid_Format errors" <|
formatter = Data_Formatter.Value
expect_warning r =
r.should_equal Nothing
@ -213,15 +212,15 @@ spec =
expect_warning <| formatter.parse "Text" type=Value_Type.Date_Time
expect_warning <| formatter.parse "Text" type=Value_Type.Time
Test.specify "should not allow unexpected types" <|
group_builder.specify "should not allow unexpected types" <|
formatter = Data_Formatter.Value
formatter.parse "Text" type=List . should_fail_with Illegal_Argument
Test.group "DataFormatter.format" <|
Test.specify "should handle Nothing" <|
suite_builder.group "DataFormatter.format" group_builder->
group_builder.specify "should handle Nothing" <|
Data_Formatter.Value.format Nothing . should_equal Nothing
Test.specify "should format numbers" <|
group_builder.specify "should format numbers" <|
formatter = Data_Formatter.Value
formatter.format 123 . should_equal "123"
formatter.format 1000000 . should_equal "1000000"
@ -234,7 +233,7 @@ spec =
formatter.format (Number.negative_infinity) . should_equal "-Infinity"
formatter.format (Number.nan) . should_equal "NaN"
Test.specify "should allow customizing the decimal point and thousand separator" <|
group_builder.specify "should allow customizing the decimal point and thousand separator" <|
formatter = Data_Formatter.Value thousand_separator="_" decimal_point=","
formatter.format 123 . should_equal "123"
formatter.format 1000000 . should_equal "1_000_000"
@ -246,18 +245,18 @@ spec =
formatter.format -1.0 . should_equal "-1,0"
formatter.format 1.0001 . should_equal "1,0001"
Test.specify "should format booleans" <|
group_builder.specify "should format booleans" <|
formatter = Data_Formatter.Value
formatter.format True . should_equal "True"
formatter.format False . should_equal "False"
Test.specify "should allow custom boolean formats" <|
group_builder.specify "should allow custom boolean formats" <|
formatter = Data_Formatter.Value true_values=["YES", "1", "true"] false_values=["NO", "0", "false"]
formatter.format True . should_equal "YES"
formatter.format False . should_equal "NO"
(Data_Formatter.Value true_values=[] false_values=[]).format True . should_fail_with Illegal_Argument
Test.specify "should format dates" <|
group_builder.specify "should format dates" <|
formatter = Data_Formatter.Value
formatter.format (Date.new 2022) . should_equal "2022-01-01"
formatter.format (Date_Time.new 1999) . should_contain "1999-01-01 00:00:00"
@ -265,14 +264,14 @@ spec =
formatter.format (Date_Time.new 1999 zone=(Time_Zone.parse "America/Los_Angeles")) . should_equal "1999-01-01 00:00:00-08:00[America/Los_Angeles]"
formatter.format (Time_Of_Day.new) . should_equal "00:00:00"
Test.specify "should allow custom date formats" <|
group_builder.specify "should allow custom date formats" <|
formatter = Data_Formatter.Value.with_datetime_formats date_formats=["ddd, d MMM y", Date_Time_Formatter.from_java "d MMM y[ G]"] datetime_formats=["dd/MM/yyyy HH:mm [ZZZZ]"] time_formats=["h:mma"]
formatter.format (Date.new 2022 06 21) . should_equal "Tue, 21 Jun 2022"
formatter.format (Date_Time.new 1999 02 03 04 56 11 zone=Time_Zone.utc) . should_equal "03/02/1999 04:56 GMT"
formatter.format (Date_Time.new 1999 02 03 04 56 11 zone=(Time_Zone.parse "America/Los_Angeles")) . should_equal "03/02/1999 04:56 GMT-08:00"
formatter.format (Time_Of_Day.new 13 55) . should_equal "1:55PM"
Test.specify "should act as identity on Text" <|
group_builder.specify "should act as identity on Text" <|
formatter = Data_Formatter.Value
formatter.format "Text" . should_equal "Text"
complex_text = """
@ -280,13 +279,13 @@ spec =
And newlines toO!
formatter.format complex_text . should_equal complex_text
Test.specify "should work with custom types, falling back to the `.to_text` method" <|
group_builder.specify "should work with custom types, falling back to the `.to_text` method" <|
formatter = Data_Formatter.Value thousand_separator="_"
formatter.format (Custom_Type.Value 42) . should_equal "(Custom_Type.Value 42)"
# We fall back to `to_text`, so obviously the nested numbers will not know about our formatting settings.
formatter.format (Custom_Type_With_To_Text.Value 1000) . should_equal "[CUSTOM = 1000]"
Test.specify "should correctly pass through errors from custom type's `.to_text` method" <|
group_builder.specify "should correctly pass through errors from custom type's `.to_text` method" <|
formatter = Data_Formatter.Value
r1 = formatter.format (Custom_Type_With_Error.Value 100)
r1.should_be_a Text
@ -300,10 +299,10 @@ spec =
r2.should_contain "Illegal_State"
r2.should_contain "foo_panic"
Test.group "DataFormatter builders" <|
suite_builder.group "DataFormatter builders" group_builder->
# We create a formatter with all non-default values to ensure that the builders keep the existing values of other properties instead of switching to the constructor's defaults.
formatter_1 = Data_Formatter.Value trim_values=False allow_leading_zeros=True decimal_point=',' thousand_separator='_' allow_exponential_notation=True datetime_formats=[Date_Time_Formatter.from "yyyy/MM/dd HH:mm:ss"] date_formats=[Date_Time_Formatter.from "dd/MM/yyyy"] time_formats=[Date_Time_Formatter.from "HH/mm/ss"] true_values=["YES"] false_values=["NO"]
Test.specify "should allow changing number formatting settings" <|
group_builder.specify "should allow changing number formatting settings" <|
formatter_2 = formatter_1.with_number_formatting decimal_point="*"
formatter_2.decimal_point . should_equal "*"
formatter_2.thousand_separator . should_equal formatter_1.thousand_separator
@ -322,7 +321,7 @@ spec =
formatter_3.allow_leading_zeros . should_equal False
formatter_3.allow_exponential_notation . should_equal False
Test.specify "should allow changing datetime formats" <|
group_builder.specify "should allow changing datetime formats" <|
formatter_1.with_datetime_formats . should_equal formatter_1
formatter_2 = formatter_1.with_datetime_formats date_formats="dd.MM.yyyy"
@ -340,7 +339,7 @@ spec =
formatter_3 = formatter_1.with_datetime_formats date_formats=[] datetime_formats=["foobar"] time_formats="baz"
formatter_3.should_fail_with Date_Time_Format_Parse_Error
Test.specify "should allow changing booleans' representations" <|
group_builder.specify "should allow changing booleans' representations" <|
formatter_2 = formatter_1.with_boolean_values "1" "0"
formatter_2.date_formats . should_equal formatter_1.date_formats
formatter_2.datetime_formats . should_equal formatter_1.datetime_formats
@ -357,4 +356,8 @@ spec =
formatter_3.true_values . should_equal []
formatter_3.false_values . should_equal []
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

View File

@ -1,12 +1,15 @@
from Standard.Base import all
from Standard.Test import Test_Suite
from Standard.Test_New import all
import project.Formatting.Data_Formatter_Spec
import project.Formatting.Parse_Values_Spec
spec =
Data_Formatter_Spec.spec
Parse_Values_Spec.spec
add_specs suite_builder =
Data_Formatter_Spec.add_specs suite_builder
Parse_Values_Spec.add_specs suite_builder
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
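
Assembled from the fragments above, the whole aggregator file is just function composition over a shared `suite_builder`; each module's `add_specs` registration stacks into one suite:

    from Standard.Base import all
    from Standard.Test_New import all

    import project.Formatting.Data_Formatter_Spec
    import project.Formatting.Parse_Values_Spec

    add_specs suite_builder =
        Data_Formatter_Spec.add_specs suite_builder
        Parse_Values_Spec.add_specs suite_builder

    main =
        suite = Test.build suite_builder->
            add_specs suite_builder
        suite.run_with_filter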

View File

@ -6,19 +6,18 @@ from Standard.Table import Table, Data_Formatter, Column
from Standard.Table.Data.Type.Value_Type import Value_Type, Auto
from Standard.Table.Errors import all
from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
import project.Util
spec =
Test.group "Table.parse" <|
Test.specify "should correctly parse integers" <|
add_specs suite_builder =
suite_builder.group "Table.parse" group_builder->
group_builder.specify "should correctly parse integers" <|
t1 = Table.new [["ints", ["0", "+0", "-0", "+1", "-1", "1", "000", "0010", "12345", Nothing]]]
t2 = t1.parse type=Value_Type.Integer
t2.at "ints" . to_vector . should_equal [0, 0, 0, 1, -1, 1, 0, 10, 12345, Nothing]
Test.specify "should correctly parse decimals" <|
group_builder.specify "should correctly parse decimals" <|
t1 = Table.new [["ints", ["0", "+0", "-0", "+1", "-1", "1", "12345", Nothing]]]
t2 = t1.parse type=Value_Type.Float
t2.at "ints" . to_vector . should_equal [0, 0, 0, 1, -1, 1, 12345, Nothing]
@ -33,7 +32,7 @@ spec =
t6 = t5.parse type=Value_Type.Float
t6.at "floats" . to_vector . should_equal [0.0, 0.0, 1.0, 0.1, 0.123, -0.1, 0.1, 0.0, 0.1234, Nothing, 11111111.111]
Test.specify "should parse leading zeros in numbers" <|
group_builder.specify "should parse leading zeros in numbers" <|
t1 = Table.new [["ints", ["0", "+00", "-00", "+01", "-01", "01", "000", "0010", "12345", Nothing]]]
t2 = Table.new [["floats", ["0.0000", ".0", "00.", "01.0", "-0010.0000", "1.0000"]]]
@ -64,7 +63,7 @@ spec =
t8.at "floats" . to_vector . should_equal [0.0, 0.0, 0.0, 1.0, -10.0, 1.0]
Problems.assume_no_problems t8
Test.specify "should correctly parse booleans" <|
group_builder.specify "should correctly parse booleans" <|
t1 = Table.new [["bools", ["true", "false", "True", "TRUE", "FALSE", Nothing, "False"]]]
t2 = t1.parse type=Value_Type.Boolean
t2.at "bools" . to_vector . should_equal [True, False, True, True, False, Nothing, False]
@ -73,7 +72,7 @@ spec =
t4 = t3.parse type=Value_Type.Boolean format="yes|no"
t4.at "bools" . to_vector . should_equal [Nothing, Nothing, Nothing, True, Nothing, False, Nothing]
Test.specify "should correctly parse date and time" <|
group_builder.specify "should correctly parse date and time" <|
t1 = Table.new [["dates", ["2022-05-07", "2000-01-01", "2010-12-31"]]]
t2 = t1.parse type=Value_Type.Date
t2.at "dates" . to_vector . should_equal [Date.new 2022 5 7, Date.new 2000 1 1, Date.new 2010 12 31]
@ -91,7 +90,7 @@ spec =
t8.at "dates" . value_type . should_equal Value_Type.Date
t8.at "dates" . to_vector . should_equal [Date.new 2022 5 7, Date.new 2001 1 1, Date.new 2010 12 31]
Test.specify "should parse date and time in various formats" <|
group_builder.specify "should parse date and time in various formats" <|
opts = Data_Formatter.Value.with_datetime_formats date_formats=["d.M.y", (Date_Time_Formatter.from_java "d MMM y[ G]"), "ddd, d MMM y"] datetime_formats=["yyyy-MM-dd HH:mm:ss", "dd/MM/yyyy HH:mm"] time_formats=["H:mm:ss.f", "h:mma"]
t1 = Table.new [["dates", ["1.2.476", "10 Jan 1900 AD", "Tue, 3 Jun 2008"]]]
@ -106,7 +105,7 @@ spec =
t6 = t5.parse format=opts type=Value_Type.Time
t6.at "times" . to_vector . should_equal [Time_Of_Day.new 1 2 3 nanosecond=987654321, Time_Of_Day.new 13 30 0 0]
Test.specify "should warn when cells do not fit the expected format" <|
group_builder.specify "should warn when cells do not fit the expected format" <|
ints = ["ints", ["0", "1", "1.0", "foobar", "", "--1", "+-1", "10", "-+1"]]
# Currently scientific notation is not supported and we document that in this test; in the future the situation may change and the test may need to be flipped.
floats = ["floats", ["0", "2.0", "1e6", "foobar", "", "--1", "+-1", "100.", "-+1"]]
@ -156,13 +155,13 @@ spec =
p6 = [Invalid_Format.Error "times" Value_Type.Time ["2001-01-01", "2001-01-01 12:34:56", "Tuesday", "foobar", "", "10:99:99", "1/2/2003", "2001-30-10"]]
Problems.test_problem_handling a6 p6 t6
Test.specify "should leave not selected columns unaffected" <|
group_builder.specify "should leave not selected columns unaffected" <|
t1 = Table.new [["A", ["1", "2"]], ["B", ["3", "4"]]]
t2 = t1.parse columns="B"
t2.at "A" . to_vector . should_equal ["1", "2"]
t2.at "B" . to_vector . should_equal [3, 4]
Test.specify "should guess the datatype for columns" <|
group_builder.specify "should guess the datatype for columns" <|
c1 = ["ints", ["1", " +2", "-123", Nothing]]
c2 = ["ints0", ["01", "02 ", Nothing, "-1"]]
c3 = ["floats", [" 1.0 ", "2.2", Nothing, "-1.0"]]
@ -212,7 +211,7 @@ spec =
# `bools` are not requested to be parsed, so they are kept as-is, with leading whitespace etc.
t6.at "bools" . to_vector . should_equal ["true", " False", Nothing, "True"]
Test.specify "should allow to specify a thousands separator and a custom decimal point" <|
group_builder.specify "should allow to specify a thousands separator and a custom decimal point" <|
opts = Data_Formatter.Value decimal_point=',' thousand_separator='_'
t1 = Table.new [["floats", ["0,0", "+0,0", "-0,0", "+1,5", "-1,2", "1,0", "0,0000", "10_000,", ",0"]]]
t2 = t1.parse format=opts
@ -226,7 +225,7 @@ spec =
t5.at "xs" . to_vector . should_equal [Nothing, Nothing, Nothing, Nothing, Nothing, 1000]
Problems.get_attached_warnings t5 . should_equal [Invalid_Format.Error "xs" Value_Type.Integer ["1,2", "1.3", "_0", "0_", "1_0_0"]]
Test.specify "should allow to specify custom values for booleans" <|
group_builder.specify "should allow to specify custom values for booleans" <|
opts_1 = Data_Formatter.Value true_values=["1", "YES"] false_values=["0"]
t1 = Table.new [["bools", ["1", "0", "YES", "1", "0"]]]
t2 = t1.parse format=opts_1
@ -246,7 +245,7 @@ spec =
times = ["times", ["11:00:00 ", " 00:00:00", "00 : 00 : 00", Nothing]]
Table.new [ints, floats, bools, dates, datetimes, times]
Test.specify "should trim input values by default" <|
group_builder.specify "should trim input values by default" <|
t1 = whitespace_table.parse columns="ints" type=Value_Type.Integer
t1.at "ints" . to_vector . should_equal [0, 1, Nothing, 2]
Problems.expect_only_warning (Invalid_Format.Error "ints" Value_Type.Integer ["0 1"]) t1
@ -271,7 +270,7 @@ spec =
t6.at "times" . to_vector . should_equal [Time_Of_Day.new 11 0 0, Time_Of_Day.new, Nothing, Nothing]
Problems.expect_only_warning (Invalid_Format.Error "times" Value_Type.Time ["00 : 00 : 00"]) t6
Test.specify "should fail to parse if whitespace is present and trimming is turned off" <|
group_builder.specify "should fail to parse if whitespace is present and trimming is turned off" <|
opts = Data_Formatter.Value trim_values=False
t1 = whitespace_table.parse format=opts columns="ints" type=Value_Type.Integer
t1.at "ints" . to_vector . should_equal [0, Nothing, Nothing, Nothing]
@ -297,7 +296,7 @@ spec =
t6.at "times" . to_vector . should_equal [Nothing, Nothing, Nothing, Nothing]
Problems.expect_only_warning (Invalid_Format.Error "times" Value_Type.Time ["11:00:00 ", " 00:00:00", "00 : 00 : 00"]) t6
Test.specify "should fallback to text if whitespace is present and trimming is turned off" <|
group_builder.specify "should fallback to text if whitespace is present and trimming is turned off" <|
c1 = ["1", " +2", "-123", Nothing]
c2 = [" 1.0 ", "2.2", Nothing, "-1.0"]
c3 = ["true", " False", Nothing, "True"]
@ -309,7 +308,7 @@ spec =
t2.at "floats" . to_vector . should_equal c2
t2.at "bools" . to_vector . should_equal c3
Test.specify "should allow selecting columns by regex" <|
group_builder.specify "should allow selecting columns by regex" <|
t1 = Table.new [["An", ["1", "2", "3"]], ["Am", ["4", "5", "6"]], ["C", ["7", "8", "9"]], ["D", ["10", "11", "12"]]]
r1 = t1.parse columns="A.*".to_regex
r1.at "An" . to_vector . should_equal [1, 2, 3]
@ -317,7 +316,7 @@ spec =
r1.at "C" . to_vector . should_equal ["7", "8", "9"]
r1.at "D" . to_vector . should_equal ["10", "11", "12"]
Test.specify "should correctly handle problems: missing input columns" <|
group_builder.specify "should correctly handle problems: missing input columns" <|
t1 = Table.new [["A", ["1", "2", "3"]]]
r1 = t1.parse columns=["A", "B", "C", "E"] on_problems=Problem_Behavior.Ignore
r1.should_fail_with Missing_Input_Columns
@ -334,7 +333,7 @@ spec =
problems = [Missing_Input_Columns.Error ["B", "C", "E"]]
Problems.test_problem_handling action problems tester
Test.specify "should correctly handle problems: out of bounds indices" <|
group_builder.specify "should correctly handle problems: out of bounds indices" <|
t1 = Table.new [["A", ["1", "2", "3"]]]
r1 = t1.parse columns=[0, -1, 42, -5]
r1.should_fail_with Missing_Input_Columns
@ -346,7 +345,7 @@ spec =
problems = [Missing_Input_Columns.Error [42, -5]]
Problems.test_problem_handling action problems tester
Test.specify "should allow mixed column selectors" <|
group_builder.specify "should allow mixed column selectors" <|
t1 = Table.new [["Am", ["1", "2", "3"]], ["B", ["4", "5", "6"]], ["C", ["7", "8", "9"]], ["D", ["10", "11", "12"]]]
r1 = t1.parse columns=["A.*".to_regex, -2, "D"]
r1.at "Am" . to_vector . should_equal [1, 2, 3]
@ -354,7 +353,7 @@ spec =
r1.at "C" . to_vector . should_equal [7, 8, 9]
r1.at "D" . to_vector . should_equal [10, 11, 12]
Test.specify "should handle edge-cases: overlapping selectors" <|
group_builder.specify "should handle edge-cases: overlapping selectors" <|
t1 = Table.new [["Am", ["1", "2", "3"]], ["B", ["4", "5", "6"]], ["C", ["7", "8", "9"]], ["D", ["10", "11", "12"]]]
r1 = t1.parse columns=["A.*".to_regex, 0, "D", -1, -1, 0, 3]
r1.at "Am" . to_vector . should_equal [1, 2, 3]
@ -362,18 +361,18 @@ spec =
r1.at "C" . to_vector . should_equal ["7", "8", "9"]
r1.at "D" . to_vector . should_equal [10, 11, 12]
Test.specify "should error if invalid target type is provided" <|
group_builder.specify "should error if invalid target type is provided" <|
t1 = Table.new [["A", ["1", "2", "3"]]]
t1.parse type=Nothing . should_fail_with Illegal_Argument
Test.specify "should error if the input column is not text" <|
group_builder.specify "should error if the input column is not text" <|
t1 = Table.new [["A", [1, 2, 3]], ["B", ["4", "5", "6"]], ["C", [7, 8, 9]], ["D", ["10", "11", "12"]]]
r1 = t1.parse columns=["A", "B", "C"]
r1.should_fail_with Invalid_Value_Type
r1.catch.related_column . should_equal "A"
r1.catch.expected.should_equal "Char"
Test.specify "should error if no input columns selected, unless error_on_missing_columns=False" <|
group_builder.specify "should error if no input columns selected, unless error_on_missing_columns=False" <|
t1 = Table.new [["A", ["1", "2", "3"]]]
r1 = t1.parse columns=[]
r1.should_fail_with No_Input_Columns_Selected
@ -391,8 +390,8 @@ spec =
Problems.expect_warning No_Input_Columns_Selected r4
Problems.expect_warning (Missing_Input_Columns.Error ["nonexistent column :D", -42]) r4
Test.group "Column.parse" <|
Test.specify "should correctly parse integers" <|
suite_builder.group "Column.parse" group_builder->
group_builder.specify "should correctly parse integers" <|
c1 = Column.from_vector "ints" ["0", "+0", "-0", "+1", "-1", "1", "000", "0010", "12345", Nothing]
c2 = c1.parse type=Value_Type.Integer
c2.name.should_equal c1.name
@ -404,7 +403,7 @@ spec =
c3.to_vector . should_equal [0, 0, 0, 1, -1, 1, 0, 10, 12345, Nothing]
Problems.assume_no_problems c3
Test.specify "should correctly parse integers in US formats" <|
group_builder.specify "should correctly parse integers in US formats" <|
cUS = Column.from_vector "ints" ["1", "000123", "-1234", "1234567", "123e456"]
pUS = cUS.parse type=Value_Type.Integer
pUS.to_vector . should_equal [1, 123, -1234, 1234567, Nothing]
@ -432,7 +431,7 @@ spec =
pUS5.to_vector . should_equal [1, 123, -1234, 1234567, Nothing]
Problems.expect_warning (Invalid_Format.Error "ints" Value_Type.Integer ["(123,456)"]) pUS5
Test.specify "should correctly parse integers in European formats" <|
group_builder.specify "should correctly parse integers in European formats" <|
cDE = Column.from_vector "ints" ["1", "000123", "-1.234", "1.234.567", "12.34.56"]
pDE = cDE.parse type=Value_Type.Integer
pDE.to_vector . should_equal [1, 123, -1234, 1234567, Nothing]
@ -448,7 +447,7 @@ spec =
pSW.to_vector . should_equal [1, 123, -1234, 1234567, Nothing]
Problems.expect_warning (Invalid_Format.Error "ints" Value_Type.Integer ["(123'456)"]) pSW
Test.specify "should correctly parse decimals" <|
group_builder.specify "should correctly parse decimals" <|
c1 = Column.from_vector "ints" ["0", "+0", "-0", "+1", "-1", "1", "000", "0010", "12345", Nothing]
c2 = c1.parse Value_Type.Float
c2.name.should_equal c1.name
@ -469,7 +468,7 @@ spec =
c6.to_vector . should_equal [0.0, 0.0, 1.0, 0.1, 0.123, -0.1, 0.1, 0.0, 0.1234, Nothing, 11111111.111]
Problems.assume_no_problems c6
Test.specify "should correctly parse decimals in US formats" <|
group_builder.specify "should correctly parse decimals in US formats" <|
cUS = Column.from_vector "floats" ["1.23", "000123", "-12.34", "123.4567", "123e456"]
pUS = cUS.parse type=Value_Type.Float
pUS.to_vector . should_equal [1.23, 123, -12.34, 123.4567, Nothing]
@ -497,7 +496,7 @@ spec =
pUS5.to_vector . should_equal [1.23, 123, -1234.567, 1234567.789, Nothing]
Problems.expect_warning (Invalid_Format.Error "floats" Value_Type.Float ["(123,456)"]) pUS5
Test.specify "should correctly parse decimals in European formats" <|
group_builder.specify "should correctly parse decimals in European formats" <|
cDE = Column.from_vector "floats" ["1,23", "000123", "-1.234,567", "1.234.567,789", "12.34,56"]
pDE = cDE.parse type=Value_Type.Float
pDE.to_vector . should_equal [1.23, 123, -1234.567, 1234567.789, Nothing]
@ -513,7 +512,7 @@ spec =
pSW.to_vector . should_equal [1.23, 123, -1234.567, 1234567, Nothing]
Problems.expect_warning (Invalid_Format.Error "floats" Value_Type.Float ["(123'456)"]) pSW
Test.specify "should correctly parse booleans" <|
group_builder.specify "should correctly parse booleans" <|
c1 = Column.from_vector "bools" ["true", "false", "True", "TRUE", "FALSE", Nothing, "False"]
c2 = c1.parse type=Value_Type.Boolean
c2.name.should_equal c1.name
@ -533,7 +532,7 @@ spec =
w.value_type . should_equal Value_Type.Boolean
w.cells . should_equal ["yes"]
Test.specify "should correctly parse date and time" <|
group_builder.specify "should correctly parse date and time" <|
c1 = Column.from_vector "date" ["2022-05-07", "2000-01-01", "2010-12-31"]
c2 = c1.parse type=Value_Type.Date
c2.to_vector . should_equal [Date.new 2022 5 7, Date.new 2000 1 1, Date.new 2010 12 31]
@ -556,7 +555,7 @@ spec =
w.value_type . should_equal Value_Type.Date_Time
w.cells . should_equal ["42", "2010-12-31"]
Test.specify "should correctly parse date and time with format" <|
group_builder.specify "should correctly parse date and time with format" <|
c1 = Column.from_vector "date" ["5/7/2022", "1/1/2000", "12/31/2010"]
c2 = c1.parse type=Value_Type.Date "M/d/yyyy"
c2.to_vector . should_equal [Date.new 2022 5 7, Date.new 2000 1 1, Date.new 2010 12 31]
@ -565,13 +564,13 @@ spec =
c4 = c3.parse type=Value_Type.Date_Time "M/d/yyyy HH:mm:ss"
c4.to_vector . should_equal [Date_Time.new 2022 5 7 23 59 59, Date_Time.new 2000 1 1, Date_Time.new 2010 12 31 12 34 56]
Test.specify "should handle invalid format strings gracefully" <|
group_builder.specify "should handle invalid format strings gracefully" <|
c1 = Column.from_vector "date" ["5/7/2022", "1/1/2000", "12/31/2010"]
c1.parse type=Value_Type.Date "M/d/fqsrf" . should_fail_with Date_Time_Format_Parse_Error
c1.parse type=Value_Type.Time "ęęę" . should_fail_with Date_Time_Format_Parse_Error
c1.parse type=Value_Type.Date_Time "M/d/fqsrf HH:mm:ss.fff" . should_fail_with Date_Time_Format_Parse_Error
Test.specify "should correctly work in Auto mode" <|
group_builder.specify "should correctly work in Auto mode" <|
c1 = Column.from_vector "A" ["1", "2", "3"]
c2 = Column.from_vector "B" ["1.0", "2.5", "3"]
c3 = Column.from_vector "C" ["2022-05-07", "2000-01-01", "2010-12-31"]
@ -627,11 +626,11 @@ spec =
r8.to_vector . should_equal [Nothing, Nothing, Nothing]
Problems.assume_no_problems r8
Test.specify "should error if invalid target type is provided" <|
group_builder.specify "should error if invalid target type is provided" <|
c1 = Column.from_vector "A" ["1", "2", "3"]
c1.parse type=Nothing . should_fail_with Illegal_Argument
Test.specify "should error if the input column is not text" <|
group_builder.specify "should error if the input column is not text" <|
c1 = Column.from_vector "A" [1, 2, 3]
r1 = c1.parse
r1.should_fail_with Invalid_Value_Type
@ -639,4 +638,8 @@ spec =
r1.catch.expected . should_equal "Char"
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

View File

@ -1,14 +1,17 @@
from Standard.Base import all
from Standard.Test import Test_Suite
from Standard.Test_New import all
import project.Helpers.Sorted_List_Index_Spec
import project.Helpers.Unique_Naming_Strategy_Spec
import project.Helpers.Value_Type_Spec
spec =
Unique_Naming_Strategy_Spec.spec
Sorted_List_Index_Spec.spec
Value_Type_Spec.spec
add_specs suite_builder =
Unique_Naming_Strategy_Spec.add_specs suite_builder
Sorted_List_Index_Spec.add_specs suite_builder
Value_Type_Spec.add_specs suite_builder
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

View File

@ -3,59 +3,68 @@ from Standard.Base import all
# We need this import to ensure that we depend on `Standard.Table`, so that the Java import of `org.enso.table` is valid.
from Standard.Table import all
from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all
polyglot java import java.util.Comparator
polyglot java import org.enso.table.data.table.join.between.SortedListIndex
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
make_index vec = SortedListIndex.build vec Comparator.naturalOrder
type Data
Value ~index1
setup =
v1 = [0, 0, 1, 1, 1, 1, 2, 2, 2, 3, 3, 4, 5, 6, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 8, 10, 10, 10, 10, 11, 14, 17, 19]
v1_shuffled = v1.take (Index_Sub_Range.Sample v1.length)
Data.Value (make_index v1_shuffled)
## White-box tests for the SortedListIndex, ensuring correctness of the
implementation. These complement the `Join_Condition.Between` test cases,
guarding against off-by-one errors and other bugs in the implementation.
spec = Test.group "SortedListIndex (used for SortJoin)" <|
make_index vec = SortedListIndex.build vec Comparator.naturalOrder
add_specs suite_builder = suite_builder.group "SortedListIndex (used for SortJoin)" group_builder->
data = Data.setup
v1 = [0, 0, 1, 1, 1, 1, 2, 2, 2, 3, 3, 4, 5, 6, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 8, 10, 10, 10, 10, 11, 14, 17, 19]
v1_shuffled = v1.take (Index_Sub_Range.Sample v1.length)
index1 = make_index v1_shuffled
group_builder.specify "should correctly handle empty matches" <|
Vector.from_polyglot_array (data.index1.findSubRange 9 9) . should_equal []
Vector.from_polyglot_array (data.index1.findSubRange -10 -2) . should_equal []
Vector.from_polyglot_array (data.index1.findSubRange 200 300) . should_equal []
Vector.from_polyglot_array (data.index1.findSubRange 20 0) . should_equal []
Test.specify "should correctly handle empty matches" <|
Vector.from_polyglot_array (index1.findSubRange 9 9) . should_equal []
Vector.from_polyglot_array (index1.findSubRange -10 -2) . should_equal []
Vector.from_polyglot_array (index1.findSubRange 200 300) . should_equal []
Vector.from_polyglot_array (index1.findSubRange 20 0) . should_equal []
group_builder.specify "should correctly handle single-element matches" <|
Vector.from_polyglot_array (data.index1.findSubRange 8 8) . should_equal [8]
Vector.from_polyglot_array (data.index1.findSubRange 12 16) . should_equal [14]
Vector.from_polyglot_array (data.index1.findSubRange 18 100) . should_equal [19]
Vector.from_polyglot_array (data.index1.findSubRange 19 100) . should_equal [19]
Vector.from_polyglot_array (data.index1.findSubRange 19 19) . should_equal [19]
Test.specify "should correctly handle single-element matches" <|
Vector.from_polyglot_array (index1.findSubRange 8 8) . should_equal [8]
Vector.from_polyglot_array (index1.findSubRange 12 16) . should_equal [14]
Vector.from_polyglot_array (index1.findSubRange 18 100) . should_equal [19]
Vector.from_polyglot_array (index1.findSubRange 19 100) . should_equal [19]
Vector.from_polyglot_array (index1.findSubRange 19 19) . should_equal [19]
group_builder.specify "should correctly handle matches" <|
Vector.from_polyglot_array (data.index1.findSubRange 4 6) . should_equal [4, 5, 6]
Vector.from_polyglot_array (data.index1.findSubRange 3 5) . should_equal [3, 3, 4, 5]
Test.specify "should correctly handle matches" <|
Vector.from_polyglot_array (index1.findSubRange 4 6) . should_equal [4, 5, 6]
Vector.from_polyglot_array (index1.findSubRange 3 5) . should_equal [3, 3, 4, 5]
Vector.from_polyglot_array (data.index1.findSubRange 0 3) . should_equal [0, 0, 1, 1, 1, 1, 2, 2, 2, 3, 3]
Vector.from_polyglot_array (data.index1.findSubRange 2 4) . should_equal [2, 2, 2, 3, 3, 4]
Vector.from_polyglot_array (data.index1.findSubRange 8 10) . should_equal [8, 10, 10, 10, 10]
Vector.from_polyglot_array (data.index1.findSubRange 8 11) . should_equal [8, 10, 10, 10, 10, 11]
Vector.from_polyglot_array (data.index1.findSubRange 8 12) . should_equal [8, 10, 10, 10, 10, 11]
Vector.from_polyglot_array (data.index1.findSubRange 9 12) . should_equal [10, 10, 10, 10, 11]
Vector.from_polyglot_array (index1.findSubRange 0 3) . should_equal [0, 0, 1, 1, 1, 1, 2, 2, 2, 3, 3]
Vector.from_polyglot_array (index1.findSubRange 2 4) . should_equal [2, 2, 2, 3, 3, 4]
Vector.from_polyglot_array (index1.findSubRange 8 10) . should_equal [8, 10, 10, 10, 10]
Vector.from_polyglot_array (index1.findSubRange 8 11) . should_equal [8, 10, 10, 10, 10, 11]
Vector.from_polyglot_array (index1.findSubRange 8 12) . should_equal [8, 10, 10, 10, 10, 11]
Vector.from_polyglot_array (index1.findSubRange 9 12) . should_equal [10, 10, 10, 10, 11]
group_builder.specify "should correctly handle big all-equal ranges" <|
Vector.from_polyglot_array (data.index1.findSubRange 1 1) . should_equal [1, 1, 1, 1]
Vector.from_polyglot_array (data.index1.findSubRange 7 7) . should_equal [7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7]
Test.specify "should correctly handle big all-equal ranges" <|
Vector.from_polyglot_array (index1.findSubRange 1 1) . should_equal [1, 1, 1, 1]
Vector.from_polyglot_array (index1.findSubRange 7 7) . should_equal [7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7]
Test.specify "other cases: empty index" <|
group_builder.specify "other cases: empty index" <|
index2 = make_index []
Vector.from_polyglot_array (index2.findSubRange 1 5) . should_equal []
Test.specify "other cases: single element index" <|
group_builder.specify "other cases: single element index" <|
index2 = make_index [5]
Vector.from_polyglot_array (index2.findSubRange 1 5) . should_equal [5]
Vector.from_polyglot_array (index2.findSubRange 5 5) . should_equal [5]
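
The file above shows the full shape of the migration in one place: the module exports `add_specs suite_builder` instead of a `spec` value, shared fixtures move into a `Data` type whose lazy (`~`) fields defer expensive setup until a spec actually forces them, and `main` builds the suite and runs it. A minimal sketch of that shape (the group name, field, and values here are illustrative only):

```
from Standard.Base import all
from Standard.Test_New import all

# Shared fixture: `~numbers` is lazy, so it is only computed when a spec
# actually uses it.
type Data
    Value ~numbers

    setup = Data.Value (Vector.new 4 (i-> i * 2))

add_specs suite_builder =
    suite_builder.group "Sketch group" group_builder->
        data = Data.setup
        group_builder.specify "should share lazily computed data" <|
            data.numbers . should_equal [0, 2, 4, 6]

main =
    suite = Test.build suite_builder->
        add_specs suite_builder
    suite.run_with_filter
```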

View File

@ -6,25 +6,24 @@ import Standard.Table.Internal.Unique_Name_Strategy.Unique_Name_Strategy
import Standard.Database.Internal.Common.Encoding_Limited_Naming_Properties.Encoding_Limited_Naming_Properties
from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all
spec =
add_specs suite_builder =
no_limit = Unlimited_Naming_Properties.Instance
Test.group 'Unique_Name_Strategy Helper' <|
Test.specify 'should change an empty name to "Column"' <|
suite_builder.group 'Unique_Name_Strategy Helper' group_builder->
group_builder.specify 'should change an empty name to "Column"' <|
strategy = Unique_Name_Strategy.new no_limit
strategy.make_valid_name "" . should_equal "Column"
strategy.make_valid_name "FOO" . should_equal "FOO"
strategy.make_valid_name "Column" . should_equal "Column"
strategy.invalid_names.length . should_equal 1
Test.specify 'should change Nothing to "Column"' <|
group_builder.specify 'should change Nothing to "Column"' <|
strategy = Unique_Name_Strategy.new no_limit
strategy.make_valid_name Nothing . should_equal "Column"
strategy.invalid_names.length . should_equal 1
Test.specify 'should not rename unique names' <|
group_builder.specify 'should not rename unique names' <|
strategy = Unique_Name_Strategy.new no_limit
strategy.make_unique "A" . should_equal "A"
strategy.make_unique "B" . should_equal "B"
@ -32,7 +31,7 @@ spec =
strategy.renames.length . should_equal 0
strategy.invalid_names.length . should_equal 0
    Test.specify 'should rename duplicate names' <|
    group_builder.specify 'should rename duplicate names' <|
strategy = Unique_Name_Strategy.new no_limit
strategy.make_unique "A" . should_equal "A"
strategy.make_unique "A" . should_equal "A 1"
@ -40,7 +39,7 @@ spec =
strategy.renames.length . should_equal 2
strategy.invalid_names.length . should_equal 0
Test.specify 'should preserve existing suffix' <|
group_builder.specify 'should preserve existing suffix' <|
strategy = Unique_Name_Strategy.new no_limit
strategy.make_unique "A" . should_equal "A"
strategy.make_unique "A 1" . should_equal "A 1"
@ -49,7 +48,7 @@ spec =
strategy.renames.length . should_equal 2
strategy.invalid_names.length . should_equal 0
Test.specify "should always add a counter when renaming invalid names" <|
group_builder.specify "should always add a counter when renaming invalid names" <|
strategy = Unique_Name_Strategy.new no_limit
strategy.make_unique "" . should_equal "Column 1"
strategy.make_unique "" . should_equal "Column 2"
@ -58,7 +57,7 @@ spec =
strategy.make_unique "Column" . should_equal "Column"
strategy.make_unique "" . should_equal "Column 4"
Test.specify 'should work as in examples' <|
group_builder.specify 'should work as in examples' <|
unique_name_strategy = Unique_Name_Strategy.new no_limit
unique_names = ["A", "B", "A", ""] . map unique_name_strategy.make_unique
duplicates = unique_name_strategy.renames
@ -71,7 +70,7 @@ spec =
strategy_1.make_unique "A" . should_equal "A"
strategy_1.make_unique "A" . should_equal "A 1"
Test.specify "should treat string equality consistently with Enso" <|
group_builder.specify "should treat string equality consistently with Enso" <|
s1 = 'ś'
s2 = 's\u0301'
# Enso makes these values equal
@ -91,7 +90,7 @@ spec =
# But the underlying representation should remain unchanged.
r2.codepoints . should_equal [115, 769, 32, 49]
Test.specify "should work with a string size limit" <|
group_builder.specify "should work with a string size limit" <|
limit = Encoding_Limited_Naming_Properties.Instance encoding=Encoding.utf_8 limit=5
strategy = Unique_Name_Strategy.new limit
@ -125,7 +124,7 @@ spec =
strategy.make_valid_name "abc" . should_equal "abc"
strategy.make_valid_name "123456789" . should_equal "12345"
Test.specify "should handle too small limits gracefully" <|
group_builder.specify "should handle too small limits gracefully" <|
limit = Encoding_Limited_Naming_Properties.Instance encoding=Encoding.utf_8 limit=1
strategy = Unique_Name_Strategy.new limit
strategy.make_unique "A" . should_equal "A"
@ -148,7 +147,7 @@ spec =
strategy2.make_unique "B" . should_equal "B"
strategy2.make_unique "B" . should_fail_with Illegal_Argument
Test.specify "should correctly handle graphemes spanning multiple units with size limit" <|
group_builder.specify "should correctly handle graphemes spanning multiple units with size limit" <|
limit = Encoding_Limited_Naming_Properties.Instance encoding=Encoding.utf_8 limit=3
strategy = Unique_Name_Strategy.new limit
@ -175,7 +174,7 @@ spec =
strategy2.make_unique facepalm+facepalm . should_equal facepalm+" 1"
strategy2.make_unique facepalm+facepalm . should_equal facepalm+" 2"
Test.specify "should fail gracefully when encountering not-encodable characters" <|
group_builder.specify "should fail gracefully when encountering not-encodable characters" <|
limit = Encoding_Limited_Naming_Properties.Instance encoding=Encoding.ascii limit=5
strategy = Unique_Name_Strategy.new limit
strategy.make_valid_name "ąęś" . should_fail_with Illegal_Argument
@ -183,8 +182,8 @@ spec =
strategy.make_unique "ABC" . should_equal "ABC"
strategy.combine_with_prefix ["A"] ["ą"] "P_" . should_fail_with Illegal_Argument
Test.group "Unique_Name_Strategy.combine_with_prefix" <|
Test.specify "should work as in examples" <|
suite_builder.group "Unique_Name_Strategy.combine_with_prefix" group_builder->
group_builder.specify "should work as in examples" <|
strategy = Unique_Name_Strategy.new no_limit
first = ["A", "B", "second_A"]
second = ["A", "B", "second_A 1", "C"]
@ -193,19 +192,19 @@ spec =
strategy.invalid_names . should_equal []
strategy.renames . should_equal ["second_A"]
Test.specify "should work with no prefix" <|
group_builder.specify "should work with no prefix" <|
first = ["A", "B"]
second = ["B", "A", "C"]
strategy = Unique_Name_Strategy.new no_limit
r = strategy.combine_with_prefix first second ""
r . should_equal ["B 1", "A 1", "C"]
Test.specify "should work for empty input" <|
group_builder.specify "should work for empty input" <|
Unique_Name_Strategy.new no_limit . combine_with_prefix [] [] "" . should_equal []
Unique_Name_Strategy.new no_limit . combine_with_prefix ["a"] [] "" . should_equal []
Unique_Name_Strategy.new no_limit . combine_with_prefix [] ["a"] "" . should_equal ["a"]
Test.specify "should find the first free spot" <|
group_builder.specify "should find the first free spot" <|
Unique_Name_Strategy.new no_limit . combine_with_prefix ["A", "A 1", "A 2"] ["A"] "" . should_equal ["A 3"]
Unique_Name_Strategy.new no_limit . combine_with_prefix ["A", "A 1", "A 2"] ["A 4", "A 6", "A 100", "A", "A 3"] "" . should_equal ["A 4", "A 6", "A 100", "A 5", "A 3"]
@ -214,13 +213,13 @@ spec =
Unique_Name_Strategy.new no_limit . combine_with_prefix ["A", "A 1", "A 2", "P_A 1"] ["A"] "P_" . should_equal ["P_A"]
Unique_Name_Strategy.new no_limit . combine_with_prefix ["A", "A 1", "A 2", "P_A 1"] ["A", "P_A", "P_A 2"] "P_" . should_equal ["P_A 3", "P_A", "P_A 2"]
Test.specify "will add a prefix/suffix, not increment an existing counter" <|
group_builder.specify "will add a prefix/suffix, not increment an existing counter" <|
first = ["A", "A 1", "A 2", "A 3"]
Unique_Name_Strategy.new no_limit . combine_with_prefix first ["A 2"] "P_" . should_equal ["P_A 2"]
Unique_Name_Strategy.new no_limit . combine_with_prefix first ["A 2"] "" . should_equal ["A 2 1"]
Unique_Name_Strategy.new no_limit . combine_with_prefix first+["P_A 2"] ["A 2"] "P_" . should_equal ["P_A 2 1"]
    Test.specify "should prioritize existing names when renaming conflicts and rename only the ones that clash with the other list" <|
    group_builder.specify "should prioritize existing names when renaming conflicts and rename only the ones that clash with the other list" <|
first = ["A", "B"]
second = ["B", "A", "B 1", "C", "B 2", "B_4"]
strategy = Unique_Name_Strategy.new no_limit
@ -236,4 +235,7 @@ spec =
r3 = Unique_Name_Strategy.new no_limit . combine_with_prefix first third "P_"
r3 . should_equal ["P_B 3", "P_A", "P_B", "X", "P_B 1", "P_B 2"]
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
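
As the `Unique_Name_Strategy` migration above illustrates, most of the rewrite is mechanical: `Test.group name <|` becomes `suite_builder.group name group_builder->`, `Test.specify` becomes `group_builder.specify`, and `main = Test_Suite.run_main spec` becomes the `Test.build` wrapper. A toy spec with the old counterparts noted in comments (the names and assertion are illustrative):

```
from Standard.Base import all
from Standard.Test_New import all

add_specs suite_builder =                              # was: spec =
    suite_builder.group "Toy group" group_builder->    # was: Test.group "Toy group" <|
        group_builder.specify "should add numbers" <|  # was: Test.specify "should add numbers" <|
            (1 + 1) . should_equal 2
```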

View File

@ -4,12 +4,11 @@ import Standard.Table.Data.Type.Value_Type.Bits
import Standard.Table.Data.Type.Value_Type.Value_Type
import Standard.Table.Data.Type.Value_Type_Helpers
from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all
spec =
Test.group "Value_Type" <|
Test.specify "should have a nice display text representation" <|
add_specs suite_builder =
suite_builder.group "Value_Type" group_builder->
group_builder.specify "should have a nice display text representation" <|
Value_Type.Boolean.to_display_text . should_equal "Boolean"
Value_Type.Byte.to_display_text . should_equal "Byte"
@ -28,7 +27,7 @@ spec =
Value_Type.Unsupported_Data_Type.to_display_text . should_equal "Unsupported_Data_Type"
(Value_Type.Unsupported_Data_Type "FOO-BAR").to_display_text . should_equal "Unsupported_Data_Type (FOO-BAR)"
Test.specify "should use correct in-memory logic to reconcile pairs of types for operations like union/iif" <|
group_builder.specify "should use correct in-memory logic to reconcile pairs of types for operations like union/iif" <|
Value_Type_Helpers.reconcile_types Value_Type.Boolean Value_Type.Boolean . should_equal Value_Type.Boolean
Value_Type_Helpers.reconcile_types (Value_Type.Integer Bits.Bits_16) (Value_Type.Integer Bits.Bits_32) . should_equal (Value_Type.Integer Bits.Bits_32)
@ -62,4 +61,8 @@ spec =
Value_Type_Helpers.reconcile_types Value_Type.Boolean Value_Type.Byte . should_equal Value_Type.Mixed
Value_Type_Helpers.reconcile_types (Value_Type.Float Bits.Bits_32) Value_Type.Boolean . should_equal Value_Type.Mixed
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
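
Because every migrated module now exposes `add_specs`, several files can in principle be registered on a single `suite_builder` and run as one suite; presumably this is how the follow-up PRs will assemble the full test run. A hypothetical aggregator, where the imported module names are placeholders rather than actual files:

```
from Standard.Base import all
from Standard.Test_New import all

# Hypothetical spec modules; these names are placeholders.
import project.Value_Type_Spec
import project.Unique_Name_Strategy_Spec

main =
    suite = Test.build suite_builder->
        Value_Type_Spec.add_specs suite_builder
        Unique_Name_Strategy_Spec.add_specs suite_builder
    suite.run_with_filter
```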

View File

@ -2,30 +2,39 @@ from Standard.Base import all
from Standard.Table import all
from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Util import all
spec =
type Data
Value ~expected_table
setup =
c_1 = ["a", ["1", "4", "7", "10"]]
c_2 = ["b", [2, Nothing, 8, 11]]
c_3 = ["c", [Nothing, 6, 9, 12]]
expected_table = Table.new [c_1, c_2, c_3]
Data.Value expected_table
Test.group "Table.from Text" <|
Test.specify "should create a table from a textual CSV" <|
add_specs suite_builder =
suite_builder.group "Table.from Text" group_builder->
data = Data.setup
group_builder.specify "should create a table from a textual CSV" <|
file_contents = (enso_project.data / "simple_empty.csv") . read_text
table = Table.from file_contents (format = Delimited ",")
table.should_equal expected_table
table.should_equal data.expected_table
Test.group "File.read (Delimited)" <|
Test.specify "should create a table from a CSV in a file" <|
suite_builder.group "File.read (Delimited)" group_builder->
data = Data.setup
group_builder.specify "should create a table from a CSV in a file" <|
file = (enso_project.data / "simple_empty.csv")
table = file.read
table.should_equal expected_table
table.should_equal data.expected_table
Test.specify "should correctly infer types of varied-type columns" <|
group_builder.specify "should correctly infer types of varied-type columns" <|
varied_column = (enso_project.data / "varied_column.csv") . read
c_1 = ["Column 1", ["2005-02-25", "2005-02-28", "4", "2005-03-02", Nothing, "2005-03-04", "2005-03-07", "2005-03-08"]]
# We can re-enable this once date support is improved.
@ -38,15 +47,15 @@ spec =
expected = Table.new [c_1, c_3, c_4, c_5, c_6]
varied_column.select_columns [0, 2, 3, 4, 5] . should_equal expected
Test.specify "should handle duplicated columns" <|
group_builder.specify "should handle duplicated columns" <|
csv = """
name,x,y,x,y
foo,10,20,30,20
t = Table.from csv (format = Delimited ",")
t.columns.map .name . should_equal ['name', 'x', 'y', 'x 1', 'y 1']
Test.group 'Writing' <|
Test.specify 'should properly serialize simple tables' <|
suite_builder.group 'Writing' group_builder->
group_builder.specify 'should properly serialize simple tables' <|
varied_column = (enso_project.data / "varied_column.csv") . read
res = Text.from varied_column format=(Delimited ",")
exp = normalize_lines <| '''
@ -61,7 +70,7 @@ spec =
2005-03-08,2005-03-08,8,8,8.0,osiem
res.should_equal exp
Test.specify 'should properly handle quoting of records and allow specifying separators' <|
group_builder.specify 'should properly handle quoting of records and allow specifying separators' <|
c1 = ['name', ['Robert");DROP TABLE Students;--', 'This;Name;;Is""Strange', 'Marcin,,']]
c2 = ['grade', [10, 20, 'hello;world']]
t = Table.new [c1, c2]
@ -75,7 +84,7 @@ spec =
res = Text.from t format=(Delimited ";")
res.should_equal expected
Test.specify 'should allow forced quoting of records' <|
group_builder.specify 'should allow forced quoting of records' <|
c1 = ['name', ['Robert");DROP TABLE Students;--', 'This;Name;;Is""Strange', 'Marcin,,']]
c2 = ['grade', [10, 20, 'hello;world']]
t = Table.new [c1, c2]
@ -90,7 +99,7 @@ spec =
res.should_equal expected
Test.specify 'should write CSV to a file' <|
group_builder.specify 'should write CSV to a file' <|
varied_column = (enso_project.data / "varied_column.csv") . read
out = enso_project.data / "transient" / "out.csv"
out.delete_if_exists
@ -108,8 +117,8 @@ spec =
out.read_text.should_equal exp
out.delete_if_exists
Test.group "Integration" <|
Test.specify "should be able to round-trip a table with all kinds of weird characters to CSV and back" <|
suite_builder.group "Integration" group_builder->
group_builder.specify "should be able to round-trip a table with all kinds of weird characters to CSV and back" <|
names = ['Śłąęźż");DROP TABLE Students;--', 'This;Name;;Is""Strange', 'Marcin,,', '\'', 'a\n\nb', 'a\tc', Nothing, Nothing, Nothing, '42', '💁👌🎍😍', '', 'null?\0?', 'FFFD', '\uFFFD', '\r\n', 'a\r\nb\n\rc\rd\ne', 'what about these # ?? // /* hmm */ is it included?', 'and the rare \v vertical tab?']
d = Date_Time.new 2015 10 29 23 55 49
t = Table.new [['name', names], ['header\nspanning\nmultiple\nlines', names.map_with_index ix-> _-> ix*10], ['dates', names.map_with_index ix-> _-> d + Duration.new hours=3*ix]]
@ -136,4 +145,8 @@ spec =
out2.delete_if_exists
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
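
Note how the CSV file above calls `Data.setup` once per group. Constructing the `Data` value is cheap because `~expected_table` is lazy; the table is only materialized when a spec in that group forces the field, so groups filtered out of a run never pay the setup cost. A condensed sketch of the same pattern (table contents simplified):

```
from Standard.Base import all
from Standard.Table import all
from Standard.Test_New import all

type Data
    Value ~expected_table

    setup =
        Data.Value (Table.new [["a", [1, 2]], ["b", [3, 4]]])

add_specs suite_builder =
    suite_builder.group "First group" group_builder->
        data = Data.setup
        group_builder.specify "should see both rows" <|
            data.expected_table.row_count . should_equal 2
    suite_builder.group "Second group" group_builder->
        data = Data.setup
        group_builder.specify "should see both columns" <|
            data.expected_table.column_names . should_equal ["a", "b"]
```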

View File

@ -7,14 +7,13 @@ from Standard.Table import Table, Column, Data_Formatter, Quote_Style, Delimited
from Standard.Table.Extensions.Table_Conversions import all
from Standard.Table.Errors import all
from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
import project.Util
spec =
Test.group "Delimited File Parsing" <|
Test.specify "should load a simple table with headers" <|
add_specs suite_builder =
suite_builder.group "Delimited File Parsing" group_builder->
group_builder.specify "should load a simple table with headers" <|
c_1 = ["a", ['1', '4', '7', '10']]
c_2 = ["b", ['2', Nothing, '8', '11']]
c_3 = ["c", [Nothing, '6', '9', '12']]
@ -22,7 +21,7 @@ spec =
simple_empty = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=True value_formatter=Nothing)
simple_empty.should_equal expected_table
Test.specify "should load a simple table without headers" <|
group_builder.specify "should load a simple table without headers" <|
c_1 = ["Column 1", ['a', '1', '4', '7', '10']]
c_2 = ["Column 2", ['b', '2', Nothing, '8', '11']]
c_3 = ["Column 3", ['c', Nothing, '6', '9', '12']]
@ -30,7 +29,7 @@ spec =
simple_empty = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=False value_formatter=Nothing)
simple_empty.should_equal expected_table
Test.specify "should work in presence of missing headers" <|
group_builder.specify "should work in presence of missing headers" <|
action on_problems = Data.read (enso_project.data / "missing_header.csv") (Delimited "," headers=True value_formatter=Nothing) on_problems
tester table =
table.columns.map .name . should_equal ["a", "Column 1", "c", "Column 2", "d"]
@ -42,7 +41,7 @@ spec =
problems = [Invalid_Column_Names.Error [Nothing, Nothing]]
Problems.test_problem_handling action problems tester
Test.specify "should infer headers based on the first two rows" <|
group_builder.specify "should infer headers based on the first two rows" <|
t1 = Data.read (enso_project.data / "data_small.csv") (Delimited ",")
t1.columns.map .name . should_equal ["Code", "Index", "Flag", "Value", "ValueWithNothing", "TextWithNothing", "Hexadecimal", "Leading0s", "QuotedNumbers", "Mixed Types"]
@ -74,7 +73,7 @@ spec =
t6.at "1" . to_vector . should_equal ["y"]
t6.at "x" . to_vector . should_equal [2]
Test.specify "should not use the first row as headers if it is the only row, unless specifically asked to" <|
group_builder.specify "should not use the first row as headers if it is the only row, unless specifically asked to" <|
t1 = Data.read (enso_project.data / "one_row.csv") (Delimited ",")
t1.columns.map .name . should_equal ["Column 1", "Column 2", "Column 3"]
t1.at "Column 1" . to_vector . should_equal ["x"]
@ -86,11 +85,11 @@ spec =
t2.row_count . should_equal 0
t2.at "x" . to_vector . should_equal []
Test.specify "should raise an informative error when loading an empty file" <|
group_builder.specify "should raise an informative error when loading an empty file" <|
t = Data.read (enso_project.data / "empty.txt") (Delimited "," headers=True value_formatter=Nothing)
t.should_fail_with Empty_File_Error
Test.specify "should correctly handle file opening issues" <|
group_builder.specify "should correctly handle file opening issues" <|
nonexistent_file = enso_project.data / "a_filename_that_does_not_exist.foobar"
r1 = Data.read nonexistent_file (Delimited "," headers=True value_formatter=Nothing)
r1.should_fail_with File_Error
@ -101,7 +100,7 @@ spec =
r2.should_fail_with File_Error
r2.catch.should_be_a File_Error.IO_Error
Test.specify "should work with all kinds of line endings" <|
group_builder.specify "should work with all kinds of line endings" <|
path name = enso_project.data / 'transient' / name
create_file name ending_style =
lines = ['a,b,c', 'd,e,f', '1,2,3']
@ -128,7 +127,7 @@ spec =
['crlf.csv', 'lf.csv', 'cr.csv', 'mixed.csv'].each (path >> .delete)
Test.specify "should allow to override line endings style" <|
group_builder.specify "should allow to override line endings style" <|
file = enso_project.data / "transient" / "lf.csv"
lines = ['a,b,c', 'd,e,f', '1,2,3']
text = lines.join '\n'
@ -155,21 +154,21 @@ spec =
table . should_equal reference_table
file_2.delete
Test.specify "should work with Windows-1252 encoding" <|
group_builder.specify "should work with Windows-1252 encoding" <|
table = Data.read (enso_project.data / "windows.csv") (Delimited "," headers=True encoding=Encoding.windows_1252) Problem_Behavior.Report_Error
table.columns.map .name . should_equal ['a', 'b', 'c']
table.at 'a' . to_vector . should_equal ['$¢']
table.at 'b' . to_vector . should_equal ['¤']
table.at 'c' . to_vector . should_equal ['¥']
Test.specify "should work with UTF-16 encoding" <|
group_builder.specify "should work with UTF-16 encoding" <|
table = Data.read (enso_project.data / "utf16.csv") (Delimited "," headers=True encoding=Encoding.utf_16_be) Problem_Behavior.Report_Error
table.columns.map .name . should_equal ['ą', '🚀b', 'ć😎']
table.at 'ą' . to_vector . should_equal ['ą']
table.at '🚀b' . to_vector . should_equal ['✨🚀🚧😍😃😍😎😙😉☺']
table.at 'ć😎' . to_vector . should_equal ['แมวมีสี่ขา']
Test.specify "should report errors when encountering malformed characters" <|
group_builder.specify "should report errors when encountering malformed characters" <|
utf8_file = (enso_project.data / "transient" / "utf8_invalid.csv")
utf8_bytes = [97, 44, 98, 44, 99, 10, -60, -123, 44, -17, -65, -65, 44, -61, 40, -61, 40, 10]
utf8_bytes.write_bytes utf8_file
@ -196,7 +195,7 @@ spec =
problems_2 = [Encoding_Error.Error "Encoding issues at byte 22."]
Problems.test_problem_handling action_2 problems_2 tester_2
Test.specify "should handle duplicated columns" <|
group_builder.specify "should handle duplicated columns" <|
action on_problems = Data.read (enso_project.data / "duplicated_columns.csv") (Delimited "," headers=True value_formatter=Nothing) on_problems
tester table =
table.columns.map .name . should_equal ['a', 'b', 'c', 'a 1']
@ -205,7 +204,7 @@ spec =
problems = [Duplicate_Output_Column_Names.Error ['a']]
Problems.test_problem_handling action problems tester
Test.specify "should handle quotes" <|
group_builder.specify "should handle quotes" <|
t1 = Data.read (enso_project.data / "double_quoted.csv") (Delimited "," headers=True value_formatter=Nothing)
t1.at 'a' . to_vector . should_equal ['a, x', '"a']
t1.at 'c' . to_vector . should_equal ['3', '"']
@ -218,13 +217,13 @@ spec =
t3.at 'b' . to_vector . should_equal ['z"']
t3.at 'c' . to_vector . should_equal ['a']
Test.specify "should support rows spanning multiple lines if quoted" <|
group_builder.specify "should support rows spanning multiple lines if quoted" <|
t1 = Data.read (enso_project.data / "multiline_quoted.csv") (Delimited "," headers=True value_formatter=Nothing)
t1.at 'a' . to_vector . should_equal ['1', '4']
t1.at 'b' . to_vector . should_equal ['start\n\ncontinue', '5']
t1.at 'c' . to_vector . should_equal ['3', '6']
Test.specify "should fail in presence of a mismatched quote" <|
group_builder.specify "should fail in presence of a mismatched quote" <|
[Problem_Behavior.Report_Error, Problem_Behavior.Report_Warning, Problem_Behavior.Ignore].each pb->
format = (Delimited "," headers=True value_formatter=Nothing)
r1 = Data.read (enso_project.data / "mismatched_quote.csv") format on_problems=pb
@ -250,7 +249,7 @@ spec =
r5.should_fail_with File_Error
r5.catch.should_be_a File_Error.Corrupted_Format
Test.specify "should fail in presence of a mismatched quote (2)" pending="ToDo: To be fixed in https://github.com/enso-org/enso/issues/5839" <|
group_builder.specify "should fail in presence of a mismatched quote (2)" pending="ToDo: To be fixed in https://github.com/enso-org/enso/issues/5839" <|
[Problem_Behavior.Report_Error, Problem_Behavior.Report_Warning, Problem_Behavior.Ignore].each pb->
format = (Delimited "," headers=True value_formatter=Nothing)
format3 = format . with_quotes quote_escape="\"
@ -263,7 +262,7 @@ spec =
r6.catch.should_be_a File_Error.Corrupted_Format
f6.delete
Test.specify "should handle quotes if they are opened in the middle of an unquoted cell in a sane way" pending="ToDo: To be fixed in https://github.com/enso-org/enso/issues/5839" <|
group_builder.specify "should handle quotes if they are opened in the middle of an unquoted cell in a sane way" pending="ToDo: To be fixed in https://github.com/enso-org/enso/issues/5839" <|
t1 = Data.read (enso_project.data / "mismatched_quote_at_end.csv") (Delimited "," headers=True value_formatter=Nothing)
t1.column_names . should_equal ["a", "b", "c"]
t1.at 'a' . to_vector . should_equal ['1', 'abc', '7']
@ -283,7 +282,7 @@ spec =
t3.column_names.should_equal ["A", "B", "C"]
t3.print
Test.specify "should handle too long and too short rows" <|
group_builder.specify "should handle too long and too short rows" <|
action keep_invalid_rows on_problems =
Data.read (enso_project.data / "varying_rows.csv") (Delimited "," headers=True keep_invalid_rows=keep_invalid_rows value_formatter=Nothing) on_problems=on_problems
@ -321,7 +320,7 @@ spec =
r3.at 'b' . to_vector . should_equal ['2', '0', '5']
r3.at 'c' . to_vector . should_equal ['3', Nothing, '6']
Test.specify "should aggregate invalid rows over some limit" <|
group_builder.specify "should aggregate invalid rows over some limit" <|
action on_problems =
Data.read (enso_project.data / "many_invalid_rows.csv") (Delimited "," headers=True keep_invalid_rows=False value_formatter=Nothing) on_problems
@ -333,7 +332,7 @@ spec =
problems = [Invalid_Row.Error 3 Nothing ['1'] 3, Invalid_Row.Error 4 Nothing ['2'] 3, Invalid_Row.Error 5 Nothing ['3'] 3, Invalid_Row.Error 6 Nothing ['4'] 3, Invalid_Row.Error 8 Nothing ['6'] 3, Invalid_Row.Error 9 Nothing ['7'] 3, Invalid_Row.Error 10 Nothing ['8'] 3, Invalid_Row.Error 11 Nothing ['9'] 3, Invalid_Row.Error 12 Nothing ['10'] 3, Invalid_Row.Error 13 Nothing ['11'] 3, Additional_Invalid_Rows.Error 3]
Problems.test_problem_handling action problems tester
Test.specify "should allow to skip rows" <|
group_builder.specify "should allow to skip rows" <|
t1 = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=False skip_rows=3 value_formatter=Nothing)
t1.at "Column 1" . to_vector . should_equal ['7', '10']
@ -341,7 +340,7 @@ spec =
t2.columns.map .name . should_equal ['7', '8', '9']
t2.at "7" . to_vector . should_equal ['10']
Test.specify "should allow to set a limit of rows to read" <|
group_builder.specify "should allow to set a limit of rows to read" <|
t1 = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=False row_limit=2 value_formatter=Nothing)
t1.at "Column 1" . to_vector . should_equal ['a', '1']
@ -363,7 +362,7 @@ spec =
t6 = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=False skip_rows=3 row_limit=1000 value_formatter=Nothing)
t6.at "Column 1" . to_vector . should_equal ['7', '10']
Test.specify "should check arguments" <|
group_builder.specify "should check arguments" <|
path = (enso_project.data / "simple_empty.csv")
pb = Problem_Behavior.Report_Error
path.read (Delimited "," headers=False . with_quotes quote='abc') pb . should_fail_with Illegal_Argument
@ -371,7 +370,7 @@ spec =
path.read (Delimited "," headers=False . with_quotes quote_escape='//') pb . should_fail_with Illegal_Argument
path.read (Delimited 'a\u{301}' headers=False) pb . should_fail_with Illegal_Argument
Test.specify "should correctly guess column types" <|
group_builder.specify "should correctly guess column types" <|
t = (enso_project.data / "data_small.csv") . read (Delimited "," headers=True)
t.at "Code" . to_vector . should_equal ["gxl", "wca", "nfw", "der"]
t.at "Index" . to_vector . should_equal [7, 0, 1, 7]
@ -387,7 +386,7 @@ spec =
t2 = (enso_project.data / "data_small.csv") . read (Delimited "," headers=True value_formatter=(Data_Formatter.Value allow_leading_zeros=True))
t2.at "Leading0s" . to_vector . should_equal [1, 2, 123, Nothing]
Test.specify "should be able to detect types automatically" <|
group_builder.specify "should be able to detect types automatically" <|
t1 = (enso_project.data / "data_small.csv") . read
t1.at "Code" . to_vector . should_equal ["gxl", "wca", "nfw", "der"]
t1.at "Index" . to_vector . should_equal [7, 0, 1, 7]
@ -398,7 +397,7 @@ spec =
t2.at "c" . to_vector . should_equal [3, 6]
t2.columns.map .name . should_equal ["a", "b", "c"]
Test.specify "should be able to read in a file without splitting it to columns" <|
group_builder.specify "should be able to read in a file without splitting it to columns" <|
t1 = (enso_project.data / "data_small.csv") . read (Delimited "" headers=False)
expected = ['Code,Index,Flag,Value,ValueWithNothing,TextWithNothing,"Hexadecimal",Leading0s,QuotedNumbers,"Mixed Types"']
+ ['gxl,7,True,38.76109,63.13, pq6igd2wyd ,4DD4675B,001,"1","33"']
@ -407,7 +406,7 @@ spec =
+ ['der,7,True,0.86658,,,F32E1EFE,,"34",True']
t1.at 0 . to_vector . should_equal expected
Test.specify "should be able to parse raw text" <|
group_builder.specify "should be able to parse raw text" <|
text1 = """
a,b,c
1,2,3
@ -424,19 +423,19 @@ spec =
t2.at "a" . to_vector . should_equal [1, 3]
t2.at "b" . to_vector . should_equal [2, 4]
Test.specify "should be able to read column names starting with #" <|
group_builder.specify "should be able to read column names starting with #" <|
reference_table = Table.new [["#", ["a", ";1", "5"]], ["x", [42, 2, 6]], ["y", ["c # comment??", "3", "7;comment?"]]]
table = Data.read (enso_project.data / "comments.csv")
table.should_equal reference_table
Test.specify "should be able to handle comments if enabled" <|
group_builder.specify "should be able to handle comments if enabled" <|
table_hash = Table.new [["a", [";1", "5"]], ["42", [2, 6]], ["c # comment??", ["3", "7;comment?"]]]
table_semicolon = Table.new [["#", ["a", "5"]], ["x", [42, 6]], ["y", ["c # comment??", "7;comment?"]]]
Data.read (enso_project.data / "comments.csv") (Delimited ',' . with_comments . with_headers) . should_equal table_hash
Data.read (enso_project.data / "comments.csv") (Delimited ',' . with_comments ';' . with_headers) . should_equal table_semicolon
Test.specify "should manage to parse a file containing null characters" pending="Parsing NULL character in CSV currently does not handle some edge cases. It may need to be revised. See issue https://github.com/enso-org/enso/issues/5655" <|
group_builder.specify "should manage to parse a file containing null characters" pending="Parsing NULL character in CSV currently does not handle some edge cases. It may need to be revised. See issue https://github.com/enso-org/enso/issues/5655" <|
f = enso_project.data / "transient" / "slash_zero.csv"
f.delete_if_exists
txt = 'a,b\n\0,\0\nx\0y,zw\na#b,c\0d'
@ -449,7 +448,7 @@ spec =
f.delete_if_exists
Test.specify "should allow to build the Delimited configuration using builders" <|
group_builder.specify "should allow to build the Delimited configuration using builders" <|
Delimited "," . clone . should_equal (Delimited ",")
Delimited "," encoding=Encoding.ascii skip_rows=123 row_limit=100 headers=False value_formatter=Nothing . clone . should_equal (Delimited "," headers=False value_formatter=Nothing skip_rows=123 row_limit=100 encoding=Encoding.ascii)
Delimited "," . clone quote_style=Quote_Style.No_Quotes headers=False value_formatter=Nothing . should_equal (Delimited "," headers=False value_formatter=Nothing quote_style=Quote_Style.No_Quotes)
@ -474,4 +473,8 @@ spec =
Delimited ',' comment_character='#' . without_comments . should_equal (Delimited ',' comment_character=Nothing)
Delimited ',' . with_line_endings Line_Ending_Style.Unix . should_equal (Delimited ',' line_endings=Line_Ending_Style.Unix)
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
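
The parsing file above also confirms that `Problems.test_problem_handling` works unchanged under `from Standard.Test_New import all`: a spec supplies an action parameterized over the problem behaviour, the expected problems, and a tester for the successful result. Condensed from the missing-header test above, assuming the surrounding module's imports:

```
group_builder.specify "should work in presence of missing headers" <|
    # The action takes the problem-handling behaviour as its argument...
    action on_problems =
        path = enso_project.data / "missing_header.csv"
        Data.read path (Delimited "," headers=True value_formatter=Nothing) on_problems
    # ...the tester validates the value produced on success...
    tester table =
        table.columns.map .name . should_equal ["a", "Column 1", "c", "Column 2", "d"]
    # ...and the harness checks every Problem_Behavior mode in one go.
    problems = [Invalid_Column_Names.Error [Nothing, Nothing]]
    Problems.test_problem_handling action problems tester
```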

View File

@ -6,8 +6,7 @@ import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
from Standard.Table import Table, Column, Data_Formatter, Quote_Style, Match_Columns, Delimited
from Standard.Table.Errors import all
from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Util import all
@ -22,10 +21,10 @@ join_lines lines trailing_newline=True =
eol = default_line_endings_for_new_files.to_text
if trailing_newline then lines.join eol suffix=eol else lines.join eol
spec =
add_specs suite_builder =
line_ending_pairs = [[Line_Ending_Style.Unix, '\n'], [Line_Ending_Style.Windows, '\r\n'], [Line_Ending_Style.Mac_Legacy, '\r']]
Test.group "Delimited File Writing" <|
Test.specify "should correctly write a simple table and return the written file object on success" <|
suite_builder.group "Delimited File Writing" group_builder->
group_builder.specify "should correctly write a simple table and return the written file object on success" <|
table = Table.new [["A", [1,2,3]], ["B", [1.0,1.5,2.2]], ["C", ["x","y","z"]], ["D", ["a", 2, My_Type.Value 10]]]
file = (enso_project.data / "transient" / "written.csv")
file.delete_if_exists
@ -39,7 +38,7 @@ spec =
text.should_equal expected_text
file.delete
Test.specify "should allow to specify line ending style" <|
group_builder.specify "should allow to specify line ending style" <|
table = Table.new [["a", ["b", "c"]], ["d", ["e", "f"]]]
lines = ["a,d", "b,e", "c,f"]
line_ending_pairs.each setting->
@ -51,7 +50,7 @@ spec =
text.should_equal (lines.join separator suffix=separator)
file.delete
Test.specify 'should quote values that contain the delimiter, newline or quotes, in the [,""] variant' <|
group_builder.specify 'should quote values that contain the delimiter, newline or quotes, in the [,""] variant' <|
data_formatter = Data_Formatter.Value decimal_point=","
table = Table.new [['The Column "Name"', ["foo","'bar'",'"baz"', 'one, two, three', 'a\nb']], ["Hello, Column?", [1.0, 1000000.5, 2.2, -1.5, 0.0]]]
file = (enso_project.data / "transient" / "quotes1.csv")
@ -69,7 +68,7 @@ spec =
text.should_equal expected_text
file.delete
Test.specify 'should quote values that contain the delimiter, newline or quotes, in the [;\\\"] variant' <|
group_builder.specify 'should quote values that contain the delimiter, newline or quotes, in the [;\\\"] variant' <|
data_formatter = Data_Formatter.Value thousand_separator="'"
table = Table.new [['"A"', ["foo",'!"baz" ', 'one, two, three', "a;b; c ", "a\b", 'n\nm']], ["B", [1000000.5, 1000.0, 0.0, -1.2, Nothing, 33]]]
file = (enso_project.data / "transient" / "quotes2.csv")
@ -88,7 +87,7 @@ spec =
text.should_equal expected_text
file.delete
Test.specify "should quote values that contain the delimiter, newline or quotes, in the [\t''] variant" <|
group_builder.specify "should quote values that contain the delimiter, newline or quotes, in the [\t''] variant" <|
data_formatter = Data_Formatter.Value thousand_separator="'"
table = Table.new [['"A"', [Nothing,"The 'thing'.", 'one, "two", three', 'a\tb', 'x\ny', 'w\vz']], ["B\C", [1000000.5, 1000.0, Nothing, -1.2, 2.0, 42.0]]]
file = (enso_project.data / "transient" / "quotes3.csv")
@ -107,7 +106,7 @@ spec =
text.should_equal expected_text
file.delete
Test.specify "should correctly distinguish empty text from a missing value" <|
group_builder.specify "should correctly distinguish empty text from a missing value" <|
table = Table.new [["A", [1,Nothing,3]], ["B", [Nothing,"","abc"]]]
file = (enso_project.data / "transient" / "empty_vs_null.csv")
file.delete_if_exists
@ -121,7 +120,7 @@ spec =
text.should_equal expected_text
file.delete
Test.specify "should quote values containing the comment symbol if comments are enabled" <|
group_builder.specify "should quote values containing the comment symbol if comments are enabled" <|
table = Table.new [["#", ['b', 'x', '#']], ["B", [Nothing,"#","abc"]]]
file = (enso_project.data / "transient" / "comments.csv")
file.delete_if_exists
@ -142,7 +141,7 @@ spec =
text_2.should_equal expected_text_2
file.delete
Test.specify 'should not quote values if quoting is disabled' <|
group_builder.specify 'should not quote values if quoting is disabled' <|
format = Delimited "," value_formatter=(Data_Formatter.Value decimal_point=",") . without_quotes
table = Table.new [['The Column "Name"', ["foo","'bar'",'"baz"', 'one, two, three']], ["Hello, Column?", [1.0, 1000000.5, 2.2, -1.5]]]
file = (enso_project.data / "transient" / "quote_disabled.csv")
@ -168,7 +167,7 @@ spec =
text.should_equal expected_text
file.delete
Test.specify 'should allow to always quote text and custom values, but for non-text primitives only if absolutely necessary' <|
group_builder.specify 'should allow to always quote text and custom values, but for non-text primitives only if absolutely necessary' <|
format = Delimited "," value_formatter=(Data_Formatter.Value thousand_separator='"' . with_datetime_formats date_formats=["dddd, d MMM y"]) . with_quotes always_quote=True quote_escape='\\'
table = Table.new [['The Column "Name"', ["foo","'bar'",'"baz"', 'one, two, three']], ["B", [1.0, 1000000.5, 2.2, -1.5]], ["C", ["foo", My_Type.Value 44, (Date.new 2022 06 21), 42]], ["D", [1,2,3,4000]], ["E", [Nothing, (Time_Of_Day.new 13 55), Nothing, Nothing]]]
file = (enso_project.data / "transient" / "quote_always.csv")
@ -184,7 +183,7 @@ spec =
text.should_equal expected_text
file.delete
Test.specify "should correctly handle alternative encodings" <|
group_builder.specify "should correctly handle alternative encodings" <|
table = Table.new [["ąęćś", [0]], ["ß", ["żółw 🐢"]]]
file = (enso_project.data / "transient" / "utf16.csv")
file.delete_if_exists
@ -196,7 +195,7 @@ spec =
text.should_equal expected_text
file.delete
Test.specify "should correctly handle encoding errors" <|
group_builder.specify "should correctly handle encoding errors" <|
table = Table.new [["A", [0, 1]], ["B", ["słówka", "🐢"]]]
file = (enso_project.data / "transient" / "ascii.csv")
file.delete_if_exists
@ -214,7 +213,7 @@ spec =
Problems.get_attached_warnings result . should_equal [Encoding_Error.Error msg]
file.delete
Test.specify "should allow only text columns if no formatter is specified" <|
group_builder.specify "should allow only text columns if no formatter is specified" <|
format = Delimited "," value_formatter=Nothing
table_1 = Table.new [["A", ["x", "y"]], ["B", ["z", "w"]]]
file_1 = (enso_project.data / "transient" / "textonly.csv")
@ -239,7 +238,7 @@ spec =
file_1.delete
file_2.delete
Test.specify "should create a new file in append mode if it didn't exist" <|
group_builder.specify "should create a new file in append mode if it didn't exist" <|
table = Table.new [["A", [1,2,3]], ["B", [1.0,1.5,2.2]], ["C", ["x","y","z"]]]
file = (enso_project.data / "transient" / "append_nonexistent.csv")
file.delete_if_exists
@ -248,7 +247,7 @@ spec =
got_table.should_equal table
file.delete
Test.specify "should correctly append to an empty file" <|
group_builder.specify "should correctly append to an empty file" <|
table = Table.new [["A", [1,2,3]], ["B", [1.0,1.5,2.2]], ["C", ["x","y","z"]]]
file = (enso_project.data / "transient" / "append_empty.csv")
file.delete_if_exists
@ -258,7 +257,7 @@ spec =
got_table.should_equal table
file.delete
Test.specify "should correctly append to a file with a missing newline at EOF" <|
group_builder.specify "should correctly append to a file with a missing newline at EOF" <|
table = Table.new [["A", [1,2,3]], ["B", [1.0,1.5,2.2]], ["C", ["x","y","z"]]]
file = (enso_project.data / "transient" / "append_missing_newline.csv")
file.delete_if_exists
@ -269,7 +268,7 @@ spec =
text.should_equal (expected_lines.join '\r' suffix='\r')
file.delete
Test.specify "should append to a file, matching columns by name (headers=Infer)" <|
group_builder.specify "should append to a file, matching columns by name (headers=Infer)" <|
existing_table = Table.new [["A", [1,2]], ["B", [1.0,1.5]], ["C", ["x","y"]]]
appending_table = Table.new [["B", [33,44]], ["A", [Nothing, 0]], ["C", ["a","BB"]]]
file = (enso_project.data / "transient" / "append_by_name.csv")
@ -281,7 +280,7 @@ spec =
got_table.should_equal expected_table
file.delete
Test.specify "should append to a file, matching columns by name (headers=True)" <|
group_builder.specify "should append to a file, matching columns by name (headers=True)" <|
existing_table = Table.new [["0", [1,2]], ["B1", [1.0,1.5]], ["C", ["x","y"]]]
appending_table = Table.new [["B1", [33,44]], ["0", [Nothing, 0]], ["C", ["a","BB"]]]
file = (enso_project.data / "transient" / "append_by_name_2.csv")
@ -294,7 +293,7 @@ spec =
got_table.should_equal expected_table
file.delete
Test.specify "should fail when appending and matching columns by name but column names are not available in the file (headers=Infer)" <|
group_builder.specify "should fail when appending and matching columns by name but column names are not available in the file (headers=Infer)" <|
existing_table = Table.new [["A", [1,2]], ["B", [1.0,1.5]], ["C", ["x","y"]]]
appending_table = Table.new [["B", [33,44]], ["A", [Nothing, 0]], ["C", ["a","BB"]]]
file = (enso_project.data / "transient" / "append_no_header.csv")
@ -304,7 +303,7 @@ spec =
appending_table.write file on_existing_file=Existing_File_Behavior.Append . should_fail_with Illegal_Argument
file.delete
Test.specify "should fail when appending and matching columns by name but headers are disabled (headers=False)" <|
group_builder.specify "should fail when appending and matching columns by name but headers are disabled (headers=False)" <|
existing_table = Table.new [["A", [1,2]], ["B", [1.0,1.5]], ["C", ["x","y"]]]
appending_table = Table.new [["B", [33,44]], ["A", [Nothing, 0]], ["C", ["a","BB"]]]
file = (enso_project.data / "transient" / "append_no_header.csv")
@ -314,7 +313,7 @@ spec =
appending_table.write file no_header_format on_existing_file=Existing_File_Behavior.Append . should_fail_with Illegal_Argument
file.delete
Test.specify "should fail on column mismatch when appending to a file by name" <|
group_builder.specify "should fail on column mismatch when appending to a file by name" <|
existing_table = Table.new [["A", [1,2]], ["B", [1.0,1.5]]]
appending_table = Table.new [["B", [33,44]], ["X", [Nothing, 0]]]
file = (enso_project.data / "transient" / "append_no_header.csv")
@ -327,7 +326,7 @@ spec =
result.catch.to_display_text . should_equal "Columns mismatch. Missing from new data: [A] Extras in new data: [X]"
file.delete
Test.specify "should append to a file, matching columns by position" <|
group_builder.specify "should append to a file, matching columns by position" <|
existing_table = Table.new [["A", [1,2]], ["B", [1.0,1.5]], ["C", ["x","y"]]]
appending_table = Table.new [["AA", [33,44]], ["...", [Nothing, 0]], ["hmmm", ["a","BB"]]]
@ -353,7 +352,7 @@ spec =
test_append initial_file_format=no_headers append_format=base_format expected_table_without_headers
test_append initial_file_format=no_headers append_format=no_headers expected_table_without_headers
Test.specify "should fail on column count mismatch when appending to a file by position" <|
group_builder.specify "should fail on column count mismatch when appending to a file by position" <|
existing_table = Table.new [["A", [1,2]], ["B", [1.0,1.5]], ["C", ["x","y"]]]
appending_table_1 = Table.new [["B", [33,44]], ["X", [Nothing, 0]]]
appending_table_2 = Table.new [["B", [33,44]], ["X", [Nothing, 0]], ["Y", ["a","BB"]], ["Z", [Nothing, 0]]]
@ -375,7 +374,7 @@ spec =
file.delete
Test.specify "should use the same line ending style as existing data when appending" <|
group_builder.specify "should use the same line ending style as existing data when appending" <|
initial_table = Table.new [["a", [1, 2]], ["d", ["e", "f"]]]
table_to_append = Table.new [["a", ["x", "y"]], ["d", ["z", "w"]]]
expected_lines = ["a,d", "1,e", "2,f", "x,z", "y,w"]
@ -389,7 +388,7 @@ spec =
text.should_equal (expected_lines.join separator suffix=separator)
file.delete
Test.specify "should use Unix line ending style when appending to an empty or nonexistent file" <|
group_builder.specify "should use Unix line ending style when appending to an empty or nonexistent file" <|
empty_file = (enso_project.data / "transient" / "empty.csv")
"".write empty_file
nonexistent_file = (enso_project.data / "transient" / "nonexistent.csv")
@ -404,7 +403,7 @@ spec =
Data.read_text empty_file . should_equal expected_text
Data.read_text nonexistent_file . should_equal expected_text
Test.specify "should use the existing line ending style when appending to a file consisting of only comments" <|
group_builder.specify "should use the existing line ending style when appending to a file consisting of only comments" <|
initial_lines = ["# comment 1", "# comment 2"]
table_to_append = Table.new [["a", ["x", "y"]], ["b", ["z", "w"]]]
expected_lines = initial_lines + ["a,b", "x,z", "y,w"]
@ -420,7 +419,7 @@ spec =
text.should_equal expected_text
file.delete
Test.specify "should use the existing line ending style when appending to a file consisting of only comments missing last EOL" <|
group_builder.specify "should use the existing line ending style when appending to a file consisting of only comments missing last EOL" <|
initial_lines = ["# comment 1", "# comment 2 without EOL"]
table_to_append = Table.new [["a", ["x", "y"]], ["b", ["z", "w"]]]
expected_lines = initial_lines + ["a,b", "x,z", "y,w"]
@ -436,7 +435,7 @@ spec =
text.should_equal expected_text
file.delete
Test.specify "should correctly handle append edge cases" <|
group_builder.specify "should correctly handle append edge cases" <|
table = Table.new [["a", [1, 2]]]
file = (enso_project.data / "transient" / "append_edge_cases.csv")
file.delete_if_exists
@ -507,7 +506,7 @@ spec =
text.should_equal expected_text
file.delete
Test.specify "should use the existing line ending style when appending to a file consisting of only one comment with EOL" <|
group_builder.specify "should use the existing line ending style when appending to a file consisting of only one comment with EOL" <|
initial_line = "# comment 1 with EOL"
table_to_append = Table.new [["a", ["x", "y"]], ["b", ["z", "w"]]]
expected_lines = [initial_line] + ["a,b", "x,z", "y,w"]
@ -523,7 +522,7 @@ spec =
text.should_equal expected_text
file.delete
Test.specify "should use the Unix line ending style when appending to a file consisting of only one comment and missing the EOL" <|
group_builder.specify "should use the Unix line ending style when appending to a file consisting of only one comment and missing the EOL" <|
initial_lines = ["# comment 1 without EOL"]
table_to_append = Table.new [["a", ["x", "y"]], ["b", ["z", "w"]]]
expected_lines = initial_lines + ["a,b", "x,z", "y,w"]
@ -537,7 +536,7 @@ spec =
text.should_equal expected_text
file.delete
Test.specify "should fail if explicitly provided line endings do not match line endings in the file when appending" <|
group_builder.specify "should fail if explicitly provided line endings do not match line endings in the file when appending" <|
initial_table = Table.new [["a", [1, 2]]]
table_to_append = Table.new [["a", ["x", "y"]]]
file = (enso_project.data / "transient" / "endings_mismatch.csv")
@ -548,7 +547,7 @@ spec =
result.catch.message . should_equal "The explicitly provided line endings ('\n') do not match the line endings in the file ('\r')."
file.delete
Test.specify "should fail if the target file is read-only" <|
group_builder.specify "should fail if the target file is read-only" <|
f = enso_project.data / "transient" / "permission.csv"
f.delete_if_exists
@ -566,7 +565,7 @@ spec =
set_writable f True
f.delete
Test.specify "should fail if the parent directory does not exist" <|
group_builder.specify "should fail if the parent directory does not exist" <|
parent = enso_project.data / "transient" / "nonexistent"
parent.exists.should_be_false
@ -576,7 +575,7 @@ spec =
r1.should_fail_with File_Error
r1.catch.should_be_a File_Error.Not_Found
Test.specify "should warn about not-encodable characters according to the problem behaviour" <|
group_builder.specify "should warn about not-encodable characters according to the problem behaviour" <|
f = enso_project.data / "transient" / "encoding-errors.csv"
format = Delimited "," encoding=Encoding.ascii headers=True
@ -602,4 +601,8 @@ spec =
f.read Plain_Text . should_equal "Initial Content"
f.delete
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
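
The writing specs above all follow the same lifecycle: delete any leftover file, write, assert on the produced text, then delete again. A condensed sketch, assuming the surrounding module's imports and its `join_lines` helper (which joins lines using the default line ending for new files):

```
group_builder.specify "sketch: write, verify, clean up" <|
    table = Table.new [["A", [1, 2]], ["B", ["x", "y"]]]
    file = enso_project.data / "transient" / "sketch.csv"
    file.delete_if_exists
    # `write` returns the written file object on success.
    table.write file on_problems=Problem_Behavior.Report_Error . should_succeed . should_equal file
    file.read_text . should_equal (join_lines ["A,B", "1,x", "2,y"])
    file.delete
```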

View File

@ -11,8 +11,8 @@ from Standard.Table import Table, Match_Columns, Excel, Excel_Range, Data_Format
from Standard.Table.Errors import Invalid_Column_Names, Duplicate_Output_Column_Names, Invalid_Location, Range_Exceeded, Existing_Data, Column_Count_Mismatch, Column_Name_Mismatch, Empty_Sheet_Error
from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
import Standard.Examples
@ -20,20 +20,20 @@ import project.Util
polyglot java import org.enso.table_test_helpers.RandomHelpers
spec_fmt header file read_method sheet_count=5 =
Test.group header <|
Test.specify "should read a workbook in" <|
spec_fmt suite_builder header file read_method sheet_count=5 =
suite_builder.group header group_builder->
group_builder.specify "should read a workbook in" <|
wb = read_method file
wb.sheet_count . should_equal sheet_count
Test.specify "should read the specified sheet by index and use correct headers" <|
group_builder.specify "should read the specified sheet by index and use correct headers" <|
t = read_method file (Excel (Worksheet 1))
t.columns.map .name . should_equal ['Name', 'Quantity', 'Price']
t.at 'Name' . to_vector . should_equal ['blouse', 't-shirt', 'trousers', 'shoes', 'skirt', 'dress']
t.at 'Quantity' . to_vector . should_equal [10, 20, Nothing, 30, Nothing, 5]
t.at 'Price' . to_vector . should_equal [22.3, 32, 43.2, 54, 31, Nothing]
Test.specify "should read the specified sheet by index and properly format a table" <|
group_builder.specify "should read the specified sheet by index and properly format a table" <|
t = read_method file (Excel (Worksheet 2) headers=False)
t.columns.map .name . should_equal ['A', 'B', 'C', 'D', 'E']
t.at 'A' . to_vector . should_equal [Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing]
@ -42,21 +42,21 @@ spec_fmt header file read_method sheet_count=5 =
t.at 'D' . to_vector . should_equal [Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing]
t.at 'E' . to_vector . should_equal [Nothing, Nothing, Nothing, Nothing, Nothing, 'foo', Nothing]
Test.specify "should read the specified sheet by name and properly handle dates" <|
group_builder.specify "should read the specified sheet by name and properly handle dates" <|
t = read_method file (Excel (Worksheet 'Dates'))
t.columns.map .name . should_equal ['Student Name', 'Enrolment Date']
t.at 'Enrolment Date' . map .day . to_vector . should_equal [2, 26, 4, 24, 31, 7]
Test.specify "should give an informative error when reading an empty table" <|
group_builder.specify "should give an informative error when reading an empty table" <|
t = read_method file (Excel (Worksheet "Empty"))
t.should_fail_with Empty_Sheet_Error
Test.specify "should gracefully handle duplicate column names and formulas" <|
group_builder.specify "should gracefully handle duplicate column names and formulas" <|
t = read_method file (Excel (Worksheet "Duplicate Columns"))
t.columns.map .name . should_equal ['Item', 'Price', 'Quantity', 'Price 1']
t.at 'Price 1' . to_vector . should_equal [20, 40, 0, 60, 0, 10]
Test.specify "should allow reading with cell range specified" <|
group_builder.specify "should allow reading with cell range specified" <|
t_1 = read_method file (Excel (Cell_Range "Simple!B:C"))
t_1.columns.map .name . should_equal ['Quantity', 'Price']
t_1.at 'Quantity' . to_vector . should_equal [10, 20, Nothing, 30, Nothing, 5]
@ -73,280 +73,271 @@ spec_fmt header file read_method sheet_count=5 =
t_3.at 'B' . to_vector . should_equal [Nothing, 30]
t_3.at 'C' . to_vector . should_equal [43.2, 54]
spec_write suffix test_sheet_name =
Test.group ("Write " + suffix + " Files") <|
type Spec_Write_Data
Value ~data counter suffix
table self = self.data.at 0
clothes self = self.data.at 1
sub_clothes self = self.data.at 2
setup suffix =
table = enso_project.data/'varied_column.csv' . read
clothes = enso_project.data/'clothes.csv' . read
sub_clothes = clothes.select_columns [0, 1]
counter = Ref.new 0
create_out =
i = counter.get + 1
counter.put i
f = enso_project.data / "transient" / ("out" + i.to_text + "." + suffix)
Spec_Write_Data.Value [table, clothes, sub_clothes] counter suffix
teardown self =
enso_project.data/"transient" . list "out*" . each .delete
create_out self =
i = self.counter.get + 1
self.counter.put i
f = enso_project.data / "transient" / ("out" + i.to_text + "." + self.suffix)
Panic.rethrow f.delete_if_exists
f
Test.specify 'should write a table to non-existent file as a new sheet with headers; and return the file object on success' <|
out = create_out
table.write out on_problems=Report_Error . should_succeed . should_equal out
spec_write suite_builder suffix test_sheet_name =
suite_builder.group ("Write " + suffix + " Files") group_builder->
data = Spec_Write_Data.setup suffix
group_builder.teardown <|
data.teardown
group_builder.specify 'should write a table to non-existent file as a new sheet with headers; and return the file object on success' <|
out = data.create_out
data.table.write out on_problems=Report_Error . should_succeed . should_equal out
written = out.read
written.sheet_count . should_equal 1
written.sheet_names . should_equal ['EnsoSheet']
written.read 'EnsoSheet' . should_equal table
written.read 'EnsoSheet' . should_equal data.table
written.close
out.delete_if_exists . should_succeed
Test.specify 'should write a table to non-existent file in append mode as a new sheet with headers' <|
out = create_out
table.write out on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
group_builder.specify 'should write a table to non-existent file in append mode as a new sheet with headers' <|
out = data.create_out
data.table.write out on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
written = out.read
written.sheet_count . should_equal 1
written.sheet_names . should_equal ['EnsoSheet']
written.read 'EnsoSheet' . should_equal table
written.read 'EnsoSheet' . should_equal data.table
written.close
out.delete_if_exists . should_succeed
Test.specify 'should write a table to existing file overriding EnsoSheet' <|
out = create_out
table.write out on_problems=Report_Error . should_succeed
table.write out on_problems=Report_Error . should_succeed
group_builder.specify 'should write a table to existing file overriding EnsoSheet' <|
out = data.create_out
data.table.write out on_problems=Report_Error . should_succeed
data.table.write out on_problems=Report_Error . should_succeed
written_workbook = out.read
written_workbook.sheet_count . should_equal 1
written_workbook.sheet_names . should_equal ['EnsoSheet']
written_workbook.read 'EnsoSheet' . should_equal table
written_workbook.read 'EnsoSheet' . should_equal data.table
written_workbook.close
out.delete_if_exists . should_succeed
Test.specify 'should write a table to existing file in overwrite mode as a new sheet with headers' <|
out = create_out
group_builder.specify 'should write a table to existing file in overwrite mode as a new sheet with headers' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
table.write out (Excel (Worksheet "Another")) on_existing_file=Existing_File_Behavior.Overwrite on_problems=Report_Error . should_succeed
data.table.write out (Excel (Worksheet "Another")) on_existing_file=Existing_File_Behavior.Overwrite on_problems=Report_Error . should_succeed
written = out.read (Excel (Worksheet "Another"))
written.should_equal table
out.delete_if_exists . should_succeed
written.should_equal data.table
Test.specify 'should write a table to existing file in overwrite mode as a new sheet without headers' <|
out = create_out
group_builder.specify 'should write a table to existing file in overwrite mode as a new sheet without headers' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
table.write out (Excel (Worksheet "NoHeaders")) on_existing_file=Existing_File_Behavior.Overwrite on_problems=Report_Error . should_succeed
data.table.write out (Excel (Worksheet "NoHeaders")) on_existing_file=Existing_File_Behavior.Overwrite on_problems=Report_Error . should_succeed
written = out.read (Excel (Worksheet "NoHeaders"))
written.should_equal (data.table.rename_columns ['A', 'B', 'C', 'D', 'E', 'F'])
out.delete_if_exists . should_succeed
group_builder.specify 'should create new sheets at the start if index is 0' <|
out = data.create_out
data.table.write out (Excel (Worksheet 0)) on_problems=Report_Error . should_succeed
data.clothes.write out (Excel (Worksheet 0)) on_problems=Report_Error . should_succeed
read_1 = out.read (Excel (Worksheet "Sheet1"))
read_1 . should_equal data.table
read_2 = out.read (Excel (Worksheet "Sheet2"))
read_2 . should_equal data.clothes
read_3 = out.read (Excel (Sheet_Names))
read_3 . should_equal ["Sheet2", "Sheet1"]
out.delete_if_exists . should_succeed
group_builder.specify 'should write a table to specific single cell location of an existing sheet' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
data.table.write out (Excel (Cell_Range "Another!G1")) on_problems=Report_Error . should_succeed
written = out.read (Excel (Cell_Range "Another!G1"))
written.should_equal data.table
out.delete_if_exists . should_succeed
group_builder.specify 'should clear out an existing fixed range and replace' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
data.sub_clothes.write out (Excel (Cell_Range "Another!A1:D20")) on_problems=Report_Error . should_succeed
written = out.read (Excel (Cell_Range "Another!A1"))
written.should_equal data.sub_clothes
out.delete_if_exists . should_succeed
group_builder.specify 'should clear out an existing range and replace' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
data.sub_clothes.write out (Excel (Cell_Range "Another!A1")) on_problems=Report_Error . should_succeed
written = out.read (Excel (Cell_Range "Another!A1"))
written.should_equal data.sub_clothes
out.delete_if_exists . should_succeed
group_builder.specify 'should result in Invalid_Location error if trying to write in a bad location' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
data.sub_clothes.write out (Excel (Cell_Range "DoesNotExist!A1")) . should_fail_with Invalid_Location
data.sub_clothes.write out (Excel (Cell_Range "DoesNotExist!A1:B2")) . should_fail_with Invalid_Location
data.sub_clothes.write out (Excel (Cell_Range "SillyRangeName")) . should_fail_with Invalid_Location
out.delete_if_exists . should_succeed
group_builder.specify 'should result in Range_Exceeded error if trying to write in too small a range' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
data.sub_clothes.write out (Excel (Cell_Range "Another!A1:B2")) . should_fail_with Range_Exceeded
out.delete_if_exists . should_succeed
group_builder.specify 'should result in Existing_Data error if in Error mode and trying to replace' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
lmd = out.last_modified_time
r1 = data.sub_clothes.write out (Excel (Worksheet 1)) on_existing_file=Existing_File_Behavior.Error
r1.should_fail_with File_Error
r1.catch.should_be_a File_Error.Already_Exists
data.sub_clothes.write out (Excel (Worksheet "Another")) on_existing_file=Existing_File_Behavior.Error . should_fail_with File_Error
data.sub_clothes.write out (Excel (Cell_Range "Another!A1")) on_existing_file=Existing_File_Behavior.Error . should_fail_with File_Error
data.sub_clothes.write out (Excel (Cell_Range "Sheet1!A9")) on_existing_file=Existing_File_Behavior.Error . should_fail_with File_Error
Test.with_clue "the original file should remain unmodified: " <|
out.last_modified_time.should_equal lmd
out.delete_if_exists . should_succeed
group_builder.specify 'should not allow adding a new sheet if in Error mode, even if sheet is not clashing' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
lmd = out.last_modified_time
result = data.sub_clothes.write out (Excel (Worksheet "Testing")) on_existing_file=Existing_File_Behavior.Error
result.should_fail_with File_Error
result.catch.should_be_a File_Error.Already_Exists
Test.with_clue "the original file should remain unmodified: " <|
out.last_modified_time.should_equal lmd
out.delete_if_exists . should_succeed
group_builder.specify 'should write a table to non-existent file as a new sheet without headers' <|
out = data.create_out
data.table.write out (Excel (Worksheet "Sheet1") headers=False) on_problems=Report_Error . should_succeed
written = out.read
written.sheet_count . should_equal 1
written.sheet_names . should_equal ['Sheet1']
written.read 'Sheet1' . should_equal (data.table.rename_columns ['A', 'B', 'C', 'D', 'E', 'F'])
# We need to close the workbook to be able to delete it.
written.close
out.delete_if_exists . should_succeed
group_builder.specify 'should be able to append to a sheet by name' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['AA', ['d', 'e']], ['BB',[4, 5]], ['CC',[True, False]], ['DD', ['2022-01-20', '2022-01-21']]]
expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]]
extra_another.write out (Excel (Worksheet "Another")) on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
written = out.read (Excel (Worksheet "Another")) . select_columns [0, 1, 2]
written.should_equal expected
out.delete_if_exists . should_succeed
group_builder.specify 'should be able to append to a sheet by position' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['A', ['d', 'e']], ['B',[4, 5]], ['C',[True, False]], ['D', ['2022-01-20', '2022-01-21']]]
expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]]
extra_another.write out (Excel (Worksheet "Another")) on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed
written = out.read (Excel (Worksheet "Another")) . select_columns [0, 1, 2]
written.should_equal expected
out.delete_if_exists . should_succeed
group_builder.specify 'should be able to append to a sheet by name out of order' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']], ['DD', ['2022-01-20', '2022-01-21']]]
expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]]
extra_another.write out (Excel (Worksheet "Another")) on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
written = out.read (Excel (Worksheet "Another")) . select_columns [0, 1, 2]
written.should_equal expected
out.delete_if_exists . should_succeed
group_builder.specify 'should be able to append to a single cell by name' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['AA', ['d', 'e']], ['BB',[4, 5]], ['CC',[True, False]], ['DD', ['2022-01-20', '2022-01-21']]]
expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]]
extra_another.write out (Excel (Cell_Range "Another!A1")) on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
written = out.read (Excel (Worksheet "Another")) . select_columns [0, 1, 2]
written.should_equal expected
out.delete_if_exists . should_succeed
group_builder.specify 'should be able to append to a single cell by position' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['A', ['d', 'e']], ['B',[4, 5]], ['C',[True, False]], ['D', ['2022-01-20', '2022-01-21']]]
expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]]
extra_another.write out (Excel (Cell_Range "Another!A1")) on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed
written = out.read (Excel (Worksheet "Another")) . select_columns [0, 1, 2]
written.should_equal expected
out.delete_if_exists . should_succeed
group_builder.specify 'should be able to append to a single cell by name out of order' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']], ['DD', ['2022-01-20', '2022-01-21']]]
expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]]
extra_another.write out (Excel (Cell_Range "Another!A1")) on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
written = out.read (Excel (Worksheet "Another")) . select_columns [0, 1, 2]
written.should_equal expected
out.delete_if_exists . should_succeed
group_builder.specify 'should be able to append to a range by name' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['AA', ['d', 'e']], ['BB', [4, 5]], ['CC', [True, False]], ['DD', ['2022-01-20', '2022-01-21']]]
expected = Table.new [['AA', ['a', 'b', 'c', 'd', 'e']], ['BB', [1, 2, 3, 4, 5]], ['CC', [True, False, False, True, False]]]
extra_another.write out (Excel (Cell_Range "Another!A1:D6")) on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
written = out.read (Excel (Worksheet "Another")) . select_columns [0, 1, 2]
written.should_equal expected
out.delete_if_exists . should_succeed
group_builder.specify 'should be able to append to a range by position' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['A', ['d', 'e']], ['B',[4, 5]], ['C',[True, False]], ['D', ['2022-01-20', '2022-01-21']]]
expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]]
extra_another.write out (Excel (Cell_Range "Another!A1:D6")) on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed
written = out.read (Excel (Worksheet "Another")) . select_columns [0, 1, 2]
written.should_equal expected
out.delete_if_exists . should_succeed
group_builder.specify 'should be able to append to a range by name not in top left' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['AA', ['d', 'e']], ['BB',[4, 5]], ['CC',[True, False]], ['DD', ['2022-01-20', '2022-01-21']]]
expected = Table.new [['AA', ['f', 'g', 'h', 'd', 'e']], ['BB',[1, 2, 3, 4, 5]], ['CC',[True, False, False, True, False]]]
extra_another.write out (Excel (Cell_Range "Random!K9")) on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
written = out.read (Excel (Cell_Range "Random!K9")) . select_columns [0, 1, 2]
written.should_equal expected
out.delete_if_exists . should_succeed
group_builder.specify 'should be able to append to a range by name after deduplication of names' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['AA', ['d', 'e']], ['BB',[4, 5]], ['AA 1',[True, False]], ['BB 1', ['2022-01-20', '2022-01-21']]]
expected = Table.new [['AA', ['f', 'g', 'h', 'd', 'e']], ['BB',[1, 2, 3, 4, 5]], ['AA 1',[True, False, False, True, False]]]
extra_another.write out (Excel (Cell_Range "Random!S3")) on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
written = out.read (Excel (Cell_Range "Random!S3")) . select_columns [0, 1, 2]
written.should_equal expected
out.delete_if_exists . should_succeed
group_builder.specify 'should be able to append to a range by position not in top left' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['A', ['d', 'e']], ['B',[4, 5]], ['C',[True, False]], ['D', ['2022-01-20', '2022-01-21']]]
expected = Table.new [['AA', ['f', 'g', 'h', 'd', 'e']], ['BB',[1, 2, 3, 4, 5]], ['CC',[True, False, False, True, False]]]
extra_another.write out (Excel (Cell_Range "Random!K9")) on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed
written = out.read (Excel (Cell_Range "Random!K9")) . select_columns [0, 1, 2]
written.should_equal expected
out.delete_if_exists . should_succeed
group_builder.specify 'should be able to append to a range by name out of order' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']], ['DD', ['2022-01-20', '2022-01-21']]]
expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]]
extra_another.write out (Excel (Cell_Range "Another!A1:D6")) on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed
written = out.read (Excel (Worksheet "Another")) . select_columns [0, 1, 2]
written.should_equal expected
out.delete_if_exists . should_succeed
group_builder.specify 'should be able to write to a new dry run file' <|
out = data.create_out
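# With the Output context disabled, write performs a dry run: out is left untouched, the data goes to a temporary file, and the returned file carries a Dry_Run_Operation warning.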
temp = Context.Output.with_disabled <|
result = data.table.write out on_problems=Report_Error . should_succeed
Problems.expect_only_warning Dry_Run_Operation result
result.exists.should_be_true
@ -355,16 +346,16 @@ spec_write suffix test_sheet_name =
written = result.read
written.sheet_count . should_equal 1
written.sheet_names . should_equal ['EnsoSheet']
written.read 'EnsoSheet' . should_equal data.table
written.close
result
temp.delete_if_exists
Test.specify "should be able to write to a dry-run file, even if the dry-run workbook is open" <|
out = create_out
group_builder.specify "should be able to write to a dry-run file, even if the dry-run workbook is open" <|
out = data.create_out
out.exists.should_be_false
temp = Context.Output.with_disabled <|
result = data.table.write out on_problems=Report_Error . should_succeed
Problems.expect_only_warning Dry_Run_Operation result
result.exists.should_be_true
result
@ -375,7 +366,7 @@ spec_write suffix test_sheet_name =
opened_temp.sheet_names . should_equal ['EnsoSheet']
temp2 = Context.Output.with_disabled <|
result = data.table.write out (Excel (Worksheet "Another")) on_problems=Report_Error . should_succeed
Problems.expect_only_warning Dry_Run_Operation result
result.exists.should_be_true
result
@ -392,10 +383,10 @@ spec_write suffix test_sheet_name =
opened_temp.close
temp.delete_if_exists
Test.specify "should be able to write to a dry-run file multiple times if the dry-run file object is threaded through" <|
out = create_out
group_builder.specify "should be able to write to a dry-run file multiple times if the dry-run file object is threaded through" <|
out = data.create_out
temp1 = Context.Output.with_disabled <|
result = data.table.write out on_problems=Report_Error . should_succeed
Problems.expect_only_warning Dry_Run_Operation result
result.exists.should_be_true
result
@ -405,7 +396,7 @@ spec_write suffix test_sheet_name =
opened_temp.sheet_names . should_equal ['EnsoSheet']
temp2 = Context.Output.with_disabled <|
result = data.table.write temp1 (Excel (Worksheet "Another")) on_problems=Report_Error . should_succeed
Problems.expect_only_warning Dry_Run_Operation result
result.exists.should_be_true
result
@ -419,8 +410,8 @@ spec_write suffix test_sheet_name =
opened_temp.close
temp1.delete_if_exists
Test.specify "should be able to create a backup, even if it is currently open" <|
out = create_out
group_builder.specify "should be able to create a backup, even if it is currently open" <|
out = data.create_out
bak = out.parent / (out.name+".bak")
t1 = Table.new [["X", [1]]]
@ -448,92 +439,83 @@ spec_write suffix test_sheet_name =
opened_out.close
opened_backup.close
out.delete_if_exists . should_succeed
bak.delete_if_exists . should_succeed
group_builder.specify 'should be able to write to an existing empty file' <|
out = data.create_out
[].write_bytes out
out_bak = out.parent / (out.name+".bak")
data.table.write out on_problems=Report_Error . should_succeed . should_equal out
written = out.read
written.sheet_count . should_equal 1
written.sheet_names . should_equal ['EnsoSheet']
written.read 'EnsoSheet' . should_equal data.table
Test.with_clue "should have created a backup file: " <|
out_bak.exists.should_be_true
out_bak.size.should_equal 0
written.close
out.delete_if_exists . should_succeed
out_bak.delete_if_exists . should_succeed
group_builder.specify 'should fail to append to a sheet by name if missing columns' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']]]
extra_another.write out (Excel (Worksheet "Another")) on_existing_file=Existing_File_Behavior.Append . should_fail_with Column_Name_Mismatch
out.delete_if_exists . should_succeed
group_builder.specify 'should fail to append to a sheet by name if extra columns' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
lmd = out.last_modified_time
extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']], ['DD', ['2022-01-20', '2022-01-21']], ['EE', ['2022-01-20', '2022-01-21']]]
extra_another.write out (Excel (Worksheet "Another")) on_existing_file=Existing_File_Behavior.Append . should_fail_with Column_Name_Mismatch
out.last_modified_time.should_equal lmd
out.delete_if_exists . should_succeed
group_builder.specify 'should fail to append to a sheet by name if no headers' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
lmd = out.last_modified_time
extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']], ['DD', ['2022-01-20', '2022-01-21']], ['EE', ['2022-01-20', '2022-01-21']]]
extra_another.write out (Excel (Worksheet "NoHeaders")) on_existing_file=Existing_File_Behavior.Append . should_fail_with Illegal_Argument
extra_another.write out (Excel (Worksheet "Another") headers=False) on_existing_file=Existing_File_Behavior.Append . should_fail_with Illegal_Argument
out.last_modified_time.should_equal lmd
out.delete_if_exists . should_succeed
group_builder.specify 'should fail to append to a sheet by position if too few columns' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
lmd = out.last_modified_time
extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']]]
extra_another.write out (Excel (Worksheet "Another")) on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position . should_fail_with Column_Count_Mismatch
out.last_modified_time.should_equal lmd
out.delete_if_exists . should_succeed
group_builder.specify 'should fail to append to a sheet by position if too many columns' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
lmd = out.last_modified_time
extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']], ['DD', ['2022-01-20', '2022-01-21']], ['EE', ['2022-01-20', '2022-01-21']]]
extra_another.write out (Excel (Worksheet "Another")) on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position . should_fail_with Column_Count_Mismatch
out.last_modified_time.should_equal lmd
out.delete_if_exists . should_succeed
group_builder.specify 'should fail to append to a range by name if not large enough' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
lmd = out.last_modified_time
extra_another = Table.new [['AA', ['d', 'e']], ['BB',[4, 5]], ['CC',[True, False]], ['DD', ['2022-01-20', '2022-01-21']]]
extra_another.write out (Excel (Cell_Range "Another!A1:D5")) on_existing_file=Existing_File_Behavior.Append . should_fail_with Range_Exceeded
out.last_modified_time.should_equal lmd
out.delete_if_exists . should_succeed
group_builder.specify 'should fail to append to a range by name if it hits another table' <|
out = data.create_out
(enso_project.data / test_sheet_name) . copy_to out
lmd = out.last_modified_time
extra_another = Table.new [['AA', ['d', 'e']], ['BB',[4, 5]], ['CC',[True, False]], ['DD', ['2022-01-20', '2022-01-21']]]
extra_another.write out (Excel (Cell_Range "Random!B3")) on_existing_file=Existing_File_Behavior.Append . should_fail_with Existing_Data
out.last_modified_time.should_equal lmd
out.delete_if_exists . should_succeed
Test.specify "should fail if the target file is read-only" <|
group_builder.specify "should fail if the target file is read-only" <|
f = enso_project.data / "transient" / "permission."+suffix
if f.exists then Util.set_writable f True
f.delete_if_exists
@ -558,9 +540,9 @@ spec_write suffix test_sheet_name =
Util.set_writable f True
f.delete
Test.specify "should allow to write to a workbook that is open, and reflect that changes when the sheet is read again" <|
out = create_out
table.write out on_problems=Report_Error . should_succeed
group_builder.specify "should allow to write to a workbook that is open, and reflect that changes when the sheet is read again" <|
out = data.create_out
data.table.write out on_problems=Report_Error . should_succeed
workbook = out.read (Excel headers=True)
workbook.sheet_names.should_equal ["EnsoSheet"]
@ -570,7 +552,7 @@ spec_write suffix test_sheet_name =
w2 = out.read (Excel headers=True)
t1 = workbook.read "EnsoSheet"
t1.should_equal data.table
[Existing_File_Behavior.Backup, Existing_File_Behavior.Overwrite].each behavior-> Test.with_clue behavior.to_text+": " <|
t2 = Table.new [["X", [behavior.to_text, "B", "C", behavior.to_text+"..."]]]
@ -587,9 +569,8 @@ spec_write suffix test_sheet_name =
workbook.close
w2.close
out.delete_if_exists . should_succeed
Test.specify "should fail if the parent directory does not exist" <|
group_builder.specify "should fail if the parent directory does not exist" <|
parent = enso_project.data / "transient" / "nonexistent"
parent.exists.should_be_false
@ -600,7 +581,7 @@ spec_write suffix test_sheet_name =
r1.should_fail_with File_Error
r1.catch.should_be_a File_Error.Not_Found
Test.specify "should allow to write and read-back Unicode characters" <|
group_builder.specify "should allow to write and read-back Unicode characters" <|
encodings = enso_project.data / "transient" / "encodings."+suffix
encodings.delete_if_exists . should_succeed
@ -610,7 +591,7 @@ spec_write suffix test_sheet_name =
t2.at "A" . to_vector . should_equal ["A", "B", "😊", "D"]
encodings.delete
Test.specify "should be able to overwrite a pre-existing empty file" <|
group_builder.specify "should be able to overwrite a pre-existing empty file" <|
empty = enso_project.data / "transient" / "empty."+suffix
[Existing_File_Behavior.Backup, Existing_File_Behavior.Overwrite, Existing_File_Behavior.Append].each behavior-> Test.with_clue behavior.to_text+": " <|
empty.delete_if_exists . should_succeed
@ -625,8 +606,7 @@ spec_write suffix test_sheet_name =
t2 = empty.read (Excel (Worksheet "EnsoSheet"))
t2.should_equal t1
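# Helper that asserts an Excel_Range's sheet name, its bounds given as [top, left, bottom, right] (Nothing marks an unbounded edge), and whether it denotes a single cell.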
check_range excel_range sheet_name tlbr_vector single_cell=False =
excel_range.sheet_name . should_equal sheet_name
excel_range.top_row . should_equal (tlbr_vector.at 0)
@ -635,7 +615,10 @@ spec =
excel_range.right_column . should_equal (tlbr_vector.at 3)
excel_range.is_single_cell . should_equal single_cell
add_specs suite_builder =
suite_builder.group 'Excel Range' group_builder->
group_builder.specify 'should be able to parse A1 format' <|
check_range (Excel_Range.from_address "Test!EE4") 'Test' [4, 135, 4, 135] True
check_range (Excel_Range.from_address "Test!EE4:EE4") 'Test' [4, 135, 4, 135]
check_range (Excel_Range.from_address "Test!A1:D5") 'Test' [1, 1, 5, 4]
@ -650,15 +633,15 @@ spec =
check_range (Excel_Range.from_address "Test!$CB") 'Test' [Nothing, 80, Nothing, 80]
check_range (Excel_Range.from_address "Test!$DD:$XAZ") 'Test' [Nothing, 108, Nothing, 16276]
group_builder.specify 'should be able to parse RC format' <|
check_range (Excel_Range.from_address "Test!R1C1") 'Test' [1, 1, 1, 1] True
check_range (Excel_Range.from_address "Test!R1C1:R5C3") 'Test' [1, 1, 5, 3]
group_builder.specify 'should fail gracefully for invalid patterns' <|
Excel_Range.from_address "Test!$$QA1" . should_fail_with Illegal_Argument
Excel_Range.from_address "Test!BADADDRESS" . should_fail_with Illegal_Argument
group_builder.specify 'should allow Range creation for a cell' <|
check_range (Excel_Range.for_cell "Hello World" 123 14) 'Hello World' [14, 123, 14, 123] True
check_range (Excel_Range.for_cell "Hello World" "DS" 14) 'Hello World' [14, 123, 14, 123] True
Excel_Range.for_cell "Test" 123 14 . address . should_equal "Test!DS14"
@ -669,7 +652,7 @@ spec =
Excel_Range.for_cell "Test" 1 10000000 . should_fail_with Illegal_Argument
Excel_Range.for_cell "Test" 1 0 . should_fail_with Illegal_Argument
group_builder.specify 'should allow Range creation for a range' <|
check_range (Excel_Range.for_range "Hello World" 55 120 123 14) 'Hello World' [14, 55, 120, 123]
check_range (Excel_Range.for_range "Hello World" "BC" 120 "DS" 14) 'Hello World' [14, 55, 120, 123]
Excel_Range.for_range "Test" 55 120 123 14 . address . should_equal "Test!BC14:DS120"
@ -684,7 +667,7 @@ spec =
Excel_Range.for_range "Test" 5 1 123 0 . should_fail_with Illegal_Argument
Excel_Range.for_range "Test" 5 1 123 10000000 . should_fail_with Illegal_Argument
group_builder.specify 'should allow Range creation for a column' <|
check_range (Excel_Range.for_columns "Hello World" 123) 'Hello World' [Nothing, 123, Nothing, 123]
check_range (Excel_Range.for_columns "Hello World" "DS") 'Hello World' [Nothing, 123, Nothing, 123]
Excel_Range.for_columns "Test" 123 . address . should_equal "Test!DS"
@ -693,7 +676,7 @@ spec =
Excel_Range.for_columns "Test" "ZZZ" . should_fail_with Illegal_Argument
Excel_Range.for_columns "Test" 0 . should_fail_with Illegal_Argument
group_builder.specify 'should allow Range creation for columns' <|
check_range (Excel_Range.for_columns "Hello World" "BC" 123) 'Hello World' [Nothing, 55, Nothing, 123]
check_range (Excel_Range.for_columns "Hello World" 55 "DS") 'Hello World' [Nothing, 55, Nothing, 123]
Excel_Range.for_columns "Test" 55 123 . address . should_equal "Test!BC:DS"
@ -702,14 +685,14 @@ spec =
Excel_Range.for_columns "Test" 55 "ZZZ" . should_fail_with Illegal_Argument
Excel_Range.for_columns "Test" 55 0 . should_fail_with Illegal_Argument
group_builder.specify 'should allow Range creation for a row' <|
check_range (Excel_Range.for_rows "Hello World" 123) 'Hello World' [123, Nothing, 123, Nothing]
Excel_Range.for_rows "Test" 123 . address . should_equal "Test!123"
Excel_Range.for_rows "Hello World" 123 . address . should_equal "'Hello World'!123"
Excel_Range.for_rows "Test" 20000000 . should_fail_with Illegal_Argument
Excel_Range.for_rows "Test" 0 . should_fail_with Illegal_Argument
group_builder.specify 'should allow Range creation for rows' <|
check_range (Excel_Range.for_rows "Hello World" 55 123) 'Hello World' [55, Nothing, 123, Nothing]
Excel_Range.for_rows "Test" 55 123 . address . should_equal "Test!55:123"
Excel_Range.for_rows "Hello World" 55 123 . address . should_equal "'Hello World'!55:123"
@ -749,8 +732,8 @@ spec =
workbook.sheet_count . should_equal sheets
workbook.named_ranges_count . should_equal ranges
Test.group "Read XLSX / XLS Files" <|
Test.specify "should let you read the workbook with Auto_Detect" <|
suite_builder.group "Read XLSX / XLS Files" group_builder->
group_builder.specify "should let you read the workbook with Auto_Detect" <|
check_workbook <| xlsx_sheet.read
check_workbook <| Data.read xlsx_sheet
check_workbook <| Data.read xlsx_path
@ -759,7 +742,7 @@ spec =
check_workbook <| Data.read xls_sheet
check_workbook <| Data.read xls_path
Test.specify "should let you read the workbook with Excel" <|
group_builder.specify "should let you read the workbook with Excel" <|
check_workbook <| xlsx_sheet.read Excel
check_workbook <| Data.read xlsx_sheet Excel
check_workbook <| Data.read xlsx_path Excel
@ -768,7 +751,7 @@ spec =
check_workbook <| Data.read xls_sheet Excel
check_workbook <| Data.read xls_path Excel
Test.specify "workbook should look like a database connection" <|
group_builder.specify "workbook should look like a database connection" <|
workbook = xlsx_sheet.read
workbook.database . should_equal xlsx_sheet.normalize.path
@ -786,17 +769,17 @@ spec =
workbook.tables "%not%" . row_count . should_equal 1
workbook.tables "%not%" . at 'Name' . to_vector . should_equal ["Another"]
Test.specify "should let you read the sheet names" <|
group_builder.specify "should let you read the sheet names" <|
xlsx_sheet.read (Excel Sheet_Names) . should_equal sheet_names
xls_sheet.read (Excel Sheet_Names) . should_equal sheet_names
xlsx_sheet.read . sheet_names . should_equal sheet_names
Test.specify "should let you read the range names" <|
group_builder.specify "should let you read the range names" <|
xlsx_sheet.read (Excel Range_Names) . should_equal range_names
xls_sheet.read (Excel Range_Names) . should_equal range_names
xlsx_sheet.read . named_ranges . should_equal range_names
Test.specify "should let you read by sheet index" <|
group_builder.specify "should let you read by sheet index" <|
table = xlsx_sheet.read (Excel (Worksheet 1))
check_table table
@ -804,7 +787,7 @@ spec =
table_2.row_count . should_equal col_a.length
check_table table_2
Test.specify "should let you read by sheet name" <|
group_builder.specify "should let you read by sheet name" <|
table = xlsx_sheet.read (Excel (Worksheet "Sheet1"))
check_table table
@ -815,7 +798,7 @@ spec =
table_3 = xlsx_sheet.read . read "Sheet1"
check_table table_3
Test.specify "should let you read XLS by sheet index" <|
group_builder.specify "should let you read XLS by sheet index" <|
table = xls_sheet.read (Excel (Worksheet 1))
check_table table
@ -823,14 +806,14 @@ spec =
table_2.row_count . should_equal col_a.length
check_table table_2
Test.specify "should let you read XLS by sheet name" <|
group_builder.specify "should let you read XLS by sheet name" <|
table = xls_sheet.read (Excel (Worksheet "Sheet1"))
check_table table
table_2 = xls_sheet.read . read "Sheet1"
check_table table_2
Test.specify "should let you read by range" <|
group_builder.specify "should let you read by range" <|
table = xlsx_sheet.read (Excel (Cell_Range "Sheet1!A:C"))
check_table table 3
@ -844,7 +827,7 @@ spec =
check_table <| xlsx_sheet.read . read "Sheet1!10:13"
check_table count=3 <| xlsx_sheet.read . read "Sheet1!A10:C13"
Test.specify "should let you read by range name" <|
group_builder.specify "should let you read by range name" <|
table = xlsx_sheet.read (Excel (Cell_Range "myData"))
table.row_count . should_equal col_a.length
check_table table 3
@ -853,7 +836,7 @@ spec =
table_2.row_count . should_equal col_a.length
check_table table_2 3
Test.specify "should let you restrict number of rows read and skip rows" <|
group_builder.specify "should let you restrict number of rows read and skip rows" <|
table = xlsx_sheet.read (Excel (Worksheet "Sheet1"))
check_table table
@ -867,14 +850,14 @@ spec =
table_4 = xlsx_sheet.read (Excel (Worksheet "Sheet1" row_limit=6))
table_4.row_count . should_equal 6
Test.group "Problems" <|
Test.specify "should handle non-existing file gracefully" <|
suite_builder.group "Problems" group_builder->
group_builder.specify "should handle non-existing file gracefully" <|
bad_file = enso_project.data / "DoesNotExists.xlsx"
result = bad_file.read (Excel (Cell_Range "Sheet1!A:C"))
result.should_fail_with File_Error
result.catch.should_be_a File_Error.Not_Found
Test.specify "should handle wrong xls_format gracefully" <|
group_builder.specify "should handle wrong xls_format gracefully" <|
xlsx_sheet_copy = enso_project.data / "transient" / "TestSheetCopy.xlsx"
xlsx_sheet.copy_to xlsx_sheet_copy
@ -897,7 +880,7 @@ spec =
r2.should_fail_with File_Error
r2.catch.should_be_a File_Error.Corrupted_Format
Test.specify "should handle malformed XLS files gracefully" <|
group_builder.specify "should handle malformed XLS files gracefully" <|
bad_file = enso_project.data / "transient" / "malformed.xls"
"not really an XLS file contents...".write bad_file on_existing_file=Existing_File_Behavior.Overwrite
@ -921,7 +904,7 @@ spec =
bad_file.delete
Test.specify "will fail if an operation is performed on a closed workbook" <|
group_builder.specify "will fail if an operation is performed on a closed workbook" <|
workbook = xlsx_sheet.read
workbook.sheet_count . should_equal 4
@ -932,7 +915,7 @@ spec =
workbook.read "Sheet1" . should_fail_with Illegal_State
ci_pending = if Environment.get "CI" != Nothing then "This test takes a lot of time so it is disabled on CI."
Test.specify "should be able to write and read a big XLSX file (>110MB)" pending=ci_pending <|
group_builder.specify "should be able to write and read a big XLSX file (>110MB)" pending=ci_pending <|
n = 10^6
IO.println "Generating big XLSX file "+Time_Of_Day.now.to_text
rng = RandomHelpers.new 123
@ -962,7 +945,7 @@ spec =
workbook.close
big_file.delete_if_exists . should_succeed
Test.specify "should be able to write and read a big XLS file (>110MB)" pending=ci_pending <|
group_builder.specify "should be able to write and read a big XLS file (>110MB)" pending=ci_pending <|
IO.println "Generating big XLS file "+Time_Of_Day.now.to_text
rng = RandomHelpers.new 123
# Here we instead create a 2D table, because XLS has a limit of 65536 rows and 16k columns.
@ -992,11 +975,11 @@ spec =
workbook.close
big_file.delete_if_exists . should_succeed
spec_fmt suite_builder 'XLSX reading' Examples.xlsx .read
spec_fmt suite_builder 'XLS reading' Examples.xls .read
Test.group "Reading single cells correctly" <|
suite_builder.group "Reading single cells correctly" group_builder->
file = enso_project.data / "RangeTests.xlsx"
check_table table col_names data =
@ -1005,7 +988,7 @@ spec =
data.each_with_index idx->values->
table.at (col_names.at idx) . to_vector . should_equal values
Test.specify "Simple table" <|
group_builder.specify "Simple table" <|
check_table (file.read (Excel (Cell_Range "Sheet1!A1"))) ["AA", "BB"] [[1,2,3,4,5,6], ["A","B","C","D","E","F"]]
check_table (file.read (Excel (Cell_Range "Sheet1!A2"))) ["A", "B"] [[1,2,3,4,5,6], ["A","B","C","D","E","F"]]
check_table (file.read (Excel (Cell_Range "Sheet1!A1:A1"))) ["A"] [["AA"]]
@ -1013,37 +996,39 @@ spec =
check_table (file.read (Excel (Cell_Range "Sheet1!B1") headers=True)) ["BB"] [["A","B","C","D","E","F"]]
check_table (file.read (Excel (Cell_Range "Sheet1!B2"))) ["B"] [["A","B","C","D","E","F"]]
Test.specify "Patchy table" <|
group_builder.specify "Patchy table" <|
check_table (file.read (Excel (Cell_Range "Sheet1!D1"))) ["A", "B", "F"] [[1,2,4], [4,4,Nothing], [6,Nothing,6]]
check_table (file.read (Excel (Cell_Range "Sheet1!D2"))) ["D", "E", "F"] [[1,2,4], [4,4,Nothing], [6,Nothing,6]]
check_table (file.read (Excel (Cell_Range "Sheet1!E"))) ["B"] [[4,4,Nothing,Nothing,Nothing,Nothing]]
check_table (file.read (Excel (Cell_Range "Sheet1!E1"))) ["B", "F"] [[4,4,Nothing], [6,Nothing,6]]
check_table (file.read (Excel (Cell_Range "Sheet1!E2"))) ["E", "F"] [[4,4,Nothing], [6,Nothing,6]]
Test.specify "Single cell" <|
group_builder.specify "Single cell" <|
check_table (file.read (Excel (Cell_Range "Sheet1!H1"))) ["H"] [["Single Cell"]]
check_table (file.read (Excel (Cell_Range "Sheet1!H2"))) ["H"] [[]]
Test.specify "Single line" <|
group_builder.specify "Single line" <|
check_table (file.read (Excel (Cell_Range "Sheet1!J1"))) ["J", "K", "L"] [["Just"],["Some"],["Headers"]]
Test.specify "Growing table" <|
group_builder.specify "Growing table" <|
check_table (file.read (Excel (Cell_Range "Sheet1!N1"))) ["A", "Full", "Table", "Q"] [["Hello","World",Nothing,"Extend"],[1,Nothing,"Gap",3],[2,2,"Here",5],[Nothing,Nothing,"To","Hello"]]
check_table (file.read (Excel (Cell_Range "Sheet1!O1"))) ["Full", "Table", "Q"] [[1,Nothing,"Gap",3],[2,2,"Here",5],[Nothing,Nothing,"To","Hello"]]
check_table (file.read (Excel (Cell_Range "Sheet1!O2"))) ["O", "P", "Q"] [[1,Nothing,"Gap",3],[2,2,"Here",5],[Nothing,Nothing,"To","Hello"]]
Test.specify "Should handle blank headers without warnings" <|
group_builder.specify "Should handle blank headers without warnings" <|
check_table (file.read (Excel (Cell_Range "Sheet1!D1"))) ["A", "B", "F"] [[1,2,4], [4,4,Nothing], [6,Nothing,6]]
Test.specify "Should handle duplicate headers with warnings" <|
group_builder.specify "Should handle duplicate headers with warnings" <|
action = file.read (Excel (Cell_Range "Sheet1!S1")) on_problems=_
tester = check_table _ ["DD", "DD 1"] [[1,3], [2,4]]
problems = [Duplicate_Output_Column_Names.Error ["DD"]]
Problems.test_problem_handling action problems tester
# Clean up any leftovers from previous runs
enso_project.data/"transient" . list "out*" . each .delete
spec_write "xlsx" 'TestSheet.xlsx'
spec_write "xls" 'TestSheetOld.xls'
spec_write suite_builder "xlsx" 'TestSheet.xlsx'
spec_write suite_builder "xls" 'TestSheetOld.xls'
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

View File

@ -7,14 +7,17 @@ import Standard.Base.Runtime.Context
from Standard.Table import all
import Standard.Table.Errors.Invalid_JSON_Format
from Standard.Test_New import all
import project.Util
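# Standalone entry point: build the suite from add_specs and run it; run_with_filter with no filter argument runs every spec.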
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
add_specs suite_builder =
## To run this test locally:
$ sbt 'http-test-helper/run localhost 8080'
$ export ENSO_HTTP_TEST_HTTPBIN_URL=http://localhost:8080/
@ -24,13 +27,13 @@ spec =
pending_has_url = if base_url != Nothing then Nothing else
"The HTTP tests only run when the `ENSO_HTTP_TEST_HTTPBIN_URL` environment variable is set to URL of the httpbin server"
Test.group "fetching files using HTTP" pending=pending_has_url <|
Test.specify "fetching json" <|
suite_builder.group "fetching files using HTTP" pending=pending_has_url group_builder->
group_builder.specify "fetching json" <|
r = Data.fetch base_url_with_slash+"testfiles/table.json"
expected_table = Table.from_rows ["A", "B"] [[1, "x"], [3, "y"]]
r.to Table . should_equal expected_table
Test.specify "fetching csv" <|
group_builder.specify "fetching csv" <|
url = base_url_with_slash+"testfiles/table.csv"
r = Data.fetch url
expected_table = Table.from_rows ["A", "B"] [[1, "x"], [3, "y"]]
@ -42,7 +45,7 @@ spec =
r2.should_be_a Table
r2.should_equal expected_table
Test.specify "fetching xls" <|
group_builder.specify "fetching xls" <|
url = base_url_with_slash+"testfiles/table.xls"
r = Data.fetch url
expected_table = Table.from_rows ["A", "B"] [[1, "x"], [3, "y"]]
@ -55,7 +58,7 @@ spec =
r2.should_be_a Table
r2.should_equal expected_table
Test.specify "fetching xlsx" <|
group_builder.specify "fetching xlsx" <|
url = base_url_with_slash+"testfiles/table.xlsx"
r = Data.fetch url
expected_table = Table.from_rows ["A", "B"] [[1, "x"], [3, "y"]]
@ -72,7 +75,7 @@ spec =
r3.should_be_a Table
r3.should_equal expected_table
Test.specify "format detection based on Content-Type and Content-Disposition" <|
group_builder.specify "format detection based on Content-Type and Content-Disposition" <|
content = 'A,B\n1,x\n3,y'
uri = URI.from (base_url_with_slash+"test_headers")
. add_query_argument "base64_response_data" (Base_64.encode_text content)

View File

@ -5,18 +5,18 @@ import Standard.Base.Runtime.Context
from Standard.Table import all
import Standard.Table.Errors.Invalid_JSON_Format
from Standard.Test_New import all
import project.Util
add_specs suite_builder =
transient = enso_project.data / "transient"
suite_builder.group 'Various File Format support on Table' group_builder->
t1 = Table.new [["X", [1, 2, 3]]]
simple_empty = enso_project.data/'simple_empty.csv' . read
Test.specify "should be able to be written as CSV, Excel" <|
group_builder.specify "should be able to be written as CSV, Excel" <|
f1 = transient / "test2.csv"
f2 = transient / "test3.xlsx"
[f1, f2].each f->
@ -25,14 +25,14 @@ spec =
f.exists.should_be_true
f.delete
Test.specify "should be able to be written as JSON using Table.write" <|
group_builder.specify "should be able to be written as JSON using Table.write" <|
f1 = transient / "test1.json"
f1.delete_if_exists
t1.write f1 . should_succeed
f1.exists.should_be_true
f1.delete
group_builder.specify 'should write JSON tables' <|
simple_empty = enso_project.data/'simple_empty.csv' . read
out = transient / 'out.json'
out.delete_if_exists
@ -40,7 +40,7 @@ spec =
Table.from_objects (Json.parse out.read_text) ['a', 'b', 'c'] . should_equal simple_empty
out.delete_if_exists
group_builder.specify 'should append to JSON tables' <|
out = transient / 'out.json'
out.delete_if_exists
simple_empty.write out . should_equal out
@ -48,7 +48,7 @@ spec =
Table.from_objects (Json.parse out.read_text) ['a', 'b', 'c'] . row_count . should_equal 2*simple_empty.row_count
out.delete_if_exists
group_builder.specify 'should fail to append to JSON non-arrays' <|
out = transient / 'out.json'
out.delete_if_exists
'1'.write out
@ -63,7 +63,7 @@ spec =
simple_empty.write out on_existing_file=Existing_File_Behavior.Append . should_fail_with Invalid_JSON_Format
out.delete_if_exists
Test.specify "should fail gracefully when provided with an unsupported format" <|
group_builder.specify "should fail gracefully when provided with an unsupported format" <|
f1 = (transient / "test4.unknown-format")
f1.delete_if_exists
r1 = t1.write f1
@ -80,11 +80,11 @@ spec =
r2.catch.should_be_a File_Error.Unsupported_Output_Type
r2.catch.format . should_equal my_format
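# Parameterised group: write_tests is invoked once per extension at the bottom of this file (the "csv" and "json" calls below), so each format gets the same write tests.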
write_tests extension = suite_builder.group 'Writing to '+extension+' files' group_builder->
count result =
if result . is_a Table then result.row_count else result.length
Test.specify "should write to a temporary "+extension+" file part of the data if context is disabled" <|
group_builder.specify "should write to a temporary "+extension+" file part of the data if context is disabled" <|
f = transient / ("big." + extension)
f.delete_if_exists
f_bak = transient / ("big." + extension + ".bak")
@ -112,7 +112,7 @@ spec =
f_bak.delete_if_exists
r.delete_if_exists
Test.specify "should create a backup file if overwriting" <|
group_builder.specify "should create a backup file if overwriting" <|
f = transient / ("test." + extension)
f.delete_if_exists
f_bak = transient / ("test." + extension + ".bak")
@ -138,7 +138,7 @@ spec =
f_bak.delete_if_exists
Test.specify "should support appending" <|
group_builder.specify "should support appending" <|
f = transient / ("test." + extension)
f.delete_if_exists
f_bak = transient / ("test." + extension + ".bak")
@ -161,4 +161,8 @@ spec =
write_tests "csv"
write_tests "json"
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

View File

@ -2,19 +2,26 @@ from Standard.Base import all
from Standard.Table import Table
from Standard.Table.Extensions.Table_Conversions import all
from Standard.Test_New import all
import project.Util
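# Test fixture: the ~ on ~clothes makes the field lazy, so clothes.csv is read only when a spec first touches it.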
type Data
Value ~clothes
setup =
clothes = enso_project.data/'clothes.csv' . read
Data.Value clothes
add_specs suite_builder =
suite_builder.group 'JSON conversion' group_builder->
data = Data.setup
Test.specify "should allow converting a JSON array into a table" <|
group_builder.specify 'should convert tables to a format compatible with Table.from_objects' <|
clothes_json = data.clothes.to_json
Table.from_objects (Json.parse clothes_json) ['Id', 'Name', 'Quantity', 'Rating', 'Price'] . should_equal data.clothes
group_builder.specify "should allow converting a JSON array into a table" <|
r_1 = JS_Object.from_pairs [['foo', 20], ['bar', 'baz'], ['baz', False]]
r_2 = JS_Object.from_pairs [['bar', 'xyz'], ['baz', True]]
r_3 = JS_Object.from_pairs [['baz', False], ['foo', 13]]
@ -24,4 +31,8 @@ spec = Test.group 'JSON conversion' <|
t.at 'bar' . to_vector . should_equal ['baz', 'xyz', Nothing]
t.at 'baz' . to_vector . should_equal [False, True, False]
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

View File

@ -1,6 +1,6 @@
from Standard.Base import all
from Standard.Test_New import all
import project.IO.Csv_Spec
import project.IO.Delimited_Read_Spec
@ -10,13 +10,16 @@ import project.IO.Fetch_Spec
import project.IO.Formats_Spec
import project.IO.Json_Spec
add_specs suite_builder =
Csv_Spec.add_specs suite_builder
Delimited_Read_Spec.add_specs suite_builder
Delimited_Write_Spec.add_specs suite_builder
Excel_Spec.add_specs suite_builder
Formats_Spec.add_specs suite_builder
Fetch_Spec.add_specs suite_builder
Json_Spec.add_specs suite_builder
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

View File

@ -8,12 +8,21 @@ import Standard.Table.Internal.Aggregate_Column_Helper
import Standard.Table.Internal.Java_Problems
import Standard.Table.Internal.Problem_Builder.Problem_Builder
from Standard.Test_New import all
spec = Test.group "Aggregate Columns" <|
type Data
Value ~data
simple_table self = self.data.at 0
empty_table self = self.data.at 1
setup = Data.Value <|
simple_table = Table.new [["count", [1, 2, Nothing, 3, Nothing]], ["is_valid", [Nothing, False, True, False, Nothing]], ["float", [3.4, 1, 5.6, 2.1, Nothing]], ["text", ["A", "", Nothing, "B,C", Nothing]]]
empty_table = Table.new [["count", []], ["is_valid", []], ["text", []]]
[simple_table, empty_table]
add_specs suite_builder = suite_builder.group "Aggregate Columns" group_builder->
test_name = "Test Column"
@ -34,155 +43,161 @@ spec = Test.group "Aggregate Columns" <|
if epsilon != False then ((result - expected_result).abs < epsilon).should_be_true else
result.should_equal expected_result
Test.specify "should be able to count a set" <|
test_aggregator simple_table (Count) "Count" simple_table.row_count
test_aggregator simple_table (Count test_name) test_name simple_table.row_count
test_aggregator empty_table (Count test_name) test_name empty_table.row_count
data = Data.setup
Test.specify "should be able to count missing values in a set" <|
test_aggregator simple_table (Count_Nothing 0) "Count Nothing count" 2
test_aggregator simple_table (Count_Nothing 0 test_name) test_name 2
test_aggregator simple_table (Count_Nothing "text" test_name) test_name 2
test_aggregator empty_table (Count_Nothing 0 test_name) test_name empty_table.row_count
group_builder.specify "should be able to count a set" <|
test_aggregator data.simple_table (Count) "Count" data.simple_table.row_count
test_aggregator data.simple_table (Count test_name) test_name data.simple_table.row_count
test_aggregator data.empty_table (Count test_name) test_name data.empty_table.row_count
Test.specify "should be able to count non missing values in a set" <|
test_aggregator simple_table (Count_Not_Nothing 0) "Count Not Nothing count" 3
test_aggregator simple_table (Count_Not_Nothing 0 test_name) test_name 3
test_aggregator simple_table (Count_Not_Nothing "text" test_name) test_name 3
test_aggregator empty_table (Count_Not_Nothing 0 test_name) test_name empty_table.row_count
group_builder.specify "should be able to count missing values in a set" <|
test_aggregator data.simple_table (Count_Nothing 0) "Count Nothing count" 2
test_aggregator data.simple_table (Count_Nothing 0 test_name) test_name 2
test_aggregator data.simple_table (Count_Nothing "text" test_name) test_name 2
test_aggregator data.empty_table (Count_Nothing 0 test_name) test_name data.empty_table.row_count
Test.specify "should be able to count empties in a set of Texts" <|
test_aggregator simple_table (Count_Empty -1) "Count Empty text" 3
test_aggregator simple_table (Count_Empty -1 test_name) test_name 3
test_aggregator simple_table (Count_Empty "text" test_name) test_name 3
group_builder.specify "should be able to count non missing values in a set" <|
test_aggregator data.simple_table (Count_Not_Nothing 0) "Count Not Nothing count" 3
test_aggregator data.simple_table (Count_Not_Nothing 0 test_name) test_name 3
test_aggregator data.simple_table (Count_Not_Nothing "text" test_name) test_name 3
test_aggregator data.empty_table (Count_Not_Nothing 0 test_name) test_name data.empty_table.row_count
group_builder.specify "should be able to count empties in a set of Texts" <|
test_aggregator data.simple_table (Count_Empty -1) "Count Empty text" 3
test_aggregator data.simple_table (Count_Empty -1 test_name) test_name 3
test_aggregator data.simple_table (Count_Empty "text" test_name) test_name 3
# TODO [RW] Re-enable this once #6281 is implemented.
# test_aggregator data.empty_table (Count_Empty 0 test_name) test_name data.empty_table.row_count
Test.specify "should be able to count non empties in a set of Texts" <|
test_aggregator simple_table (Count_Not_Empty -1) "Count Not Empty text" 2
test_aggregator simple_table (Count_Not_Empty -1 test_name) test_name 2
test_aggregator simple_table (Count_Not_Empty "text" test_name) test_name 2
group_builder.specify "should be able to count non empties in a set of Texts" <|
test_aggregator data.simple_table (Count_Not_Empty -1) "Count Not Empty text" 2
test_aggregator data.simple_table (Count_Not_Empty -1 test_name) test_name 2
test_aggregator data.simple_table (Count_Not_Empty "text" test_name) test_name 2
# TODO [RW] Re-enable this once #6281 is implemented.
# test_aggregator data.empty_table (Count_Not_Empty 0 test_name) test_name data.empty_table.row_count
Test.specify "should be able to total a set of values" <|
test_aggregator simple_table (Sum -2) "Sum float" 12.1
test_aggregator simple_table (Sum -2 test_name) test_name 12.1
test_aggregator simple_table (Sum "float" test_name) test_name 12.1
group_builder.specify "should be able to total a set of values" <|
test_aggregator data.simple_table (Sum -2) "Sum float" 12.1
test_aggregator data.simple_table (Sum -2 test_name) test_name 12.1
test_aggregator data.simple_table (Sum "float" test_name) test_name 12.1
# TODO [RW] Re-enable this once #6281 is implemented.
# test_aggregator empty_table (Sum 0 test_name) test_name Nothing
# test_aggregator data.empty_table (Sum 0 test_name) test_name Nothing
Test.specify "should be able to average a set of values" <|
test_aggregator simple_table (Average -2) "Average float" 3.025 0.000001
test_aggregator simple_table (Average -2 test_name) test_name 3.025 0.000001
test_aggregator simple_table (Average "float" test_name) test_name 3.025 0.000001
group_builder.specify "should be able to average a set of values" <|
test_aggregator data.simple_table (Average -2) "Average float" 3.025 0.000001
test_aggregator data.simple_table (Average -2 test_name) test_name 3.025 0.000001
test_aggregator data.simple_table (Average "float" test_name) test_name 3.025 0.000001
# TODO [RW] Re-enable this once #6281 is implemented.
# test_aggregator empty_table (Average 0 test_name) test_name Nothing
# test_aggregator data.empty_table (Average 0 test_name) test_name Nothing
Test.specify "should be able to compute standard deviation a set of values" <|
test_aggregator simple_table (Standard_Deviation -2) "Standard Deviation float" 1.977161 0.000001
test_aggregator simple_table (Standard_Deviation -2 test_name) test_name 1.977161 0.000001
test_aggregator simple_table (Standard_Deviation "float" test_name) test_name 1.977161 0.000001
group_builder.specify "should be able to compute standard deviation a set of values" <|
test_aggregator data.simple_table (Standard_Deviation -2) "Standard Deviation float" 1.977161 0.000001
test_aggregator data.simple_table (Standard_Deviation -2 test_name) test_name 1.977161 0.000001
test_aggregator data.simple_table (Standard_Deviation "float" test_name) test_name 1.977161 0.000001
# TODO [RW] Re-enable this once #6281 is implemented.
# test_aggregator empty_table (Standard_Deviation 0 test_name) test_name Nothing
# test_aggregator data.empty_table (Standard_Deviation 0 test_name) test_name Nothing
Test.specify "should be able to compute standard deviation of a population a set of values" <|
test_aggregator simple_table (Standard_Deviation -2 population=True) "Standard Deviation float" 1.712271 0.000001
test_aggregator simple_table (Standard_Deviation -2 test_name population=True) test_name 1.712271 0.000001
test_aggregator simple_table (Standard_Deviation "float" test_name population=True) test_name 1.712271 0.000001
group_builder.specify "should be able to compute standard deviation of a population a set of values" <|
test_aggregator data.simple_table (Standard_Deviation -2 population=True) "Standard Deviation float" 1.712271 0.000001
test_aggregator data.simple_table (Standard_Deviation -2 test_name population=True) test_name 1.712271 0.000001
test_aggregator data.simple_table (Standard_Deviation "float" test_name population=True) test_name 1.712271 0.000001
# TODO [RW] Re-enable this once #6281 is implemented.
# test_aggregator empty_table (Standard_Deviation 0 test_name population=True) test_name Nothing
# test_aggregator data.empty_table (Standard_Deviation 0 test_name population=True) test_name Nothing
Test.specify "should be able to compute median a set of values" <|
test_aggregator simple_table (Median -2) "Median float" 2.75 0.000001
test_aggregator simple_table (Median -2 test_name) test_name 2.75 0.000001
test_aggregator simple_table (Median "float" test_name) test_name 2.75 0.000001
group_builder.specify "should be able to compute median a set of values" <|
test_aggregator data.simple_table (Median -2) "Median float" 2.75 0.000001
test_aggregator data.simple_table (Median -2 test_name) test_name 2.75 0.000001
test_aggregator data.simple_table (Median "float" test_name) test_name 2.75 0.000001
# TODO [RW] Re-enable this once #6281 is implemented.
# test_aggregator empty_table (Median 0 test_name) test_name Nothing
# test_aggregator data.empty_table (Median 0 test_name) test_name Nothing
Test.specify "should be able to compute first of a set of values including missing" <|
test_aggregator simple_table (First 1 ignore_nothing=False) "First is_valid" Nothing
test_aggregator simple_table (First 1 test_name ignore_nothing=False) test_name Nothing
test_aggregator simple_table (First "is_valid" test_name ignore_nothing=False) test_name Nothing
test_aggregator empty_table (First 0 test_name ignore_nothing=False) test_name Nothing
group_builder.specify "should be able to compute first of a set of values including missing" <|
test_aggregator data.simple_table (First 1 ignore_nothing=False) "First is_valid" Nothing
test_aggregator data.simple_table (First 1 test_name ignore_nothing=False) test_name Nothing
test_aggregator data.simple_table (First "is_valid" test_name ignore_nothing=False) test_name Nothing
test_aggregator data.empty_table (First 0 test_name ignore_nothing=False) test_name Nothing
Test.specify "should be able to compute first of a set of values excluding missing" <|
test_aggregator simple_table (First 1) "First is_valid" False
test_aggregator simple_table (First 1 test_name) test_name False
test_aggregator simple_table (First "is_valid" test_name) test_name False
test_aggregator empty_table (First 0 test_name) test_name Nothing
group_builder.specify "should be able to compute first of a set of values excluding missing" <|
test_aggregator data.simple_table (First 1) "First is_valid" False
test_aggregator data.simple_table (First 1 test_name) test_name False
test_aggregator data.simple_table (First "is_valid" test_name) test_name False
test_aggregator data.empty_table (First 0 test_name) test_name Nothing
Test.specify "should be able to compute last of a set of values including missing" <|
test_aggregator simple_table (Last 1 ignore_nothing=False) "Last is_valid" Nothing
test_aggregator simple_table (Last 1 test_name ignore_nothing=False) test_name Nothing
test_aggregator simple_table (Last "is_valid" test_name ignore_nothing=False) test_name Nothing
test_aggregator empty_table (Last 0 test_name ignore_nothing=False) test_name Nothing
group_builder.specify "should be able to compute last of a set of values including missing" <|
test_aggregator data.simple_table (Last 1 ignore_nothing=False) "Last is_valid" Nothing
test_aggregator data.simple_table (Last 1 test_name ignore_nothing=False) test_name Nothing
test_aggregator data.simple_table (Last "is_valid" test_name ignore_nothing=False) test_name Nothing
test_aggregator data.empty_table (Last 0 test_name ignore_nothing=False) test_name Nothing
Test.specify "should be able to compute last of a set of values excluding missing" <|
test_aggregator simple_table (Last 1) "Last is_valid" False
test_aggregator simple_table (Last 1 test_name) test_name False
test_aggregator simple_table (Last "is_valid" test_name) test_name False
test_aggregator empty_table (Last 0 test_name) test_name Nothing
group_builder.specify "should be able to compute last of a set of values excluding missing" <|
test_aggregator data.simple_table (Last 1) "Last is_valid" False
test_aggregator data.simple_table (Last 1 test_name) test_name False
test_aggregator data.simple_table (Last "is_valid" test_name) test_name False
test_aggregator data.empty_table (Last 0 test_name) test_name Nothing
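Read together, the four First/Last specs pin down the `is_valid` fixture column: it must start and end with `Nothing` and have `False` as both its first and last non-missing entry. One vector consistent with all four expectations (and with the five-row count implied by the Count specs) would be:

    is_valid = [Nothing, False, True, False, Nothing]
    # First ignore_nothing=False -> Nothing ; First (default) -> False
    # Last  ignore_nothing=False -> Nothing ; Last  (default) -> False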
Test.specify "should be able to concatenate a set of values excluding missing" <|
test_aggregator simple_table (Concatenate -1 "" ',' '[' ']' '"') "Concatenate text" '[A,"",,"B,C",]'
test_aggregator simple_table (Concatenate -1 test_name) test_name 'AB,C'
test_aggregator simple_table (Concatenate "text" test_name ',') test_name 'A,,,B,C,'
group_builder.specify "should be able to concatenate a set of values excluding missing" <|
test_aggregator data.simple_table (Concatenate -1 "" ',' '[' ']' '"') "Concatenate text" '[A,"",,"B,C",]'
test_aggregator data.simple_table (Concatenate -1 test_name) test_name 'AB,C'
test_aggregator data.simple_table (Concatenate "text" test_name ',') test_name 'A,,,B,C,'
# TODO [RW] Re-enable this once #6281 is implemented.
# test_aggregator empty_table (Concatenate 0 test_name) test_name Nothing
# test_aggregator data.empty_table (Concatenate 0 test_name) test_name Nothing
Test.specify "should be able to count distinct items on a single set of values" <|
test_aggregator simple_table (Count_Distinct 0) "Count Distinct count" 4
test_aggregator simple_table (Count_Distinct 0 test_name) test_name 4
test_aggregator simple_table (Count_Distinct "count" test_name) test_name 4
test_aggregator empty_table (Count_Distinct 0 test_name) test_name 0
test_aggregator simple_table (Count_Distinct "float" test_name ignore_nothing=False) test_name 5
test_aggregator simple_table (Count_Distinct "float" test_name ignore_nothing=True) test_name 4
group_builder.specify "should be able to count distinct items on a single set of values" <|
test_aggregator data.simple_table (Count_Distinct 0) "Count Distinct count" 4
test_aggregator data.simple_table (Count_Distinct 0 test_name) test_name 4
test_aggregator data.simple_table (Count_Distinct "count" test_name) test_name 4
test_aggregator data.empty_table (Count_Distinct 0 test_name) test_name 0
test_aggregator data.simple_table (Count_Distinct "float" test_name ignore_nothing=False) test_name 5
test_aggregator data.simple_table (Count_Distinct "float" test_name ignore_nothing=True) test_name 4
Test.specify "should be able to count distinct items on a multiple sets of values" <|
test_aggregator simple_table (Count_Distinct [0, 1]) "Count Distinct count is_valid" 5
test_aggregator simple_table (Count_Distinct ["is_valid", "float"]) "Count Distinct is_valid float" 5
test_aggregator simple_table (Count_Distinct ["is_valid", "float"] ignore_nothing=True) "Count Distinct is_valid float" 4
group_builder.specify "should be able to count distinct items on a multiple sets of values" <|
test_aggregator data.simple_table (Count_Distinct [0, 1]) "Count Distinct count is_valid" 5
test_aggregator data.simple_table (Count_Distinct ["is_valid", "float"]) "Count Distinct is_valid float" 5
test_aggregator data.simple_table (Count_Distinct ["is_valid", "float"] ignore_nothing=True) "Count Distinct is_valid float" 4
Test.specify "should be able to get the minimum of a set of values" <|
test_aggregator simple_table (Minimum -2) "Minimum float" 1
test_aggregator simple_table (Minimum -2 test_name) test_name 1
test_aggregator simple_table (Minimum "float" test_name) test_name 1
test_aggregator empty_table (Minimum 0 test_name) test_name Nothing
group_builder.specify "should be able to get the minimum of a set of values" <|
test_aggregator data.simple_table (Minimum -2) "Minimum float" 1
test_aggregator data.simple_table (Minimum -2 test_name) test_name 1
test_aggregator data.simple_table (Minimum "float" test_name) test_name 1
test_aggregator data.empty_table (Minimum 0 test_name) test_name Nothing
Test.specify "should be able to get the maximum of a set of values" <|
test_aggregator simple_table (Maximum -2) "Maximum float" 5.6
test_aggregator simple_table (Maximum -2 test_name) test_name 5.6
test_aggregator simple_table (Maximum "float" test_name) test_name 5.6
test_aggregator empty_table (Maximum 0 test_name) test_name Nothing
group_builder.specify "should be able to get the maximum of a set of values" <|
test_aggregator data.simple_table (Maximum -2) "Maximum float" 5.6
test_aggregator data.simple_table (Maximum -2 test_name) test_name 5.6
test_aggregator data.simple_table (Maximum "float" test_name) test_name 5.6
test_aggregator data.empty_table (Maximum 0 test_name) test_name Nothing
Test.specify "should be able to get the shortest of a set of texts" <|
test_aggregator simple_table (Shortest -1) "Shortest text" ""
test_aggregator simple_table (Shortest -1 test_name) test_name ""
test_aggregator simple_table (Shortest "text" test_name) test_name ""
group_builder.specify "should be able to get the shortest of a set of texts" <|
test_aggregator data.simple_table (Shortest -1) "Shortest text" ""
test_aggregator data.simple_table (Shortest -1 test_name) test_name ""
test_aggregator data.simple_table (Shortest "text" test_name) test_name ""
# TODO [RW] Re-enable this once #6281 is implemented.
# test_aggregator empty_table (Shortest 0 test_name) test_name Nothing
# test_aggregator data.empty_table (Shortest 0 test_name) test_name Nothing
Test.specify "should be able to get the longest of a set of texts" <|
test_aggregator simple_table (Longest -1) "Longest text" "B,C"
test_aggregator simple_table (Longest -1 test_name) test_name "B,C"
test_aggregator simple_table (Longest "text" test_name) test_name "B,C"
group_builder.specify "should be able to get the longest of a set of texts" <|
test_aggregator data.simple_table (Longest -1) "Longest text" "B,C"
test_aggregator data.simple_table (Longest -1 test_name) test_name "B,C"
test_aggregator data.simple_table (Longest "text" test_name) test_name "B,C"
# TODO [RW] Re-enable this once #6281 is implemented.
# test_aggregator empty_table (Longest 0 test_name) test_name Nothing
# test_aggregator data.empty_table (Longest 0 test_name) test_name Nothing
Test.specify "should be able to get the mode of a set of numbers" <|
group_builder.specify "should be able to get the mode of a set of numbers" <|
mode_table = Table.new [["tests", [1,2,3,4,2,4,1,2,3,4,2,1,3,5,2,1,2,4,5,2,1,2,3,5,6,1,2,2]]]
test_aggregator mode_table (Mode -1) "Mode tests" 2
test_aggregator mode_table (Mode -1 test_name) test_name 2
test_aggregator empty_table (Mode 0 test_name) test_name Nothing
test_aggregator data.empty_table (Mode 0 test_name) test_name Nothing
Test.specify "should be able to get the percentile of a set of numbers" <|
group_builder.specify "should be able to get the percentile of a set of numbers" <|
percentile_table = Table.new [["tests", [67,23,56,93,36,47,45,1,88,44,49,13,74,76,4,97,49,81,81,37]]]
test_aggregator percentile_table (Percentile 0 0) "0%-ile tests" 1
test_aggregator percentile_table (Percentile 0 -1 test_name) test_name 1
test_aggregator percentile_table (Percentile 0.15 0) "15%-ile tests" 21.5
test_aggregator percentile_table (Percentile 0.25 0) "25%-ile tests" 36.75
test_aggregator percentile_table (Percentile 0.66 0) "66%-ile tests" 70.78
test_aggregator empty_table (Percentile 0.25 0 test_name) test_name Nothing
test_aggregator data.empty_table (Percentile 0.25 0 test_name) test_name Nothing
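The systematic `simple_table` to `data.simple_table` rename in these hunks reflects the builder API's convention of bundling fixtures into a lazily constructed data object instead of module-level bindings. A hedged sketch of that pattern (the actual `Data` type is defined above this excerpt; the fixture contents here are illustrative):

    type Data
        Value ~simple_table ~empty_table

        setup =
            table = Table.new [["count", [1, 2, Nothing, 3, Nothing]]]
            Data.Value table (table.take 0)

Groups then call `data = Data.setup` once, and each specify reads `data.simple_table`, so the tables are only materialized when the group actually runs.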
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
main = Test_Suite.run_main spec
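Each migrated file gains the same `main` wrapper: build a suite from `add_specs`, then run it. Extracted from the diff, the complete minimal skeleton of the new API is:

    from Standard.Test_New import all

    add_specs suite_builder =
        suite_builder.group "Example" group_builder->
            group_builder.specify "adds numbers" <|
                (1 + 1) . should_equal 2

    main =
        suite = Test.build suite_builder->
            add_specs suite_builder
        suite.run_with_filter

`run_with_filter` is invoked without arguments throughout; its name suggests an optional filter parameter, but its signature is not shown in this excerpt.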

View File

@@ -4,16 +4,18 @@ from Standard.Table import Column
from Standard.Table.Internal.Java_Exports import make_inferred_builder
import Standard.Table.Internal.Java_Problems
from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
type My
Data x y
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
spec = Test.group "[In-Memory] Storage Builders" <|
Test.specify "should correctly incrementally build a table in presence of lots of null values" <|
add_specs suite_builder = suite_builder.group "[In-Memory] Storage Builders" group_builder->
group_builder.specify "should correctly incrementally build a table in presence of lots of null values" <|
## This test is introduced to avoid regressions related to a bug where
the underlying array was not resized to a big enough size after many
null values were inserted into a builder.
@@ -34,7 +36,7 @@ spec = Test.group "[In-Memory] Storage Builders" <|
column.to_vector . should_equal vector
Problems.assume_no_problems r
Test.specify "Inferred Builder should correctly resize when retyping to a mixed column, with an underestimated initial size" <|
group_builder.specify "Inferred Builder should correctly resize when retyping to a mixed column, with an underestimated initial size" <|
mixed_values = [10, 11, 22, 23, 24, 25, '2020-02-28']
r = Java_Problems.with_problem_aggregator Problem_Behavior.Report_Warning java_problem_aggregator->
builder = make_inferred_builder 3 java_problem_aggregator
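Restated through the public API rather than the internal builder (illustrative; the `Value_Type.Mixed` inference is confirmed by the big-integer specs later in this diff): a numeric vector that also contains a text value must come out as a Mixed column with every value intact.

    mixed_values = [10, 11, 22, 23, 24, 25, '2020-02-28']
    column = Column.from_vector "values" mixed_values
    column.value_type . should_equal Value_Type.Mixed
    column.to_vector . should_equal mixed_values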

View File

@@ -10,20 +10,19 @@ from Standard.Table import Column, Value_Type
from Standard.Table.Errors import Invalid_Value_Type
from Standard.Table.Internal.Column_Format import all
import Standard.Test.Extensions
from Standard.Test import Test, Test_Suite
from Standard.Test_New import all
from project.Util import all
spec =
Test.group "Date Column.format, with format string" <|
Test.specify "Date column" <|
add_specs suite_builder =
suite_builder.group "Date Column.format, with format string" group_builder->
group_builder.specify "Date column" <|
input = Column.from_vector "values" [Date.new 2020 12 21, Date.new 2023 4 25]
expected = Column.from_vector "values" ["20201221", "20230425"]
actual = input.format "yyyyMMdd"
actual . should_equal expected
Test.specify "Date with locale" <|
group_builder.specify "Date with locale" <|
input = Column.from_vector "values" [Date.new 2020 6 21, Date.new 2023 4 25]
expected_default = Column.from_vector "values" ["21. June 2020", "25. April 2023"]
expected_gb = Column.from_vector "values" ["21. June 2020", "25. April 2023"]
@@ -33,61 +32,61 @@ spec =
input.format (Date_Time_Formatter.from "d. MMMM yyyy" Locale.uk) . should_equal expected_gb
input.format (Date_Time_Formatter.from "d. MMMM yyyy" Locale.france) . should_equal expected_fr
Test.specify "Empty/Nothing format" <|
group_builder.specify "Empty/Nothing format" <|
input = Column.from_vector "values" [Date.new 2020 12 21, Date.new 2023 4 25]
expected = Column.from_vector "values" ['2020-12-21', '2023-04-25']
input.format . should_equal expected
input.format "" . should_equal expected
input.format Nothing . should_equal expected
Test.specify "Bad format" <|
group_builder.specify "Bad format" <|
input = Column.from_vector "values" [Date.new 2020 6 21, Date.new 2023 4 25]
input.format "jjjjjj" . should_fail_with Date_Time_Format_Parse_Error
Test.group "Date Column.format, with format Column" <|
Test.specify "Date column" <|
suite_builder.group "Date Column.format, with format Column" group_builder->
group_builder.specify "Date column" <|
input = Column.from_vector "values" [Date.new 2020 12 21, Date.new 2023 4 25]
formats = Column.from_vector "formats" ["yyyyMMdd", "dd-MM-yyyy"]
expected = Column.from_vector "values" ["20201221", "25-04-2023"]
actual = input.format formats
actual . should_equal expected
Test.specify "Date with locale" <|
group_builder.specify "Date with locale" <|
input = Column.from_vector "values" [Date.new 2020 6 21, Date.new 2023 4 25]
formats = Column.from_vector "formats" ["d. MMMM yyyy", "d-MMMM-yyyy"]
expected = Column.from_vector "values" ["21. juin 2020", "25-avril-2023"]
input.format formats (Locale.new "fr") . should_equal expected
Test.specify "Empty/Nothing format, with format Column" <|
group_builder.specify "Empty/Nothing format, with format Column" <|
input = Column.from_vector "values" [Date.new 2020 12 21, Date.new 2023 4 25]
formats = Column.from_vector "formats" ["", Nothing]
expected = Column.from_vector "values" ["2020-12-21", "2023-04-25"]
actual = input.format formats
actual . should_equal expected
Test.specify "Bad format" <|
group_builder.specify "Bad format" <|
input = Column.from_vector "values" [Date.new 2020 6 21, Date.new 2023 4 25, Date.new 2023 4 26]
formats = Column.from_vector "formats" ["yyyyMMdd", "jjjjj", "FFF"]
input.format formats . should_fail_with Date_Time_Format_Parse_Error
Test.specify "Bad format column type" <|
group_builder.specify "Bad format column type" <|
input = Column.from_vector "values" [Date.new 2020 6 21, Date.new 2023 4 25, Date.new 2023 4 26]
formats = Column.from_vector "formats" [3, 4, 5]
input.format formats . should_fail_with Invalid_Value_Type
Test.specify "column length mismatch" <|
group_builder.specify "column length mismatch" <|
input = Column.from_vector "values" [Date.new 2020 6 21, Date.new 2023 4 25]
formats = Column.from_vector "formats" ["yyyyMMdd", "DDDDD", "w"]
input.format formats . should_fail_with Illegal_Argument
Test.group "Date_Time Column.format, with format string" <|
Test.specify "Date_Time column" <|
suite_builder.group "Date_Time Column.format, with format string" group_builder->
group_builder.specify "Date_Time column" <|
input = Column.from_vector "values" [Date_Time.new 2020 12 21 8 10 20, Date_Time.new 2023 4 25 14 25 2]
expected = Column.from_vector "values" ["20201221 08.10.20", "20230425 14.25.02"]
actual = input.format "yyyyMMdd HH.mm.ss"
actual . should_equal expected
Test.specify "Date_Time with locale" <|
group_builder.specify "Date_Time with locale" <|
input = Column.from_vector "values" [Date_Time.new 2020 6 21 8 10 20, Date_Time.new 2023 4 25 14 25 2]
expected_default = Column.from_vector "values" ["21. June 2020 08.10.20", "25. April 2023 14.25.02"]
expected_gb = Column.from_vector "values" ["21. June 2020 08.10.20", "25. April 2023 14.25.02"]
@@ -97,7 +96,7 @@ spec =
input.format (Date_Time_Formatter.from "d. MMMM yyyy HH.mm.ss" Locale.uk) . should_equal expected_gb
input.format (Date_Time_Formatter.from "d. MMMM yyyy HH.mm.ss" Locale.france) . should_equal expected_fr
Test.specify "overriding the Locale with `format` argument" <|
group_builder.specify "overriding the Locale with `format` argument" <|
formatter = Date_Time_Formatter.from "d. MMMM yyyy HH.mm.ss" Locale.france
input = Column.from_vector "values" [Date_Time.new 2020 6 21 8 10 20, Date_Time.new 2023 4 25 14 25 2]
expected_fr = Column.from_vector "values" ["21. juin 2020 08.10.20", "25. avril 2023 14.25.02"]
@@ -107,7 +106,7 @@ spec =
# If I provide a locale argument, it overrides what is already in the formatter:
input.format formatter Locale.poland . should_equal expected_pl
Test.specify "Empty/Nothing format" <|
group_builder.specify "Empty/Nothing format" <|
zone = Time_Zone.parse "US/Hawaii"
input = Column.from_vector "values" [Date_Time.new 2020 12 21 8 10 20 zone=zone, Date_Time.new 2023 4 25 14 25 2 zone=zone]
expected = Column.from_vector "values" ['2020-12-21 08:10:20-10:00[US/Hawaii]', '2023-04-25 14:25:02-10:00[US/Hawaii]']
@@ -115,25 +114,25 @@ spec =
input.format "" . should_equal expected
input.format Nothing . should_equal expected
Test.specify "Bad format" <|
group_builder.specify "Bad format" <|
input = Column.from_vector "values" [Date_Time.new 2020 6 21 8 10 20, Date_Time.new 2023 4 25 14 25 2]
input.format "jjjjjjjj" . should_fail_with Date_Time_Format_Parse_Error
Test.group "Date_Time Column.format, with format Column" <|
Test.specify "Date_Time column" <|
suite_builder.group "Date_Time Column.format, with format Column" group_builder->
group_builder.specify "Date_Time column" <|
input = Column.from_vector "values" [Date_Time.new 2020 6 21 8 10 20, Date_Time.new 2023 4 25 14 25 2]
formats = Column.from_vector "formats" ["yyyyMMdd HH.mm.ss", "dd-MM-yyyy HH.mm.ss"]
expected = Column.from_vector "values" ["20200621 08.10.20", "25-04-2023 14.25.02"]
actual = input.format formats
actual . should_equal expected
Test.specify "Date_Time with locale" <|
group_builder.specify "Date_Time with locale" <|
input = Column.from_vector "values" [Date_Time.new 2020 6 21 8 10 20, Date_Time.new 2023 4 25 14 25 2]
formats = Column.from_vector "formats" ["d. MMMM yyyy HH.mm.ss", "d-MMMM-yyyy HH.mm.ss"]
expected = Column.from_vector "values" ["21. juin 2020 08.10.20", "25-avril-2023 14.25.02"]
input.format formats (Locale.new "fr") . should_equal expected
Test.specify "Empty/Nothing format, with format Column" <|
group_builder.specify "Empty/Nothing format, with format Column" <|
zone = Time_Zone.parse "US/Hawaii"
input = Column.from_vector "values" [Date_Time.new 2020 6 21 8 10 20 zone=zone, Date_Time.new 2023 4 25 14 25 2 zone=zone]
formats = Column.from_vector "formats" ["", Nothing]
@@ -141,29 +140,29 @@ spec =
actual = input.format formats
actual . should_equal expected
Test.specify "Bad format" <|
group_builder.specify "Bad format" <|
input = Column.from_vector "values" [Date_Time.new 2020 6 21 8 10 20, Date_Time.new 2023 4 25 14 25 2, Date_Time.new 2023 4 26 3 4 5]
formats = Column.from_vector "formats" ["yyyyMMdd HH.mm.ss", "jjjjj", "FFF"]
input.format formats . should_fail_with Date_Time_Format_Parse_Error
Test.specify "Bad format column type" <|
group_builder.specify "Bad format column type" <|
input = Column.from_vector "values" [Date_Time.new 2020 6 21 8 10 20, Date_Time.new 2023 4 25 14 25 2]
formats = Column.from_vector "formats" [3, 4, 5]
input.format formats . should_fail_with Invalid_Value_Type
Test.specify "column length mismatch" <|
group_builder.specify "column length mismatch" <|
input = Column.from_vector "values" [Date_Time.new 2020 6 21 8 10 20, Date_Time.new 2023 4 25 14 25 2]
formats = Column.from_vector "formats" ["yyyyMMdd", "jjjj", "w"]
input.format formats . should_fail_with Illegal_Argument
Test.group "Time_Of_Day Column.format, with format string" <|
Test.specify "Time_Of_Day column" <|
suite_builder.group "Time_Of_Day Column.format, with format string" group_builder->
group_builder.specify "Time_Of_Day column" <|
input = Column.from_vector "values" [Time_Of_Day.new 8 10 20, Time_Of_Day.new 14 25 2]
expected = Column.from_vector "values" ["08.10.20", "14.25.02"]
actual = input.format "HH.mm.ss"
actual . should_equal expected
Test.specify "Time_Of_Day with locale" <|
group_builder.specify "Time_Of_Day with locale" <|
input = Column.from_vector "values" [Time_Of_Day.new 8 10 20, Time_Of_Day.new 14 25 2]
# Note that the results are all the same.
expected = Column.from_vector "values" ["08.10.20", "14.25.02"]
@@ -171,178 +170,182 @@ spec =
input.format "HH.mm.ss" (Locale.default) . should_equal expected
input.format "HH.mm.ss" (Locale.new "gb") . should_equal expected
Test.specify "Empty/Nothing format" <|
group_builder.specify "Empty/Nothing format" <|
input = Column.from_vector "values" [Time_Of_Day.new 8 10 20, Time_Of_Day.new 14 25 2]
expected = Column.from_vector "values" ['08:10:20', '14:25:02']
input.format . should_equal expected
input.format "" . should_equal expected
input.format Nothing . should_equal expected
Test.specify "Bad format" <|
group_builder.specify "Bad format" <|
input = Column.from_vector "values" [Time_Of_Day.new 8 10 20, Time_Of_Day.new 14 25 2]
input.format "jjjj" . should_fail_with Date_Time_Format_Parse_Error
Test.specify "Format for wrong date type" <|
group_builder.specify "Format for wrong date type" <|
input = Column.from_vector "values" [Time_Of_Day.new 8 10 20, Time_Of_Day.new 14 25 2]
input.format "yyyyMMdd HH.mm.ss" . should_fail_with Time_Error
Test.group "Time_Of_Day Column.format, with format Column" <|
Test.specify "Time_Of_Day column" <|
suite_builder.group "Time_Of_Day Column.format, with format Column" group_builder->
group_builder.specify "Time_Of_Day column" <|
input = Column.from_vector "values" [Time_Of_Day.new 8 10 20, Time_Of_Day.new 14 25 2]
formats = Column.from_vector "formats" ["HH.mm.ss", "ss mm HH"]
expected = Column.from_vector "values" ["08.10.20", "02 25 14"]
actual = input.format formats
actual . should_equal expected
Test.specify "Time_Of_Day with locale" <|
group_builder.specify "Time_Of_Day with locale" <|
input = Column.from_vector "values" [Time_Of_Day.new 8 10 20, Time_Of_Day.new 14 25 2]
formats = Column.from_vector "formats" ["HH.mm.ss", "ss mm HH"]
expected = Column.from_vector "values" ["08.10.20", "02 25 14"]
input.format formats (Locale.new "fr") . should_equal expected
Test.specify "Empty/Nothing format, with format Column" <|
group_builder.specify "Empty/Nothing format, with format Column" <|
input = Column.from_vector "values" [Time_Of_Day.new 8 10 20, Time_Of_Day.new 14 25 2]
formats = Column.from_vector "formats" ["", Nothing]
expected = Column.from_vector "values" ["08:10:20", "14:25:02"]
actual = input.format formats
actual . should_equal expected
Test.specify "Bad format" <|
group_builder.specify "Bad format" <|
input = Column.from_vector "values" [Time_Of_Day.new 8 10 20, Time_Of_Day.new 14 25 2, Time_Of_Day.new 3 4 5]
formats = Column.from_vector "formats" ["HH.mm.ss", "jjjjj", "FFF"]
input.format formats . should_fail_with Date_Time_Format_Parse_Error
Test.specify "Bad format column type" <|
group_builder.specify "Bad format column type" <|
input = Column.from_vector "values" [Time_Of_Day.new 8 10 20, Time_Of_Day.new 14 25 2]
formats = Column.from_vector "formats" [3, 4, 5]
input.format formats . should_fail_with Invalid_Value_Type
Test.specify "column length mismatch" <|
group_builder.specify "column length mismatch" <|
input = Column.from_vector "values" [Time_Of_Day.new 8 10 20, Time_Of_Day.new 14 25 2]
formats = Column.from_vector "formats" ["yyyyMMdd", "jjjjj", "w"]
input.format formats . should_fail_with Illegal_Argument
Test.group "Boolean Column.format, with format string" <|
Test.specify "Boolean column" <|
suite_builder.group "Boolean Column.format, with format string" group_builder->
group_builder.specify "Boolean column" <|
input = Column.from_vector "values" [True, False]
expected = Column.from_vector "values" ["t", "f"]
actual = input.format "t|f"
actual . should_equal expected
Test.specify "Empty/Nothing format" <|
group_builder.specify "Empty/Nothing format" <|
input = Column.from_vector "values" [True, False]
expected = Column.from_vector "values" ["True", "False"]
input.format . should_equal expected
input.format "" . should_equal expected
input.format Nothing . should_equal expected
Test.specify "Bad format" <|
group_builder.specify "Bad format" <|
input = Column.from_vector "values" [True, False]
input.format "x|y|z" . should_fail_with Illegal_Argument
Test.group "Boolean Column.format, with format Column" <|
Test.specify "Time_Of_Day column" <|
suite_builder.group "Boolean Column.format, with format Column" group_builder->
group_builder.specify "Time_Of_Day column" <|
input = Column.from_vector "values" [True, False, True, False]
formats = Column.from_vector "formats" ["True|False", "True|False", "troo|valz", "troo|valz"]
expected = Column.from_vector "values" ["True", "False", "troo", "valz"]
actual = input.format formats
actual . should_equal expected
Test.specify "Empty/Nothing format, with format Column" <|
group_builder.specify "Empty/Nothing format, with format Column" <|
input = Column.from_vector "values" [True, False]
formats = Column.from_vector "formats" ["", Nothing]
expected = Column.from_vector "values" ["True", "False"]
input.format formats . should_equal expected
Test.specify "Bad format" <|
group_builder.specify "Bad format" <|
input = Column.from_vector "values" [True, False]
formats = Column.from_vector "formats" ["True|False", "xyzzy"]
input.format formats . should_fail_with Illegal_Argument
spec_with_numeric_type "Integer" (Value_Type.Integer Bits.Bits_64)
spec_with_numeric_type "Float" (Value_Type.Float Bits.Bits_64)
spec_with_numeric_type suite_builder "Integer" (Value_Type.Integer Bits.Bits_64)
spec_with_numeric_type suite_builder "Float" (Value_Type.Float Bits.Bits_64)
Test.group "Integer" <|
Test.specify "Integer Column (constructing the column directly from Integers)" <|
suite_builder.group "Integer" group_builder->
group_builder.specify "Integer Column (constructing the column directly from Integers)" <|
input = Column.from_vector "values" [100000000, 2222, 3]
expected = Column.from_vector "values" ["100,000,000.00", "2,222.00", "3.00"]
input.format "#,##0.00" . should_equal expected
Test.group "Numeric, empty/Nothing" <|
Test.specify "Integer" <|
suite_builder.group "Numeric, empty/Nothing" group_builder->
group_builder.specify "Integer" <|
input = Column.from_vector "values" ["100000000", "2222", "3"] . parse (Value_Type.Integer Bits.Bits_64)
expected = Column.from_vector "values" ["100000000", "2222", "3"]
input.format . should_equal expected
input.format "" . should_equal expected
input.format Nothing . should_equal expected
Test.specify "Float" <|
group_builder.specify "Float" <|
input = Column.from_vector "values" ["100000000", "2222", "3"] . parse (Value_Type.Float Bits.Bits_64)
expected = Column.from_vector "values" ['1.0E8', '2222.0', '3.0']
input.format . should_equal expected
input.format "" . should_equal expected
input.format Nothing . should_equal expected
Test.specify "Integer, with format Column" <|
group_builder.specify "Integer, with format Column" <|
input = Column.from_vector "values" ["100000000", "2222", "3"] . parse (Value_Type.Integer Bits.Bits_64)
formats = Column.from_vector "formats" ["", Nothing, Nothing]
expected = Column.from_vector "values" ["100000000", "2222", "3"]
input.format formats . should_equal expected
Test.specify "Float, with format Column" <|
group_builder.specify "Float, with format Column" <|
input = Column.from_vector "values" ["100000000", "2222", "3"] . parse (Value_Type.Float Bits.Bits_64)
formats = Column.from_vector "formats" ["", Nothing, Nothing]
expected = Column.from_vector "values" ['1.0E8', '2222.0', '3.0']
input.format formats . should_equal expected
Test.group "Errors" <|
Test.specify "Unsupported column type" <|
suite_builder.group "Errors" group_builder->
group_builder.specify "Unsupported column type" <|
input = Column.from_vector "values" ["100000000", "hey", "3"]
input.format "xyz" . should_fail_with Illegal_Argument
Test.specify "Format is not text" <|
group_builder.specify "Format is not text" <|
input = Column.from_vector "values" [Date.new 2020 12 21, Date.new 2023 4 25]
Test.expect_panic_with (input.format 73) Type_Error
Test.group "Edge cases" <|
Test.specify "empty table is ok" <|
suite_builder.group "Edge cases" group_builder->
group_builder.specify "empty table is ok" <|
input = Column.from_vector "values" [Date.new 2020 12 21, Date.new 2023 4 25] . take 0
expected = Column.from_vector "values" []
actual = input.format "yyyyMMdd"
actual . should_equal expected
spec_with_numeric_type name numeric_type =
Test.group name <|
Test.specify "Column" <|
spec_with_numeric_type suite_builder name numeric_type =
suite_builder.group name group_builder->
group_builder.specify "Column" <|
input = Column.from_vector "values" ["100000000", "2222", "3"] . parse numeric_type
expected = Column.from_vector "values" ["100,000,000.00", "2,222.00", "3.00"]
input.format "#,##0.00" . should_equal expected
Test.specify "Column with locale" <|
group_builder.specify "Column with locale" <|
input = Column.from_vector "values" ["100000000", "2222", "3"] . parse numeric_type
expected = Column.from_vector "values" ["100000000,00", "2222,00", "3,00"]
input.format "#,##0.00" locale=(Locale.new "fr") . should_equal expected
Test.specify "Bad format" <|
group_builder.specify "Bad format" <|
input = Column.from_vector "values" ["100000000", "2222", "3"] . parse numeric_type
input.format "#.##0,00" . should_fail_with Illegal_Argument
Test.group name+", with format Column" <|
Test.specify "Column" <|
suite_builder.group name+", with format Column" group_builder->
group_builder.specify "Column" <|
input = Column.from_vector "values" ["100000000", "2222", "3"] . parse numeric_type
formats = Column.from_vector "formats" ["#,##0.00", "0.00", "0"]
expected = Column.from_vector "values" ["100,000,000.00", "2222.00", "3"]
input.format formats . should_equal expected
Test.specify "Column with locale" <|
group_builder.specify "Column with locale" <|
input = Column.from_vector "values" ["100000000", "2222", "3"] . parse numeric_type
formats = Column.from_vector "formats" ["#,##0.00", "0.00", "0"]
expected = Column.from_vector "values" ["100000000,00", "2222,00", "3"]
input.format formats locale=(Locale.new "fr") . should_equal expected
Test.specify "Bad format" <|
group_builder.specify "Bad format" <|
input = Column.from_vector "values" ["100000000", "2222", "3"] . parse numeric_type
formats = Column.from_vector "formats" ["#,##0.00", "#.##0,00", "0"]
input.format formats . should_fail_with Illegal_Argument
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
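Note the mechanical change to parameterized specs: helpers like `spec_with_numeric_type` now take `suite_builder` as an explicit first argument and register their own groups, so a single definition can be instantiated once per type. A reduced sketch of the pattern (the assertion body is illustrative):

    add_specs suite_builder =
        spec_for_type suite_builder "Integer" Value_Type.Integer
        spec_for_type suite_builder "Float" Value_Type.Float

    spec_for_type suite_builder name value_type =
        suite_builder.group name group_builder->
            group_builder.specify "parses text to "+name <|
                column = Column.from_vector "values" ["1", "2"] . parse value_type
                column.value_type . should_equal value_type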

View File

@@ -12,23 +12,27 @@ from Standard.Table import Column, Value_Type, Auto
import Standard.Table.Data.Type.Value_Type.Bits
from Standard.Table.Errors import Invalid_Value_Type, Invalid_Column_Names
from Standard.Test import Test, Test_Suite, Problems
from Standard.Test_New import all
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
spec =
Test.group "Columns" <|
add_specs suite_builder =
suite_builder.group "Columns" group_builder->
test_column = Column.from_vector "Test" [1, 3, 5, 2, 4, 6]
empty_column = Column.from_vector "Test" []
Test.specify "should allow getting specific elements" <|
group_builder.specify "should allow getting specific elements" <|
test_column.at 0 . should_equal 1
test_column.at 2 . should_equal 5
test_column.at 5 . should_equal 6
test_column.at 6 . should_fail_with Index_Out_Of_Bounds
empty_column.at 0 . should_fail_with Index_Out_Of_Bounds
Test.specify "should be able to take the first n elements" <|
group_builder.specify "should be able to take the first n elements" <|
expected_1 = Column.from_vector "Test" [1, 3, 5]
expected_2 = Column.from_vector "Test" [1, 3, 5, 2, 4, 6]
expected_3 = Column.from_vector "Test" []
@@ -36,7 +40,7 @@ spec =
test_column.take (First 7) . to_vector . should_equal expected_2.to_vector
test_column.take (First 0) . to_vector . should_equal expected_3.to_vector
Test.specify "should be able to take the first n elements by Integer" <|
group_builder.specify "should be able to take the first n elements by Integer" <|
expected_1 = Column.from_vector "Test" [1, 3, 5]
expected_2 = Column.from_vector "Test" [1, 3, 5, 2, 4, 6]
expected_3 = Column.from_vector "Test" []
@@ -44,7 +48,7 @@ spec =
test_column.take 7 . to_vector . should_equal expected_2.to_vector
test_column.take 0 . to_vector . should_equal expected_3.to_vector
Test.specify "should be able to take the last n elements" <|
group_builder.specify "should be able to take the last n elements" <|
expected_1 = Column.from_vector "Test" [2, 4, 6]
expected_2 = Column.from_vector "Test" [1, 3, 5, 2, 4, 6]
expected_3 = Column.from_vector "Test" []
@@ -52,27 +56,27 @@ spec =
test_column.take (Last 7) . to_vector . should_equal expected_2.to_vector
test_column.take (Last 0) . to_vector . should_equal expected_3.to_vector
Test.specify "should be able to get the first element" <|
group_builder.specify "should be able to get the first element" <|
test_column.first . should_equal 1
empty_column.first.should_fail_with Index_Out_Of_Bounds
Test.specify "should be able to get the last element" <|
group_builder.specify "should be able to get the last element" <|
test_column.last . should_equal 6
empty_column.last.should_fail_with Index_Out_Of_Bounds
Test.specify "should be able to be reversed" <|
group_builder.specify "should be able to be reversed" <|
expected_1 = Column.from_vector "Test" [6, 4, 2, 5, 3, 1]
test_column.reverse.to_vector . should_equal expected_1.to_vector
empty_column.reverse.to_vector . should_equal empty_column.to_vector
Test.specify "should allow to count duplicate value occurrences" <|
group_builder.specify "should allow to count duplicate value occurrences" <|
c_1 = Column.from_vector "c 1" [0, 1, 2, 2, 1, 0, 2]
c_1.duplicate_count.to_vector.should_equal [0, 0, 0, 1, 1, 1, 2]
c_2 = Column.from_vector "c 2" ["foo", "bar", "foo", "baz", "bar"]
c_2.duplicate_count.to_vector.should_equal [0, 0, 1, 0, 1]
Test.specify "should result in correct Storage if operation allows it" <|
group_builder.specify "should result in correct Storage if operation allows it" <|
another = Column.from_vector "Test" [10, 20, 30, 40, 50, 60]
(test_column + 1).value_type . should_equal Value_Type.Integer
(test_column - 1).value_type . should_equal Value_Type.Integer
@@ -80,14 +84,14 @@ spec =
(test_column * 1.5).value_type . should_equal Value_Type.Float
(test_column + another).value_type . should_equal Value_Type.Integer
Test.specify "should forward dataflow error if constructed from one" <|
group_builder.specify "should forward dataflow error if constructed from one" <|
foo x =
if x == 1 then Error.throw "X" else x
col = Column.from_vector "Test" [foo 0, foo 1, foo 2]
col . should_fail_with Text
col.catch . should_equal "X"
Test.specify "should not allow invalid column names" <|
group_builder.specify "should not allow invalid column names" <|
c1 = Column.from_vector "" [1, 2, 3]
c1.should_fail_with Invalid_Column_Names
@@ -99,14 +103,14 @@ spec =
c4 = Column.from_vector 'foo\0bar' [1, 2, 3]
c4.should_fail_with Invalid_Column_Names
Test.specify "will coerce integers to decimals by default, to get a numeric column" <|
group_builder.specify "will coerce integers to decimals by default, to get a numeric column" <|
c1 = Column.from_vector "X" [1, 2.0]
c1.value_type . should_equal Value_Type.Float
c1.at 0 . should_be_a Float
c1.at 1 . should_be_a Float
c1.at 0 . is_a Integer . should_be_false
Test.specify "will preserve the types if the column is Mixed, regardless of ordering" <|
group_builder.specify "will preserve the types if the column is Mixed, regardless of ordering" <|
run_test vector =
Test.with_clue vector.pretty+": " <|
c = Column.from_vector "X" vector
@@ -149,7 +153,7 @@ spec =
run_test (big_test_vector medium big)
run_test (big_test_vector 123 456)
Test.specify "should allow to set a specific type at construction" <|
group_builder.specify "should allow to set a specific type at construction" <|
c1 = Column.from_vector "X" [1, 2] Value_Type.Float
c1.value_type . should_equal Value_Type.Float
c1.at 0 . should_be_a Float
@@ -199,7 +203,7 @@ spec =
c9.value_type . should_equal Value_Type.Time
c9.to_vector . should_equal [Time_Of_Day.new 10 11 12, Time_Of_Day.new 11 30]
Test.specify "will fail if unexpected values are encountered for the requested type" <|
group_builder.specify "will fail if unexpected values are encountered for the requested type" <|
r1 = Column.from_vector "X" ["a", 2] Value_Type.Char
r1.should_fail_with Invalid_Value_Type
r1.catch.to_display_text.should_contain "Expected type Char (variable length, max_size=unlimited), but got a value 2 of type Integer (16 bits)"
@@ -239,15 +243,15 @@ spec =
r9.should_fail_with Invalid_Value_Type
r9.catch.to_display_text.should_contain "Expected type Integer (64 bits), but got a value 1.5 of type Float"
Test.specify "will not allow to construct a column with Char size=0" <|
group_builder.specify "will not allow to construct a column with Char size=0" <|
r1 = Column.from_vector "X" [] (Value_Type.Char size=0 variable_length=False)
r1.should_fail_with Illegal_Argument
r2 = Column.from_vector "X" [] (Value_Type.Char size=0 variable_length=True)
r2.should_fail_with Illegal_Argument
Test.group "Rounding" <|
Test.specify "should be able to round a column of decimals" <|
suite_builder.group "Rounding" group_builder->
group_builder.specify "should be able to round a column of decimals" <|
Column.from_vector "foo" [1.2, 2.3, 2.5, 3.6] . round . should_equal (Column.from_vector "round([foo])" [1, 2, 3, 4])
Column.from_vector "foo" [1.25, 2.33, 3.57] . round 1 . should_equal <| Column.from_vector "round([foo])" [1.3, 2.3, 3.6]
Column.from_vector "foo" [12.0, 24.0, 25.0, 29.0] . round -1 . should_equal <| Column.from_vector "round([foo])" [10, 20, 30, 30]
@@ -257,98 +261,98 @@ spec =
Column.from_vector "foo" [-1.25, -2.33, -2.45, -3.57] . round 1 . should_equal <| Column.from_vector "round([foo])" [-1.3, -2.3, -2.5, -3.6]
Column.from_vector "foo" [-12.0, -24.0, -25.0, -29.0] . round -1 . should_equal <| Column.from_vector "round([foo])" [-10, -20, -30, -30]
Test.specify "decimal rounding should return the correct column type" <|
group_builder.specify "decimal rounding should return the correct column type" <|
col = Column.from_vector "foo" [1.21, 2.34, 3.68]
col . round -1 . value_type . should_equal Value_Type.Integer
col . round . value_type . should_equal Value_Type.Integer
col . round 1 . value_type . should_equal Value_Type.Float
Test.specify "should be able to round a column of integers" <|
group_builder.specify "should be able to round a column of integers" <|
Column.from_vector "foo" [12, 24, 25, 29] . round . should_equal <| Column.from_vector "round([foo])" [12, 24, 25, 29]
Column.from_vector "foo" [12, 24, 25, 29] . round -1 . should_equal <| Column.from_vector "round([foo])" [10, 20, 30, 30]
Column.from_vector "foo" [15, 25, 35] . round -1 use_bankers=True . should_equal <| Column.from_vector "round([foo])" [20, 20, 40]
Test.specify "integer rounding should return the correct column type" <|
group_builder.specify "integer rounding should return the correct column type" <|
col = Column.from_vector "foo" [12, 24, 25, 29]
col . round 1 . value_type . should_equal Value_Type.Integer
col . round 0 . value_type . should_equal Value_Type.Integer
col . round -1 . value_type . should_equal Value_Type.Integer
Test.specify "rounding should not attach a warning by default" <|
group_builder.specify "rounding should not attach a warning by default" <|
Problems.assume_no_problems <| Column.from_vector "foo" [12, 24, 25, 29] . round 1
Test.specify "should report out-of-range values as warnings" <|
group_builder.specify "should report out-of-range values as warnings" <|
col = Column.from_vector "foo" [12, 23, 99999999999999999]
expected = Column.from_vector "round([foo])" [10, 20, Nothing]
actual = col.round -1
actual . should_equal expected
Warning.get_all actual . map .value . should_equal [Illegal_Argument.Error "Error: `round` can only accept values between -99999999999999 and 99999999999999 (inclusive), but was 99999999999999999 (at rows [2])."]
Test.specify "should throw an error on decimal places out of range" <|
group_builder.specify "should throw an error on decimal places out of range" <|
col = Column.from_vector "foo" [12, 23, 99999999999999999]
col.round decimal_places=-1200 . should_fail_with Illegal_Argument
Test.specify "should handle type errors" <|
group_builder.specify "should handle type errors" <|
col = Column.from_vector "foo" [12, 23, 45]
Test.expect_panic_with (col.round use_bankers="string") Type_Error
Test.expect_panic_with (col.round decimal_places="string") Type_Error
Test.group "truncate" <|
Test.specify "should be able to truncate a column of floats" <|
suite_builder.group "truncate" group_builder->
group_builder.specify "should be able to truncate a column of floats" <|
Column.from_vector "foo" [1.25, 2.33, 3.57] . truncate . should_equal <| Column.from_vector "truncate([foo])" [1, 2, 3]
Column.from_vector "foo" [1.25, 2.33, 3.57] . truncate . value_type . should_equal Value_Type.Integer
Test.specify "should also work on ints" <|
group_builder.specify "should also work on ints" <|
Column.from_vector "foo" [1, 2, 3] . truncate . should_equal <| Column.from_vector "truncate([foo])" [1, 2, 3]
Column.from_vector "foo" [1, 2, 3] . truncate . value_type . should_equal Value_Type.Integer
Test.specify "Should error on input of the wrong type" <|
group_builder.specify "Should error on input of the wrong type" <|
Column.from_vector "foo" ["asdf", "zxcv", "qwer"] . truncate . should_fail_with Invalid_Value_Type
Test.group "ceil" <|
Test.specify "should be able to take the ceil of a column of floats" <|
suite_builder.group "ceil" group_builder->
group_builder.specify "should be able to take the ceil of a column of floats" <|
Column.from_vector "foo" [1.25, 2.33, 3.57] . ceil . should_equal <| Column.from_vector "ceil([foo])" [2, 3, 4]
Column.from_vector "foo" [1.25, 2.33, 3.57] . ceil . value_type . should_equal Value_Type.Integer
Test.specify "should also work on ints" <|
group_builder.specify "should also work on ints" <|
Column.from_vector "foo" [1, 2, 3] . ceil . should_equal <| Column.from_vector "ceil([foo])" [1, 2, 3]
Column.from_vector "foo" [1, 2, 3] . ceil . value_type . should_equal Value_Type.Integer
Test.specify "Should error on input of the wrong type" <|
group_builder.specify "Should error on input of the wrong type" <|
Column.from_vector "foo" ["asdf", "zxcv", "qwer"] . ceil . should_fail_with Invalid_Value_Type
Test.group "floor" <|
Test.specify "should be able to take the floor of a column of floats" <|
suite_builder.group "floor" group_builder->
group_builder.specify "should be able to take the floor of a column of floats" <|
Column.from_vector "foo" [1.25, 2.33, 3.57] . floor . should_equal <| Column.from_vector "floor([foo])" [1, 2, 3]
Column.from_vector "foo" [1.25, 2.33, 3.57] . floor . value_type . should_equal Value_Type.Integer
Test.specify "should also work on ints" <|
group_builder.specify "should also work on ints" <|
Column.from_vector "foo" [1, 2, 3] . floor . should_equal <| Column.from_vector "floor([foo])" [1, 2, 3]
Column.from_vector "foo" [1, 2, 3] . floor . value_type . should_equal Value_Type.Integer
Test.specify "Should error on input of the wrong type" <|
group_builder.specify "Should error on input of the wrong type" <|
Column.from_vector "foo" ["asdf", "zxcv", "qwer"] . floor . should_fail_with Invalid_Value_Type
Test.group "round/truncate/ceil/floor" <|
suite_builder.group "round/truncate/ceil/floor" group_builder->
do_op n op =
col = Column.from_vector "x" [n]
result = op col
result.to_vector.at 0
do_round n dp=0 use_bankers=False = do_op n (_.round dp use_bankers)
Test.specify "round returns the correct type" <|
group_builder.specify "round returns the correct type" <|
do_round 231.2 1 . should_be_a Float
do_round 231.2 0 . should_be_a Integer
do_round 231.2 . should_be_a Integer
do_round 231.2 -1 . should_be_a Integer
Test.specify "round returns the correct type" <|
group_builder.specify "round returns the correct type" <|
do_round 231 1 . should_be_a Integer
do_round 231 0 . should_be_a Integer
do_round 231 . should_be_a Integer
do_round 231 -1 . should_be_a Integer
Test.specify "nan/inf" <|
group_builder.specify "nan/inf" <|
ops = [.truncate, .ceil, .floor, .round]
ops.map op->
col = Column.from_vector "x" [2.1, 0.0, Number.nan, Number.positive_infinity, Number.negative_infinity, Nothing, 12.1]
@@ -359,8 +363,8 @@ spec =
warnings . should_contain <| Arithmetic_Error.Error 'Value is Infinity (at rows [3]).'
warnings . should_contain <| Arithmetic_Error.Error 'Value is NaN (at rows [2]).'
Test.group "Date_Time truncate" <|
Test.specify "should be able to truncate a column of Date_Times" <|
suite_builder.group "Date_Time truncate" group_builder->
group_builder.specify "should be able to truncate a column of Date_Times" <|
c = Column.from_vector "foo" [Date_Time.new 2020 10 24 1 2 3, Date_Time.new 2020 10 24 1 2 3]
truncated = c.truncate
truncated . should_equal <| Column.from_vector "truncate([foo])" [Date.new 2020 10 24, Date.new 2020 10 24]

View File

@@ -2,22 +2,38 @@ from Standard.Base import all
from Standard.Table import Table
from Standard.Test import Test_Suite
from Standard.Test_New import Test
import project.Common_Table_Operations
run_common_spec spec =
type Dummy_Connection
close = Nothing
add_specs suite_builder =
selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by=True natural_ordering=True case_insensitive_ordering=True order_by_unicode_normalization_by_default=True supports_unicode_normalization=True supports_time_duration=True supports_nanoseconds_in_time=True supports_mixed_columns=True fixed_length_text_columns=True supports_8bit_integer=True
aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config
agg_table_fn = _ ->
(enso_project.data / "data.csv") . read
empty_table_fn = _ ->
table = (enso_project.data / "data.csv") . read
empty_table = table.take 0
table.take 0
materialize = x->x
setup = Common_Table_Operations.Main.Test_Setup.Config "[In-Memory] " table empty_table Table.new materialize is_database=False test_selection=selection aggregate_test_selection=aggregate_selection connection=Nothing
spec setup
table_builder columns connection=Nothing =
_ = connection
Table.new columns
spec =
run_common_spec Common_Table_Operations.Main.spec
create_connection_func = _-> Dummy_Connection
main = Test_Suite.run_main spec
setup = Common_Table_Operations.Main.Test_Setup.Config "[In-Memory] " agg_table_fn empty_table_fn table_builder materialize is_database=False test_selection=selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_func
Common_Table_Operations.Main.add_specs suite_builder setup
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
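Two things change in this setup: the pre-built tables become thunks (`agg_table_fn`, `empty_table_fn`) that take a connection and build the table on demand, and `create_connection_func` (here returning a `Dummy_Connection` whose `close` is a no-op) lets the shared specs manage connection lifetime themselves. A hedged sketch of how a downstream spec might consume the config, assuming the field names match the constructor arguments:

    run_one setup =
        connection = setup.create_connection_func Nothing
        table = setup.agg_table_fn connection
        count = table.row_count
        connection.close
        count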

View File

@@ -5,12 +5,11 @@ from project.Util import all
import Standard.Table.Internal.Fan_Out
from Standard.Table import Table
import Standard.Test.Extensions
from Standard.Test import Test, Test_Suite, Problems
from Standard.Test_New import all
spec =
Test.group "Fan_Out" <|
Test.specify "can do fan_out_to_columns " <|
add_specs suite_builder =
suite_builder.group "Fan_Out" group_builder->
group_builder.specify "can do fan_out_to_columns " <|
cols = [["foo", [0, 1, 2]], ["bar", ["abc", "cbdbef", "ghbijbu"]]]
t = Table.new cols
expected_rows = [[0, "a", "c", Nothing], [1, "c", "d", "ef"], [2, "gh", "ij", "u"]]
@@ -18,7 +17,7 @@ spec =
t2 = Fan_Out.fan_out_to_columns t "bar" (_.split "b")
t2.should_equal expected
Test.specify "can do fan_out_to_rows" <|
group_builder.specify "can do fan_out_to_rows" <|
cols = [["foo", [0, 1, 2]], ["bar", ["abc", "cbdbef", "ghbijbu"]]]
t = Table.new cols
expected_rows = [[0, "a"], [0, "c"], [1, "c"], [1, "d"], [1, "ef"], [2, "gh"], [2, "ij"], [2, "u"]]
@@ -26,4 +25,8 @@ spec =
t2 = Fan_Out.fan_out_to_rows t "bar" (_.split "b")
t2.should_equal expected
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
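The expected values above encode a padding rule worth spelling out: `fan_out_to_columns` right-pads rows whose split produced fewer parts with `Nothing` (hence `[0, "a", "c", Nothing]`), while `fan_out_to_rows` emits one output row per part. Worked through for the `"b"` splits used in both tests:

    "abc" . split "b"      # ["a", "c"]        -> padded to ["a", "c", Nothing]
    "cbdbef" . split "b"   # ["c", "d", "ef"]
    "ghbijbu" . split "b"  # ["gh", "ij", "u"]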

View File

@@ -4,8 +4,7 @@ import Standard.Table.Data.Type.Value_Type.Bits
from Standard.Table import all
from Standard.Table.Errors import Arithmetic_Overflow, Conversion_Failure, Invalid_Value_Type, No_Common_Type, Loss_Of_Integer_Precision
from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Util import all
@@ -16,11 +15,15 @@ polyglot java import java.lang.Long as Java_Long
polyglot java import org.enso.table_test_helpers.PolyglotHelpers
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
spec =
Test.group "[In-Memory] Column operation Integer Overflow handling" <|
Test.specify "64-bit integer column overflow" <|
add_specs suite_builder =
suite_builder.group "[In-Memory] Column operation Integer Overflow handling" group_builder->
group_builder.specify "64-bit integer column overflow" <|
min_value = Java_Long.MIN_VALUE
max_value = Java_Long.MAX_VALUE
value_type = Value_Type.Integer Bits.Bits_64
@@ -90,7 +93,7 @@ spec =
c10.value_type . should_equal value_type
Problems.expect_only_warning Arithmetic_Overflow c10
test_no_overflow value_type max_value min_value = Test.specify "operations on "+value_type.to_display_text+" will not overflow, because the result type is always a 64-bit integer column" <|
test_no_overflow value_type max_value min_value = group_builder.specify "operations on "+value_type.to_display_text+" will not overflow, because the result type is always a 64-bit integer column" <|
t = Table.new [["X", [0, 1, max_value, 0]], ["Y", [0, -1, min_value, 0]], ["U", [1, 1, 1, 1]]]
x = t.at "X" . cast value_type
y = t.at "Y" . cast value_type
@@ -140,7 +143,7 @@ spec =
test_no_overflow (Value_Type.Integer Bits.Bits_16) Java_Short.MAX_VALUE Java_Short.MIN_VALUE
test_no_overflow (Value_Type.Integer Bits.Bits_32) Java_Integer.MAX_VALUE Java_Integer.MIN_VALUE
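The invariant `test_no_overflow` instantiates for the 8-, 16- and 32-bit cases: arithmetic on any sub-64-bit integer column always produces a 64-bit integer column, so the result has room by construction. A minimal illustration using only operations from the surrounding hunks:

    x = Column.from_vector "X" [Java_Short.MAX_VALUE] . cast (Value_Type.Integer Bits.Bits_16)
    (x + x) . value_type . should_equal (Value_Type.Integer Bits.Bits_64)
    (x + x) . to_vector . should_equal [2 * Java_Short.MAX_VALUE]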
Test.specify "if we cast to Decimal first, then the operations will not overflow" <|
group_builder.specify "if we cast to Decimal first, then the operations will not overflow" <|
t0 = Table.new [["X", [0, 1, Java_Long.MAX_VALUE, 0]], ["U", [1, 1, 1, 1]]]
t1 = t0.cast "X" (Value_Type.Decimal scale=0)
x = t1.at "X"
@@ -162,7 +165,7 @@ spec =
Problems.assume_no_problems c4
c4.to_vector . should_equal [0, 1, Java_Long.MAX_VALUE*Java_Long.MAX_VALUE, 0]
Test.specify "mixed operations" <|
group_builder.specify "mixed operations" <|
t = Table.new [["X", [Java_Short.MAX_VALUE]], ["Y", [1]]]
x = t.at "X" . cast (Value_Type.Integer Bits.Bits_16)
y = t.at "Y" . cast Value_Type.Byte
@@ -195,8 +198,8 @@ spec =
(x%2).value_type . should_equal (Value_Type.Integer Bits.Bits_64)
Test.group "[In-Memory] Handling of Big Integer values" <|
Test.specify "will create a BigInteger column if some values do not fit in long" <|
suite_builder.group "[In-Memory] Handling of Big Integer values" group_builder->
group_builder.specify "will create a BigInteger column if some values do not fit in long" <|
c0 = Column.from_vector "X" [Java_Long.MAX_VALUE, 0, 1]
Problems.assume_no_problems c0
c0.value_type . should_equal (Value_Type.Integer Bits.Bits_64)
@ -220,12 +223,12 @@ spec =
Problems.assume_no_problems t1
t1.at "X" . value_type . should_be_a (Value_Type.Decimal ...)
Test.specify "should fail if a big integer is provided for an Integer 64-bit column" <|
group_builder.specify "should fail if a big integer is provided for an Integer 64-bit column" <|
c1 = Column.from_vector "X" [Java_Long.MAX_VALUE, 2^70, 100] value_type=Value_Type.Integer
c1.should_fail_with Invalid_Value_Type
c1.catch.to_display_text . should_contain "Decimal"
Test.specify "allows to construct a column from big integers coming from Java" <|
group_builder.specify "allows to construct a column from big integers coming from Java" <|
big_integer_but_small = PolyglotHelpers.createSmallBigIntegerComingFromJava
t1 = Table.new [["X", [big_integer_but_small]]]
t1.at "X" . value_type . should_equal (Value_Type.Integer Bits.Bits_64)
@ -239,12 +242,12 @@ spec =
v2.at 0 . should_be_a Integer
v2.at 0 . to_text . should_equal big_big_integer.to_text
Test.specify "will create a Mixed column if other types are present" <|
group_builder.specify "will create a Mixed column if other types are present" <|
c1 = Column.from_vector "X" [Java_Long.MAX_VALUE, 2^70, "abc"]
Problems.assume_no_problems c1
c1.value_type . should_equal Value_Type.Mixed
Test.specify "should allow to create a Float column from a big integer, but warn about Loss_Of_Integer_Precision if relevant" <|
group_builder.specify "should allow to create a Float column from a big integer, but warn about Loss_Of_Integer_Precision if relevant" <|
# 2^70 is not exactly representable as a Float.
(2^70 + 0.0).truncate . should_not_equal (2^70)
@ -268,7 +271,7 @@ spec =
c4.to_vector . should_equal [2^70, 1]
Problems.expect_only_warning Loss_Of_Integer_Precision c4
Test.specify "should use Decimal type if a mapping operation yields a numeric column with big integers" <|
group_builder.specify "should use Decimal type if a mapping operation yields a numeric column with big integers" <|
c = Column.from_vector "X" [1, 2, 3]
f1 x = if x == 2 then 2^70 else x
@ -287,7 +290,7 @@ spec =
Problems.assume_no_problems c2
c2.value_type . should_equal Value_Type.Mixed
Test.specify "allows arithmetic on Decimal columns" <|
group_builder.specify "allows arithmetic on Decimal columns" <|
t = Table.new [["X", [10^30, 2^70, Nothing, 3]], ["Y", [10^20, 2, 3, 4]]]
x = t.at "X"
y = t.at "Y"
@ -324,7 +327,7 @@ spec =
x.is_infinite . to_vector . should_equal [False, False, Nothing, False]
x.is_in [3, 2^70] . to_vector . should_equal [False, True, Nothing, True]
Test.specify "allows arithmetic on Decimal columns and other numeric columns" <|
group_builder.specify "allows arithmetic on Decimal columns and other numeric columns" <|
t = Table.new [["X", [10^30, 2^70, Nothing, 3]], ["Y", [1, 2, 3, 4]], ["Z", [1.5, 2.5, 3.5, 4.5]]]
x = t.at "X"
y = t.at "Y"
@ -391,7 +394,7 @@ spec =
r4.value_type . should_be_a (Value_Type.Decimal ...)
r4.to_vector . should_equal [10^30, 2^70, 23, 3]
Test.specify "returns a Decimal column if the scalar argument is a big integer" <|
group_builder.specify "returns a Decimal column if the scalar argument is a big integer" <|
c = Column.from_vector "X" [1, 2, Nothing, 3]
c.value_type.should_equal Value_Type.Integer
x = 2^70
@ -430,7 +433,7 @@ spec =
r7.value_type . should_be_a (Value_Type.Decimal ...)
r7.to_vector . should_equal [1, 2, x, 3]
Test.specify "should work fine with typing edge cases" <|
group_builder.specify "should work fine with typing edge cases" <|
c1 = Column.from_vector "X" [2^70, 100, Nothing, 200]
c1.value_type . should_be_a (Value_Type.Decimal ...)
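The `test_no_overflow` helper above shows how parameterized tests carry over: a helper defined inside the group closes over `group_builder`, so every call still registers a separate named spec. A hedged sketch of the same idea, with illustrative names and values:

    suite_builder.group "Round-trip casts" group_builder->
        # The helper captures group_builder from the enclosing group, so each
        # invocation below registers one spec.
        test_roundtrip value_type value =
            group_builder.specify "round-trips "+value_type.to_display_text <|
                c = Column.from_vector "X" [value] . cast value_type
                c.to_vector . should_equal [value]

        test_roundtrip (Value_Type.Integer Bits.Bits_16) 123
        test_roundtrip (Value_Type.Integer Bits.Bits_32) 123456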

View File

@ -3,21 +3,24 @@ from Standard.Base import all
from Standard.Table import all
from Standard.Table.Errors import Loss_Of_Integer_Precision
from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Util import all
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
spec =
add_specs suite_builder =
table_builder = Table.new
Test.group "[In-Memory] Loss_Of_Integer_Precision (large integers not being exactly representable in Float)" <|
suite_builder.group "[In-Memory] Loss_Of_Integer_Precision (large integers not being exactly representable in Float)" group_builder->
# The power of 2 is still represented exactly, but x+1 is not anymore.
x = 2^60
y = x+1
Test.specify "should be reported in `cast`" <|
group_builder.specify "should be reported in `cast`" <|
t1 = table_builder [["X", [y]]]
c1 = t1.at "X" . cast Value_Type.Float
c1.value_type . should_equal Value_Type.Float
@ -49,7 +52,7 @@ spec =
w2.to_display_text . should_contain "too large to represent exactly"
w2.affected_rows_count . should_equal 4
Test.specify "should be reported in Column.from_vector and Table.new" <|
group_builder.specify "should be reported in Column.from_vector and Table.new" <|
c1 = Column.from_vector "X" [1, 2, x, x+1, x+2, 100]
# No problems if integers only.
Problems.assume_no_problems c1
@ -78,7 +81,7 @@ spec =
Problems.assume_no_problems c6
c6.to_vector.map .to_text . should_equal ([x+1, 1.0, x+2, "a", x+3].map .to_text)
Test.specify "should not be reported when numeric operation mixes Float and Integer" <|
group_builder.specify "should not be reported when numeric operation mixes Float and Integer" <|
t = table_builder [["A", [y]], ["B", [1.0]]]
a = t.at "A"
b = t.at "B"
@ -97,7 +100,7 @@ spec =
Problems.assume_no_problems (a - b)
Problems.assume_no_problems (b / a)
Test.specify "should be reported if mixing column types in `iif` causes an Integer column to be casted" <|
group_builder.specify "should be reported if mixing column types in `iif` causes an Integer column to be casted" <|
t = table_builder [["A", [x+1, x+2]], ["B", [1.5, 2.5]], ["C", [True, False]]]
r = (t.at "C").iif (t.at "A") (t.at "B")
r.value_type . should_equal Value_Type.Float
@ -107,7 +110,7 @@ spec =
w.affected_rows_count . should_equal 1
w.example_value.to_text . should_equal (x+1).to_text
Test.specify "should be reported when reading a CSV file" <|
group_builder.specify "should be reported when reading a CSV file" <|
t = (enso_project.data / "lossy_int.csv") . read
t.column_names . should_equal ["X", "Y", "Z"]
t.at "X" . value_type . should_equal Value_Type.Char

View File

@ -1,6 +1,6 @@
from Standard.Base import all
from Standard.Test import Test_Suite
from Standard.Test_New import all
import project.In_Memory.Aggregate_Column_Spec
import project.In_Memory.Builders_Spec
@ -16,19 +16,22 @@ import project.In_Memory.Table_Date_Spec
import project.In_Memory.Table_Date_Time_Spec
import project.In_Memory.Table_Time_Of_Day_Spec
spec =
Table_Spec.spec
Column_Spec.spec
Column_Format_Spec.spec
Common_Spec.spec
Integer_Overflow_Spec.spec
Lossy_Conversions_Spec.spec
Table_Date_Spec.spec
Table_Date_Time_Spec.spec
Table_Time_Of_Day_Spec.spec
Aggregate_Column_Spec.spec
Builders_Spec.spec
Split_Tokenize_Spec.spec
Parse_To_Table_Spec.spec
add_specs suite_builder =
Table_Spec.add_specs suite_builder
Column_Spec.add_specs suite_builder
Column_Format_Spec.add_specs suite_builder
Common_Spec.add_specs suite_builder
Integer_Overflow_Spec.add_specs suite_builder
Lossy_Conversions_Spec.add_specs suite_builder
Table_Date_Spec.add_specs suite_builder
Table_Date_Time_Spec.add_specs suite_builder
Table_Time_Of_Day_Spec.add_specs suite_builder
Aggregate_Column_Spec.add_specs suite_builder
Builders_Spec.add_specs suite_builder
Split_Tokenize_Spec.add_specs suite_builder
Parse_To_Table_Spec.add_specs suite_builder
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
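Note that the aggregator module needs nothing beyond forwarding the builder, because registration and execution are now separate phases: `Test.build` only collects groups, and the specs run when `run_with_filter` is invoked. The same two-phase split, annotated:

    main =
        suite = Test.build suite_builder->
            add_specs suite_builder    # phase 1: registration only; nothing runs yet
        suite.run_with_filter          # phase 2: execute the collected specs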

View File

@ -9,73 +9,77 @@ from Standard.Table import Table
from Standard.Table.Data.Type.Value_Type import Bits, Value_Type
from Standard.Table.Errors import Invalid_Value_Type, Column_Count_Exceeded, Duplicate_Output_Column_Names, Missing_Input_Columns
from Standard.Table.Extensions.Table_Conversions import all
from Standard.Test import Test, Test_Suite, Problems
from Standard.Test_New import all
from project.Util import all
spec =
Test.group "Text.parse_to_table" <|
Test.specify "text_to_table" <|
add_specs suite_builder =
suite_builder.group "Text.parse_to_table" group_builder->
group_builder.specify "text_to_table" <|
expected = Table.from_rows ["Column"]
[["a"], ["ab12"], ["bt100"], ["c12"], ["d20"], ["q"]]
actual = "a 7 ab12 bt100 c12d20q 12".parse_to_table "[a-z]+\d*"
actual.should_equal expected
Test.specify "text_to_table with a regex" <|
group_builder.specify "text_to_table with a regex" <|
expected = Table.from_rows ["Column"]
[["a"], ["ab12"], ["bt100"], ["c12"], ["d20"], ["q"]]
actual = "a 7 ab12 bt100 c12d20q 12".parse_to_table "[a-z]+\d*".to_regex
actual.should_equal expected
Test.group "Text.parse_to_table with groups" <|
Test.specify "with groups" <|
suite_builder.group "Text.parse_to_table with groups" group_builder->
group_builder.specify "with groups" <|
expected = Table.from_rows ["Column 1", "Column 2"]
[["ab", 12], ["bt", 100], ["c", 12], ["d", 20]]
actual = "a 7 ab-12 bt-100 c-12d-20q q8 12".parse_to_table "([a-z]+)-(\d*)"
actual.should_equal expected
Test.specify "with named groups" <|
group_builder.specify "with named groups" <|
expected = Table.from_rows ["letters", "Column 2"]
[["ab", 12], ["bt", 100], ["c", 12], ["d", 20]]
actual = "a 7 ab-12 bt-100 c-12d-20q q8 12".parse_to_table "(?<letters>[a-z]+)-(\d*)"
actual.should_equal expected
Test.group "Text.parse_to_table with case-insensitivity" <|
Test.specify "case insensitivity" <|
suite_builder.group "Text.parse_to_table with case-insensitivity" group_builder->
group_builder.specify "case insensitivity" <|
expected = Table.from_rows ["Column 1", "Column 2"]
[["a", "B"], ["A", "b"], ["a", "b"], ["A", "B"]]
actual = "xy aB Ab ab AB".parse_to_table "(a)(b)" case_sensitivity=Case_Sensitivity.Insensitive
actual.should_equal expected
Test.group "Text.parse_to_table parsing" <|
Test.specify "parsing on" <|
suite_builder.group "Text.parse_to_table parsing" group_builder->
group_builder.specify "parsing on" <|
expected = Table.from_rows ["Column 1", "Column 2"]
[["ab", 12], ["bt", 100], ["c", 12], ["d", 20]]
actual = "a 7 ab-12 bt-100 c-12d-20q q8 12".parse_to_table "([a-z]+)-(\d*)"
actual.should_equal expected
actual.columns.map .value_type . should_equal [Value_Type.Char Nothing True, Value_Type.Integer Bits.Bits_64]
Test.specify "parsing on, with a mixed column" <|
group_builder.specify "parsing on, with a mixed column" <|
expected = Table.from_rows ["Column 1", "Column 2"]
[["ab", "12"], ["bt", "100"], ["c", "012"], ["d", "20"]]
actual = "a 7 ab-12 bt-100 c-012d-20q q8 12".parse_to_table "([a-z]+)-(\d*)"
actual.should_equal expected
actual.columns.map .value_type . should_equal [Value_Type.Char Nothing True, Value_Type.Char Nothing True]
Test.specify "parsing off" <|
group_builder.specify "parsing off" <|
expected = Table.from_rows ["Column 1", "Column 2"]
[["ab", "12"], ["bt", "100"], ["c", "12"], ["d", "20"]]
actual = "a 7 ab-12 bt-100 c-12d-20q q8 12".parse_to_table "([a-z]+)-(\d*)" parse_values=False
actual.should_equal expected
actual.columns.map .value_type . should_equal [Value_Type.Char Nothing True, Value_Type.Char Nothing True]
Test.group "Text.parse_to_table errors" <|
Test.specify "Regex_Syntax_Error" <|
suite_builder.group "Text.parse_to_table errors" group_builder->
group_builder.specify "Regex_Syntax_Error" <|
"abc".parse_to_table "(a)(?<<" . should_fail_with Regex_Syntax_Error
Test.specify "enpty pattern" <|
group_builder.specify "enpty pattern" <|
"abc".parse_to_table "" . should_fail_with Illegal_Argument
Test.specify "bad arg" <|
group_builder.specify "bad arg" <|
Test.expect_panic_with (actual = "a 7 ab12 bt100 c12d20q 12".parse_to_table 12) Type_Error
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
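Only the registration surface changes in these hunks; assertion and panic helpers keep their old names under `from Standard.Test_New import all`. A sketch reusing the exact assertions from the hunk above:

    group_builder.specify "matchers keep their old shape" <|
        "abc".parse_to_table "" . should_fail_with Illegal_Argument
        Test.expect_panic_with (actual = "abc".parse_to_table 12) Type_Error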

View File

@ -4,12 +4,12 @@ import Standard.Test.Extensions
from Standard.Table import Table
from Standard.Table.Errors import Invalid_Value_Type, Column_Count_Exceeded, Duplicate_Output_Column_Names, No_Such_Column
from Standard.Test import Test, Test_Suite, Problems
from Standard.Test_New import all
from project.Util import all
spec =
Test.group "Table.split" <|
Test.specify "can do split_to_columns" <|
add_specs suite_builder =
suite_builder.group "Table.split" group_builder->
group_builder.specify "can do split_to_columns" <|
cols = [["foo", [0, 1, 2]], ["bar", ["a|c", "c|d|ef", "gh|ij|u"]]]
t = Table.new cols
expected_rows = [[0, "a", "c", Nothing], [1, "c", "d", "ef"], [2, "gh", "ij", "u"]]
@ -17,7 +17,7 @@ spec =
t2 = t.split_to_columns "bar" "|"
t2.should_equal expected
Test.specify "can do split_to_columns where split character, first, last and only character" <|
group_builder.specify "can do split_to_columns where split character, first, last and only character" <|
cols = [["foo", [0, 1, 2]], ["bar", ["|cb", "ab|", "|"]]]
t = Table.new cols
expected_rows = [[0, "", "cb"], [1, "ab", ""], [2, "", ""]]
@ -25,7 +25,7 @@ spec =
t2 = t.split_to_columns "bar" "|"
t2.should_equal expected
Test.specify "can do split_to_columns where split character, first, last and only character and mismatch in number of split characters" <|
group_builder.specify "can do split_to_columns where split character, first, last and only character and mismatch in number of split characters" <|
cols = [["foo", [0, 1, 2]], ["bar", ["|c|", "ab|", "|"]]]
t = Table.new cols
expected_rows = [[0, "", "c", ""], [1, "ab", "", Nothing], [2, "", "", Nothing]]
@ -33,7 +33,7 @@ spec =
t2 = t.split_to_columns "bar" "|"
t2.should_equal expected
Test.specify "can do split_to_rows" <|
group_builder.specify "can do split_to_rows" <|
cols = [["foo", [0, 1, 2]], ["bar", ["a|c", "c|d|ef", "gh|ij|u"]]]
t = Table.new cols
expected_rows = [[0, "a"], [0, "c"], [1, "c"], [1, "d"], [1, "ef"], [2, "gh"], [2, "ij"], [2, "u"]]
@ -41,7 +41,7 @@ spec =
t2 = t.split_to_rows "bar" "|"
t2.should_equal expected
Test.specify "can do split_to_rows where split character, first, last and only character" <|
group_builder.specify "can do split_to_rows where split character, first, last and only character" <|
cols = [["foo", [0, 1, 2]], ["bar", ["|cb", "ab|", "|"]]]
t = Table.new cols
expected_rows = [[0, ""], [0, "cb"], [1, "ab"], [1, ""], [2, ""], [2, ""]]
@ -49,7 +49,7 @@ spec =
t2 = t.split_to_rows "bar" "|"
t2.should_equal expected
Test.specify "can do split_to_columns with some Nothings and Empty Strings" <|
group_builder.specify "can do split_to_columns with some Nothings and Empty Strings" <|
cols = [["foo", [0, 1, 2, 3, 4]], ["bar", ["a|c", "c|d|ef", Nothing, "gh|ij|u", ""]]]
t = Table.new cols
expected_rows = [[0, "a", "c", Nothing], [1, "c", "d", "ef"], [2, Nothing, Nothing, Nothing], [3, "gh", "ij", "u"], [4, "", Nothing, Nothing]]
@ -57,7 +57,7 @@ spec =
t2 = t.split_to_columns "bar" "|"
t2.should_equal expected
Test.specify "can do split_to_rows with some Nothings and Empty Strings" <|
group_builder.specify "can do split_to_rows with some Nothings and Empty Strings" <|
cols = [["foo", [0, 1, 2, 3, 4]], ["bar", ["a|c", "c|d|ef", Nothing, "gh|ij|u", ""]]]
t = Table.new cols
expected_rows = [[0, "a"], [0, "c"], [1, "c"], [1, "d"], [1, "ef"], [2, Nothing], [3, "gh"], [3, "ij"], [3, "u"], [4, ""]]
@ -65,7 +65,7 @@ spec =
t2 = t.split_to_rows "bar" "|"
t2.should_equal expected
Test.specify "can do split_to_columns with one output column, no column suffix added" <|
group_builder.specify "can do split_to_columns with one output column, no column suffix added" <|
cols = [["foo", [0, 1, 2, 3, 4]], ["bar", ["abc", "cbdbef", "ghbijbu", Nothing, ""]]]
t = Table.new cols
expected_rows = [[0, "abc"], [1, "cbdbef"], [2, "ghbijbu"], [3, Nothing], [4, ""]]
@ -73,8 +73,8 @@ spec =
t2 = t.split_to_columns "bar" "|"
t2.should_equal expected
Test.group "Table.tokenize" <|
Test.specify "can do tokenize_to_columns" <|
suite_builder.group "Table.tokenize" group_builder->
group_builder.specify "can do tokenize_to_columns" <|
cols = [["foo", [0, 1, 2]], ["bar", ["a12b34r5", "23", "2r4r55"]]]
t = Table.new cols
expected_rows = [[0, "12", "34", "5"], [1, "23", Nothing, Nothing], [2, "2", "4", "55"]]
@ -82,7 +82,7 @@ spec =
t2 = t.tokenize_to_columns "bar" "\d+"
t2.should_equal expected
Test.specify "can do tokenize_to_rows" <|
group_builder.specify "can do tokenize_to_rows" <|
cols = [["foo", [0, 1, 2]], ["bar", ["a12b34r5", "23", "2r4r55"]]]
t = Table.new cols
expected_rows = [[0, "12"], [0, "34"], [0, "5"], [1, "23"], [2, "2"], [2, "4"], [2, "55"]]
@ -90,7 +90,7 @@ spec =
t2 = t.tokenize_to_rows "bar" "\d+"
t2.should_equal expected
Test.specify "can do tokenize_to_columns with some nothings" <|
group_builder.specify "can do tokenize_to_columns with some nothings" <|
cols = [["foo", [0, 1, 2, 3]], ["bar", ["a12b34r5", Nothing, "23", "2r4r55"]]]
t = Table.new cols
expected_rows = [[0, "12", "34", "5"], [1, Nothing, Nothing, Nothing], [2, "23", Nothing, Nothing], [3, "2", "4", "55"]]
@ -98,7 +98,7 @@ spec =
t2 = t.tokenize_to_columns "bar" "\d+"
t2.should_equal expected
Test.specify "can do tokenize_to_rows with some Nothings" <|
group_builder.specify "can do tokenize_to_rows with some Nothings" <|
cols = [["foo", [0, 1, 2, 3]], ["bar", ["a12b34r5", Nothing, "23", "2r4r55"]]]
t = Table.new cols
expected_rows = [[0, "12"], [0, "34"], [0, "5"], [2, "23"], [3, "2"], [3, "4"], [3, "55"]]
@ -106,7 +106,7 @@ spec =
t2 = t.tokenize_to_rows "bar" "\d+"
t2.should_equal expected
Test.specify "can do tokenize_to_columns with one output column, no column suffix needed" <|
group_builder.specify "can do tokenize_to_columns with one output column, no column suffix needed" <|
cols = [["foo", [0, 1, 2]], ["bar", ["a12b", "23", "2r"]]]
t = Table.new cols
expected_rows = [[0, "12"], [1, "23"], [2, "2"]]
@ -114,7 +114,7 @@ spec =
t2 = t.tokenize_to_columns "bar" "\d+"
t2.should_equal expected
Test.specify "can do tokenize_to_rows with some rows that have no matches" <|
group_builder.specify "can do tokenize_to_rows with some rows that have no matches" <|
cols = [["foo", [0, 1, 2, 3]], ["bar", ["a12b34r5", "23", "q", "2r4r55"]]]
t = Table.new cols
expected_rows = [[0, "12"], [0, "34"], [0, "5"], [1, "23"], [3, "2"], [3, "4"], [3, "55"]]
@ -122,7 +122,7 @@ spec =
t2 = t.tokenize_to_rows "bar" "\d+"
t2.should_equal expected
Test.specify "can do tokenize_to_columns with groups" <|
group_builder.specify "can do tokenize_to_columns with groups" <|
cols = [["foo", [0, 1]], ["bar", ["r a-1, b-12,qd-50", "ab-10:bc-20c"]]]
t = Table.new cols
expected_rows = [[0, "a1", "b12", "d50"], [1, "b10", "c20", Nothing]]
@ -130,7 +130,7 @@ spec =
t2 = t.tokenize_to_columns "bar" "([a-z]).(\d+)"
t2.should_equal expected
Test.specify "can do tokenize_to_rows with groups" <|
group_builder.specify "can do tokenize_to_rows with groups" <|
cols = [["foo", [0, 1]], ["bar", ["r a-1, b-12,qd-50", "ab-10:bc-20c"]]]
t = Table.new cols
expected_rows = [[0, "a1"], [0, "b12"], [0, "d50"], [1, "b10"], [1, "c20"]]
@ -138,7 +138,7 @@ spec =
t2 = t.tokenize_to_rows "bar" "([a-z]).(\d+)"
t2.should_equal expected
Test.specify "can do tokenize_to_columns case-insensitively" <|
group_builder.specify "can do tokenize_to_columns case-insensitively" <|
cols = [["foo", [0, 1, 2]], ["bar", ["aBqcE", "qcBr", "cCb"]]]
t = Table.new cols
expected_rows = [[0, "B", "c", Nothing], [1, "c", "B", Nothing], [2, "c", "C", "b"]]
@ -146,7 +146,7 @@ spec =
t2 = t.tokenize_to_columns "bar" "[bc]" case_sensitivity=Case_Sensitivity.Insensitive
t2.should_equal expected
Test.specify "can do tokenize_to_rows case-insensitively" <|
group_builder.specify "can do tokenize_to_rows case-insensitively" <|
cols = [["foo", [0, 1, 2]], ["bar", ["aBqcE", "qcBr", "cCb"]]]
t = Table.new cols
expected_rows = [[0, "B"], [0, "c"], [1, "c"], [1, "B"], [2, "c"], [2, "C"], [2, "b"]]
@ -154,20 +154,20 @@ spec =
t2 = t.tokenize_to_rows "bar" "[bc]" case_sensitivity=Case_Sensitivity.Insensitive
t2.should_equal expected
Test.specify "can do tokenize_to_rows with at_least_one_row=True" <|
group_builder.specify "can do tokenize_to_rows with at_least_one_row=True" <|
input = Table.from_rows ["foo", "bar"] [[0, "a12b34r5"], [1, "qqq"], [2, "2r4r55"]]
expected = Table.from_rows ["foo", "bar"] [[0, "12"], [0, "34"], [0, "5"], [1, Nothing], [2, "2"], [2, "4"], [2, "55"]]
actual = input.tokenize_to_rows "bar" "\d+" at_least_one_row=True
actual.should_equal expected
Test.specify "can do tokenize_to_rows with at_least_one_row=True, with groups" <|
group_builder.specify "can do tokenize_to_rows with at_least_one_row=True, with groups" <|
input = Table.from_rows ["foo", "bar"] [[0, "a12b34r5"], [1, "qqq"], [2, "2r44r55"], [3, Nothing]]
expected = Table.from_rows ["foo", "bar"] [[0, "12"], [0, "34"], [1, Nothing], [2, "44"], [2, "55"], [3, Nothing]]
actual = input.tokenize_to_rows "bar" "(\d)(\d)" at_least_one_row=True
actual.should_equal expected
Test.group "Table.split/tokenize column count" <|
Test.specify "should generate extra empty columns if column_count is set" <|
suite_builder.group "Table.split/tokenize column count" group_builder->
group_builder.specify "should generate extra empty columns if column_count is set" <|
cols = [["foo", [0, 1, 2]], ["bar", ["abc", "cbdbef", "ghbijbu"]]]
t = Table.new cols
expected_rows = [[0, "a", "c", Nothing, Nothing], [1, "c", "d", "ef", Nothing], [2, "gh", "ij", "u", Nothing]]
@ -176,7 +176,7 @@ spec =
t2.should_equal expected
t2.at "bar 3" . value_type . is_text . should_be_true
Test.specify "split should limit columns and return problems when exceeding the column limit" <|
group_builder.specify "split should limit columns and return problems when exceeding the column limit" <|
cols = [["foo", [0, 1, 2]], ["bar", ["abc", "cbdbef", "ghbijbu"]]]
t = Table.new cols
expected_rows = [[0, "a", "c"], [1, "c", "d"], [2, "gh", "ij"]]
@ -186,7 +186,7 @@ spec =
problems = [Column_Count_Exceeded.Error 2 3]
Problems.test_problem_handling action problems tester
Test.specify "tokenize should limit columns and return problems when exceeding the column limit" <|
group_builder.specify "tokenize should limit columns and return problems when exceeding the column limit" <|
cols = [["foo", [0, 1]], ["bar", ["r a-1, b-12,qd-50", "ab-10:bc-20c"]]]
t = Table.new cols
expected_rows = [[0, "a1", "b12", "d50"], [1, "b10", "c20", Nothing]]
@ -196,7 +196,7 @@ spec =
problems = [Column_Count_Exceeded.Error 2 3]
Problems.test_problem_handling action problems tester
Test.specify "should generate extra empty columns if column_count is set (with rows in a different order)" <|
group_builder.specify "should generate extra empty columns if column_count is set (with rows in a different order)" <|
cols = [["foo", [0, 1, 2]], ["bar", ["ghbijbu", "cbdbef", "abc"]]]
t = Table.new cols
expected_rows = [[0, "gh", "ij", "u", Nothing], [1, "c", "d", "ef", Nothing], [2, "a", "c", Nothing, Nothing]]
@ -205,8 +205,8 @@ spec =
t2.should_equal expected
t2.at "bar 3" . value_type . is_text . should_be_true
Test.group "Table.split/tokenize errors" <|
Test.specify "won't work on a non-text column" <|
suite_builder.group "Table.split/tokenize errors" group_builder->
group_builder.specify "won't work on a non-text column" <|
cols = [["foo", [0, 1]], ["bar", ["r a-1, b-12,qd-50", "ab-10:bc-20c"]]]
t = Table.new cols
t.split_to_columns "foo" "x" . should_fail_with Invalid_Value_Type
@ -214,7 +214,7 @@ spec =
t.tokenize_to_columns "foo" "x" . should_fail_with Invalid_Value_Type
t.tokenize_to_rows "foo" "x" . should_fail_with Invalid_Value_Type
Test.specify "won't work on a mixed column" <|
group_builder.specify "won't work on a mixed column" <|
cols = [["foo", [0, 1]], ["bar", [500, "ab-10:bc-20c"]]]
t = Table.new cols
t.split_to_columns "bar" "x" . should_fail_with Invalid_Value_Type
@ -222,18 +222,18 @@ spec =
t.tokenize_to_columns "bar" "x" . should_fail_with Invalid_Value_Type
t.tokenize_to_rows "bar" "x" . should_fail_with Invalid_Value_Type
Test.specify "*_to_columns handles missing input column" <|
group_builder.specify "*_to_columns handles missing input column" <|
cols = [["foo", [0, 1]], ["bar", ["r a-1, b-12,qd-50", "ab-10:bc-20c"]]]
t = Table.new cols
t.tokenize_to_columns "invalid_name" "([a-z]).(\d+)" . should_fail_with No_Such_Column
Test.specify "*_to_rows handles missing input column" <|
group_builder.specify "*_to_rows handles missing input column" <|
cols = [["foo", [0, 1]], ["bar", ["r a-1, b-12,qd-50", "ab-10:bc-20c"]]]
t = Table.new cols
t.tokenize_to_rows "invalid_name" "([a-z]).(\d+)" . should_fail_with No_Such_Column
Test.group "Table.split/tokenize name conflicts" <|
Test.specify "split will make column names unique" <|
suite_builder.group "Table.split/tokenize name conflicts" group_builder->
group_builder.specify "split will make column names unique" <|
cols = [["foo", [0, 1, 2]], ["bar", ["abc", "cbdbef", "ghbijbu"]], ["bar 1", ["a", "b", "c"]]]
t = Table.new cols
expected_rows = [[0, "a", "c", Nothing, "a"], [1, "c", "d", "ef", "b"], [2, "gh", "ij", "u", "c"]]
@ -243,7 +243,7 @@ spec =
problems = [Duplicate_Output_Column_Names.Error ["bar 1"]]
Problems.test_problem_handling action problems tester
Test.specify "tokenize will make column names unique" <|
group_builder.specify "tokenize will make column names unique" <|
cols = [["foo", [0, 1, 2]], ["bar", ["a12b34r5", "23", "2r4r55"]], ["bar 1", ["a", "b", "c"]]]
t = Table.new cols
expected_rows = [[0, "12", "34", "5", "a"], [1, "23", Nothing, Nothing, "b"], [2, "2", "4", "55", "c"]]
@ -253,8 +253,8 @@ spec =
problems = [Duplicate_Output_Column_Names.Error ["bar 1"]]
Problems.test_problem_handling action problems tester
Test.group "Table.split/tokenize column order" <|
Test.specify "preserves column order" <|
suite_builder.group "Table.split/tokenize column order" group_builder->
group_builder.specify "preserves column order" <|
cols = [["foo", [0, 1, 2]], ["bar", ["abc", "cbdbef", "ghbijbu"]], ["baz", [1, 2, 3]]]
t = Table.new cols
expected_rows = [[0, "a", "c", Nothing, 1], [1, "c", "d", "ef", 2], [2, "gh", "ij", "u", 3]]
@ -262,74 +262,74 @@ spec =
t2 = t.split_to_columns "bar" "b"
t2.should_equal expected
Test.group "Table.parse_to_columns" <|
Test.specify "can parse to columns" <|
suite_builder.group "Table.parse_to_columns" group_builder->
group_builder.specify "can parse to columns" <|
t = Table.from_rows ["foo", "bar", "baz"] [["x", "12 34p q56", "y"], ["xx", "a48 59b", "yy"]]
expected = Table.from_rows ["foo", "bar 1", "bar 2", "baz"] [["x", 1, 2, "y"], ["x", 3, 4, "y"], ["x", 5, 6, "y"], ["xx", 4, 8, "yy"], ["xx", 5, 9, "yy"]]
actual = t.parse_to_columns "bar" "(\d)(\d)"
actual.should_equal expected
Test.specify "no regex groups" <|
group_builder.specify "no regex groups" <|
t = Table.from_rows ["foo", "bar", "baz"] [["x", "12 34p q56", "y"], ["xx", "a48 59b", "yy"]]
expected = Table.from_rows ["foo", "bar", "baz"] [["x", 12, "y"], ["x", 34, "y"], ["x", 56, "y"], ["xx", 48, "yy"], ["xx", 59, "yy"]]
actual = t.parse_to_columns "bar" "\d\d"
actual.should_equal expected
Test.specify "named groups" <|
group_builder.specify "named groups" <|
t = Table.from_rows ["foo", "bar", "baz"] [["x", "12 34p q56", "y"], ["xx", "a48 59b", "yy"]]
expected = Table.from_rows ["foo", "xomt", "biff", "baz"] [["x", 1, 2, "y"], ["x", 3, 4, "y"], ["x", 5, 6, "y"], ["xx", 4, 8, "yy"], ["xx", 5, 9, "yy"]]
actual = t.parse_to_columns "bar" "(?<xomt>\d)(?<biff>\d)"
actual.should_equal expected
Test.specify "non-participating groups" <|
group_builder.specify "non-participating groups" <|
t = Table.from_rows ["foo", "bar", "baz"] [["x", "q1", "y"], ["xx", "qp", "yy"]]
expected = Table.from_rows ["foo", "bar 1", "bar 2", "bar 3", "baz"] [["x", "1", 1, Nothing, "y"], ["xx", "p", Nothing, "p", "yy"]]
actual = t.parse_to_columns "bar" "q((\d)|([a-z]))"
actual.should_equal expected
Test.specify "case-insensitive" <|
group_builder.specify "case-insensitive" <|
t = Table.from_rows ["foo", "bar", "baz"] [["x", "qq", "y"], ["xx", "qQ", "yy"]]
expected = Table.from_rows ["foo", "bar 1", "baz"] [["x", "q", "y"], ["xx", "Q", "yy"]]
actual = t.parse_to_columns "bar" "q(q)" case_sensitivity=Case_Sensitivity.Insensitive
actual.should_equal expected
Test.specify "no post-parsing" <|
group_builder.specify "no post-parsing" <|
t = Table.from_rows ["foo", "bar", "baz"] [["x", "12 34p q56", "y"], ["xx", "a48 59b", "yy"]]
expected = Table.from_rows ["foo", "bar 1", "bar 2", "baz"] [["x", "1", "2", "y"], ["x", "3", "4", "y"], ["x", "5", "6", "y"], ["xx", "4", "8", "yy"], ["xx", "5", "9", "yy"]]
actual = t.parse_to_columns "bar" "(\d)(\d)" parse_values=False
actual.should_equal expected
Test.specify "column name clash" <|
group_builder.specify "column name clash" <|
t = Table.from_rows ["foo", "bar", "bar 1"] [["x", "12 34p q56", "y"], ["xx", "a48 59b", "yy"]]
expected = Table.from_rows ["foo", "bar 1 1", "bar 2", "bar 1"] [["x", 1, 2, "y"], ["x", 3, 4, "y"], ["x", 5, 6, "y"], ["xx", 4, 8, "yy"], ["xx", 5, 9, "yy"]]
actual = t.parse_to_columns "bar" "(\d)(\d)"
actual.should_equal expected
Test.specify "column and group name clash" <|
group_builder.specify "column and group name clash" <|
t = Table.from_rows ["foo", "bar", "baz"] [["x", "123", "y"]]
expected = Table.from_rows ["foo", "bar", "baz 1", "quux", "baz"] [["x", 1, 2, 3, "y"]]
actual = t.parse_to_columns "bar" "(?<bar>\d)(?<baz>\d)(?<quux>\d)"
actual.should_equal expected
Test.specify "empty table" <|
group_builder.specify "empty table" <|
t = Table.from_rows ["foo", "bar", "baz"] [["x", "a", "y"]] . take 0
expected = Table.from_rows ["foo", "bar", "baz"] []
actual = t.parse_to_columns "bar" "\d+"
actual.should_equal expected
Test.specify "empty table, with regex groups" <|
group_builder.specify "empty table, with regex groups" <|
t = Table.from_rows ["foo", "bar", "baz"] [["x", "a", "y"]] . take 0
expected = Table.from_rows ["foo", "bar 1", "bar 2", "baz"] [["x", "a", "a", "y"]] . take 0
actual = t.parse_to_columns "bar" "(\d)(\d)"
actual.should_equal expected
Test.specify "empty table, with named and unnamed regex groups" <|
group_builder.specify "empty table, with named and unnamed regex groups" <|
t = Table.from_rows ["foo", "bar", "baz"] [["x", "a", "y"]] . take 0
expected = Table.from_rows ["foo", "quux", "bar 1", "foo 1", "bar 2", "baz"] [["x", "a", "a", "a", "a", "y"]] . take 0
actual = t.parse_to_columns "bar" "(?<quux>)(\d)(?<foo>\d)(\d)"
actual.should_equal expected
Test.specify "input with no matches" <|
group_builder.specify "input with no matches" <|
t = Table.from_rows ["foo", "bar", "baz"] [["x", "a", "y"], ["xx", "q12", "yy"], ["xxx", "34r", "yyyy"]]
expected = Table.from_rows ["foo", "bar", "baz"] [["x", Nothing, "y"], ["xx", 12, "yy"], ["xxx", 34, "yyyy"]]
actual = t.parse_to_columns "bar" "\d+"
@ -340,16 +340,20 @@ spec =
actual2 = t2.parse_to_columns "amount" "\$(?<Raised>\d+(?:.\d+)?)(?<Scale>B|M)"
actual2.should_equal expected2
Test.specify "input with no matches, with regex groups" <|
group_builder.specify "input with no matches, with regex groups" <|
t = Table.from_rows ["foo", "bar", "baz"] [["x", "a", "y"]]
expected = Table.from_rows ["foo", "bar 1", "bar 2", "baz"] [["x", Nothing, Nothing, "y"]]
actual = t.parse_to_columns "bar" "(\d)(\d)"
actual.should_equal expected
Test.specify "input with no matches, with named and unnamed regex groups" <|
group_builder.specify "input with no matches, with named and unnamed regex groups" <|
t = Table.from_rows ["foo", "bar", "baz"] [["x", "a", "y"]]
expected = Table.from_rows ["foo", "quux", "bar 1", "foo 1", "bar 2", "baz"] [["x", Nothing, Nothing, Nothing, Nothing, "y"]]
actual = t.parse_to_columns "bar" "(?<quux>)(\d)(?<foo>\d)(\d)"
actual.should_equal expected
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
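The `Problems.test_problem_handling` calls above survive the migration unchanged. Their `action` bindings are elided in these hunks, so the sketch below is a hypothetical reconstruction of the idiom: `action` takes the problem-handling behavior as its last argument, `tester` validates the successful result, and the helper re-runs the action under each handling mode against the expected `problems`:

    # Hypothetical reconstruction; the real `action` bindings are elided above.
    action = t.split_to_columns "bar" "|" column_count=2 on_problems=_
    tester table = table.column_names . should_equal ["foo", "bar 1", "bar 2"]
    problems = [Column_Count_Exceeded.Error 2 3]
    Problems.test_problem_handling action problems tester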

View File

@ -8,8 +8,7 @@ from Standard.Table import Table, Column
from Standard.Table.Errors import No_Such_Column
from Standard.Table.Extensions.Table_Conversions import all
from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Util import all
@ -18,7 +17,17 @@ type My_Mod_Type
to_text self = "x%10="+(self.x%10).to_text
spec =
type Data
Value ~data
single_values self = self.data.at 0
uniform_json self = self.data.at 1
non_uniform_json self = self.data.at 2
library_document self = self.data.at 3
library_root self = self.data.at 4
setup = Data.Value <|
single_values = [Nothing, 12, 13.4, True, "hello", Date.new 2023 10 6, Time_Of_Day.new 3 4 5 200, Date_Time.new 2023 11 7 2 3 4]
uniform_json = Json.parse <| '''
[
@ -34,155 +43,160 @@ spec =
library_test_file = enso_project.data / "xml" / "library.xml"
library_document = XML_Document.from_file library_test_file
library_root = library_document . root_element
[single_values, uniform_json, non_uniform_json, library_document, library_root]
add_specs suite_builder =
trim_if_text x = case x of
_ : Text -> x.trim
_ -> x
Test.group "from_objects with single values" <|
Test.specify "Can create a table from a single value" <|
single_values.map v->
data = Data.setup
suite_builder.group "from_objects with single values" group_builder->
group_builder.specify "Can create a table from a single value" <|
data.single_values.map v->
expected = Table.from_rows ["Value"] [[v]]
Table.from_objects v . should_equal expected
Test.specify "Can create a table from a vector of single values" <|
expected = Table.new [["Value", single_values]]
Table.from_objects single_values . should_equal expected
group_builder.specify "Can create a table from a vector of single values" <|
expected = Table.new [["Value", data.single_values]]
Table.from_objects data.single_values . should_equal expected
Test.specify "A single value with a field list results in columns of Nothing" <|
group_builder.specify "A single value with a field list results in columns of Nothing" <|
expected = Table.new [["aaa", [Nothing]], ["bbb", [Nothing]]]
Table.from_objects 1 ['aaa', 'bbb'] . should_equal expected
Test.specify "A single value with the field list [Value] results in a column with the value" <|
group_builder.specify "A single value with the field list [Value] results in a column with the value" <|
expected = Table.new [["Value", [1]], ["bbb", [Nothing]]]
Table.from_objects 1 ["Value", "bbb"] . should_equal expected
Test.specify "A vector of single values with a field list results in a column of Nothing" <|
group_builder.specify "A vector of single values with a field list results in a column of Nothing" <|
expected = Table.new [["aaa", [Nothing, Nothing]], ["bbb", [Nothing, Nothing]]]
Table.from_objects [1, 2] ['aaa', 'bbb'] . should_equal expected
Test.group "from_objects with JSON (single values)" <|
Test.specify "Generates a single-row table from a JSON object" <|
suite_builder.group "from_objects with JSON (single values)" group_builder->
group_builder.specify "Generates a single-row table from a JSON object" <|
expected = Table.from_rows ["first", "last", "age"] [["Mary", "Smith", 23]]
Table.from_objects (uniform_json.at 0) . should_equal expected
Table.from_objects (data.uniform_json.at 0) . should_equal expected
Test.specify "works fine even if requested fields are duplicated" <|
group_builder.specify "works fine even if requested fields are duplicated" <|
expected = Table.from_rows ["first", "last"] [["Mary", "Smith"]]
Table.from_objects (uniform_json.at 0) ["first", "last", "first", "first"] . should_equal expected
Table.from_objects (data.uniform_json.at 0) ["first", "last", "first", "first"] . should_equal expected
Test.group "from_objects with uniform JSON vector" <|
Test.specify "Generates a table from a vector of JSON objects" <|
suite_builder.group "from_objects with uniform JSON vector" group_builder->
group_builder.specify "Generates a table from a vector of JSON objects" <|
expected = Table.from_rows ["first", "last", "age"] [["Mary", "Smith", 23], ["Joe", "Burton", 34]]
Table.from_objects uniform_json . should_equal expected
Table.from_objects data.uniform_json . should_equal expected
Test.specify "Generates a table from a vector of JSON objects, with exact fields" <|
group_builder.specify "Generates a table from a vector of JSON objects, with exact fields" <|
expected = Table.from_rows ["first", "last", "age"] [["Mary", "Smith", 23], ["Joe", "Burton", 34]]
Table.from_objects uniform_json ["first", "last", "age"] . should_equal expected
Table.from_objects data.uniform_json ["first", "last", "age"] . should_equal expected
Test.specify "Generates a table from a vector of JSON objects, with a subset of fields" <|
group_builder.specify "Generates a table from a vector of JSON objects, with a subset of fields" <|
expected = Table.from_rows ["last", "age"] [["Smith", 23], ["Burton", 34]]
Table.from_objects uniform_json ["last", "age"] . should_equal expected
Table.from_objects data.uniform_json ["last", "age"] . should_equal expected
Test.specify "Generates a table from a vector of JSON objects, with extra fields" <|
group_builder.specify "Generates a table from a vector of JSON objects, with extra fields" <|
expected = Table.from_rows ["first", "middle", "last", "age"] [["Mary", Nothing, "Smith", 23], ["Joe", Nothing, "Burton", 34]]
Table.from_objects uniform_json ["first", "middle", "last", "age"] . should_equal expected
Table.from_objects data.uniform_json ["first", "middle", "last", "age"] . should_equal expected
Test.specify "Generates a table from a vector of JSON objects, with ignored and extra fields" <|
group_builder.specify "Generates a table from a vector of JSON objects, with ignored and extra fields" <|
expected = Table.from_rows ["first", "middle", "age"] [["Mary", Nothing, 23], ["Joe", Nothing, 34]]
Table.from_objects uniform_json ["first", "middle", "age"] . should_equal expected
Table.from_objects data.uniform_json ["first", "middle", "age"] . should_equal expected
Test.group "from_objects with non-uniform JSON vector" <|
Test.specify "Generates a table from a non-uniform vector of JSON objects" <|
suite_builder.group "from_objects with non-uniform JSON vector" group_builder->
group_builder.specify "Generates a table from a non-uniform vector of JSON objects" <|
expected = Table.from_rows ["first", "last", "age", "height", "weight"] [["Mary", "Smith", 23, Nothing, Nothing], [Nothing, Nothing, Nothing, 1.9, 70]]
Table.from_objects non_uniform_json . should_equal expected
Table.from_objects data.non_uniform_json . should_equal expected
Test.specify "Generates a table from a non-uniform vector of JSON objects, with exact fields" <|
group_builder.specify "Generates a table from a non-uniform vector of JSON objects, with exact fields" <|
expected = Table.from_rows ["first", "last", "age", "height", "weight"] [["Mary", "Smith", 23, Nothing, Nothing], [Nothing, Nothing, Nothing, 1.9, 70]]
Table.from_objects non_uniform_json ["first", "last", "age", "height", "weight"] . should_equal expected
Table.from_objects data.non_uniform_json ["first", "last", "age", "height", "weight"] . should_equal expected
Test.specify "Generates a table from a non-uniform vector of JSON objects, with ignored fields" <|
group_builder.specify "Generates a table from a non-uniform vector of JSON objects, with ignored fields" <|
expected = Table.from_rows ["last", "weight"] [["Smith", Nothing], [Nothing, 70]]
Table.from_objects non_uniform_json ["last", "weight"] . should_equal expected
Table.from_objects data.non_uniform_json ["last", "weight"] . should_equal expected
Test.specify "Generates a table from a non-uniform vector of JSON objects, with extra fields" <|
group_builder.specify "Generates a table from a non-uniform vector of JSON objects, with extra fields" <|
expected = Table.from_rows ["first", "middle", "last", "age", "height", "weight"] [["Mary", Nothing, "Smith", 23, Nothing, Nothing], [Nothing, Nothing, Nothing, Nothing, 1.9, 70]]
Table.from_objects non_uniform_json ["first", "middle", "last", "age", "height", "weight"] . should_equal expected
Table.from_objects data.non_uniform_json ["first", "middle", "last", "age", "height", "weight"] . should_equal expected
Test.specify "Generates a table from a non-uniform vector of JSON objects, with ignored and extra fields" <|
group_builder.specify "Generates a table from a non-uniform vector of JSON objects, with ignored and extra fields" <|
expected = Table.from_rows ["first", "middle", "height", "weight"] [["Mary", Nothing, Nothing, Nothing], [Nothing, Nothing, 1.9, 70]]
Table.from_objects non_uniform_json ["first", "middle", "height", "weight"] . should_equal expected
Table.from_objects data.non_uniform_json ["first", "middle", "height", "weight"] . should_equal expected
Test.specify "Generates a table from a non-uniform vector of JSON objects, with ignored and extra fields, taking only from one kind of value" <|
group_builder.specify "Generates a table from a non-uniform vector of JSON objects, with ignored and extra fields, taking only from one kind of value" <|
expected = Table.from_rows ["first"] [["Mary"], [Nothing]]
Table.from_objects non_uniform_json ["first"] . should_equal expected
Table.from_objects data.non_uniform_json ["first"] . should_equal expected
Test.group "from_objects with mixed vector of single and JSON objects" <|
Test.specify "Generates a table from a mixed vector of single values and JSON objects" <|
suite_builder.group "from_objects with mixed vector of single and JSON objects" group_builder->
group_builder.specify "Generates a table from a mixed vector of single values and JSON objects" <|
expected = Table.from_rows ["first", "last", "age", "Value"] [["Mary", "Smith", 23, Nothing], ["Joe", "Burton", 34, Nothing], [Nothing, Nothing, Nothing, 12]]
Table.from_objects uniform_json+[12] . should_equal expected
Table.from_objects data.uniform_json+[12] . should_equal expected
Test.specify "Generates a table from a mixed vector of single values and JSON objects, with exact fields" <|
group_builder.specify "Generates a table from a mixed vector of single values and JSON objects, with exact fields" <|
expected = Table.from_rows ["first", "last", "age", "Value"] [["Mary", "Smith", 23, Nothing], ["Joe", "Burton", 34, Nothing], [Nothing, Nothing, Nothing, 12]]
Table.from_objects uniform_json+[12] ["first", "last", "age", "Value"] . should_equal expected
Table.from_objects data.uniform_json+[12] ["first", "last", "age", "Value"] . should_equal expected
Test.specify "Generates a table from a mixed vector of single values and JSON objects, with ignored fields" <|
group_builder.specify "Generates a table from a mixed vector of single values and JSON objects, with ignored fields" <|
expected = Table.from_rows ["last", "age", "Value"] [["Smith", 23, Nothing], ["Burton", 34, Nothing], [Nothing, Nothing, 12]]
Table.from_objects uniform_json+[12] ["last", "age", "Value"] . should_equal expected
Table.from_objects data.uniform_json+[12] ["last", "age", "Value"] . should_equal expected
Test.specify "Generates a table from a mixed vector of single values and JSON objects, with extra fields" <|
group_builder.specify "Generates a table from a mixed vector of single values and JSON objects, with extra fields" <|
expected = Table.from_rows ["first", "last", "age", "blah", "Value"] [["Mary", "Smith", 23, Nothing, Nothing], ["Joe", "Burton", 34, Nothing, Nothing], [Nothing, Nothing, Nothing, Nothing, 12]]
Table.from_objects uniform_json+[12] ["first", "last", "age", "blah", "Value"] . should_equal expected
Table.from_objects data.uniform_json+[12] ["first", "last", "age", "blah", "Value"] . should_equal expected
Test.specify "Generates a table from a mixed vector of single values and JSON objects, with ignored and extra fields" <|
group_builder.specify "Generates a table from a mixed vector of single values and JSON objects, with ignored and extra fields" <|
expected = Table.from_rows ["first", "last", "blah", "Value"] [["Mary", "Smith", Nothing, Nothing], ["Joe", "Burton", Nothing, Nothing], [Nothing, Nothing, Nothing, 12]]
Table.from_objects uniform_json+[12] ["first", "last", "blah", "Value"] . should_equal expected
Table.from_objects data.uniform_json+[12] ["first", "last", "blah", "Value"] . should_equal expected
Test.group "from_objects with Array" <|
Test.specify "Generates a table from a mixed vector of single values and JSON objects, with ignored and extra fields" <|
suite_builder.group "from_objects with Array" group_builder->
group_builder.specify "Generates a table from a mixed vector of single values and JSON objects, with ignored and extra fields" <|
expected = Table.from_rows ["first", "last", "blah", "Value"] [["Mary", "Smith", Nothing, Nothing], ["Joe", "Burton", Nothing, Nothing], [Nothing, Nothing, Nothing, 12]]
Table.from_objects (uniform_json+[12]).to_array ["first", "last", "blah", "Value"] . should_equal expected
Table.from_objects (data.uniform_json+[12]).to_array ["first", "last", "blah", "Value"] . should_equal expected
Test.group "expand_column" <|
Test.specify "Expands a column of single values" <|
suite_builder.group "expand_column" group_builder->
group_builder.specify "Expands a column of single values" <|
table = Table.new [["aaa", [1, 2]], ["bbb", [3, 4]], ["ccc", [5, 6]]]
expected = Table.new [["aaa", [1, 2]], ["bbb Value", [3, 4]], ["ccc", [5, 6]]]
table.expand_column "bbb" . should_equal expected
Test.specify "Expands a uniform column of JSON objects" <|
table = Table.new [["aaa", [1, 2]], ["bbb", uniform_json], ["ccc", [5, 6]]]
group_builder.specify "Expands a uniform column of JSON objects" <|
table = Table.new [["aaa", [1, 2]], ["bbb", data.uniform_json], ["ccc", [5, 6]]]
expected = Table.new [["aaa", [1, 2]], ["bbb first", ["Mary", "Joe"]], ["bbb last", ["Smith", "Burton"]], ["bbb age", [23, 34]], ["ccc", [5, 6]]]
table.expand_column "bbb" . should_equal expected
Test.specify "Expands a uniform column of JSON objects, with extra and ignored fields" <|
table = Table.new [["aaa", [1, 2]], ["bbb", uniform_json], ["ccc", [5, 6]]]
group_builder.specify "Expands a uniform column of JSON objects, with extra and ignored fields" <|
table = Table.new [["aaa", [1, 2]], ["bbb", data.uniform_json], ["ccc", [5, 6]]]
expected = Table.new [["aaa", [1, 2]], ["bbb age", [23, 34]], ["bbb foo", [Nothing, Nothing]], ["ccc", [5, 6]]]
table.expand_column "bbb" ["age", "foo"] . should_equal expected
Test.specify "Expands a non-uniform column of JSON objects" <|
table = Table.new [["aaa", [1, 2]], ["bbb", non_uniform_json], ["ccc", [5, 6]]]
group_builder.specify "Expands a non-uniform column of JSON objects" <|
table = Table.new [["aaa", [1, 2]], ["bbb", data.non_uniform_json], ["ccc", [5, 6]]]
expected = Table.new [["aaa", [1, 2]], ["bbb first", ["Mary", Nothing]], ["bbb last", ["Smith", Nothing]], ["bbb age", [23, Nothing]], ["bbb height", [Nothing, 1.9]], ["bbb weight", [Nothing, 70]], ["ccc", [5, 6]]]
table.expand_column "bbb" . should_equal expected
Test.specify "Expands a non-uniform column of JSON objects with extra and ignored fields" <|
table = Table.new [["aaa", [1, 2]], ["bbb", non_uniform_json], ["ccc", [5, 6]]]
group_builder.specify "Expands a non-uniform column of JSON objects with extra and ignored fields" <|
table = Table.new [["aaa", [1, 2]], ["bbb", data.non_uniform_json], ["ccc", [5, 6]]]
expected = Table.new [["aaa", [1, 2]], ["bbb last", ["Smith", Nothing]], ["bbb height", [Nothing, 1.9]], ["bbb foo", [Nothing, Nothing]], ["ccc", [5, 6]]]
table.expand_column "bbb" ["last", "height", "foo"] . should_equal expected
Test.specify "accept vectors/arrays within a column" <|
group_builder.specify "accept vectors/arrays within a column" <|
table = Table.new [["aaa", [1, 2]], ["bbb", [[1, 2, 3], [4, 5, 6].to_array]]]
expected = Table.new [["aaa", [1, 2]], ["bbb Value", [[1, 2, 3], [4, 5, 6].to_array]]]
table.expand_column "bbb" . should_equal expected
Test.specify "will work even if keys are not Text" <|
group_builder.specify "will work even if keys are not Text" <|
table = Table.new [["a", [1, 2]], ["b", [Map.from_vector [[1, "x"], [2, "y"]], Map.from_vector [[2, "z"], [3, "w"]]]]]
expected = Table.new [["a", [1, 2]], ["b 1", ["x", Nothing]], ["b 2", ["y", "z"]], ["b 3", [Nothing, "w"]]]
table.expand_column "b" . should_equal expected
table2 = Table.new [["a", [1, 2]], ["b", [Map.from_vector [[My_Mod_Type.Value 12, "x"], [My_Mod_Type.Value 23, "y"]], Map.from_vector [[My_Mod_Type.Value 32, "z"]]]]]
expected2 = Table.new [["a", [1, 2]], ["b x%10=2", ["x", "z"]], ["b x%10=3", ["y", Nothing]]]
expected2 = Table.new [["a", [1, 2]], ["b x%10=3", ["y", Nothing]], ["b x%10=2", ["x", "z"]]]
table2.expand_column "b" . should_equal expected2
Test.specify "will fail if text representation of keys is not unique" <|
group_builder.specify "will fail if text representation of keys is not unique" <|
k1 = My_Mod_Type.Value 12
k2 = My_Mod_Type.Value 32
m = Map.from_vector [[k1, "a"], [k2, "b"]]
@ -196,46 +210,46 @@ spec =
r.should_fail_with Illegal_Argument
r.catch.to_display_text . should_contain "keys are duplicated when converted to text"
Test.specify "will error when all objects have no fields" <|
group_builder.specify "will error when all objects have no fields" <|
table = Table.new [["aaa", [1, 2]], ["bbb", [Map.from_vector [], Map.from_vector []]], ["ccc", [5, 6]]]
r = table.expand_column "bbb"
r.should_fail_with Illegal_Argument
r.catch.message.should_contain "all input objects had no fields"
Test.specify "will error when fields=[]" <|
table = Table.new [["aaa", [1, 2]], ["bbb", uniform_json], ["ccc", [5, 6]]]
group_builder.specify "will error when fields=[]" <|
table = Table.new [["aaa", [1, 2]], ["bbb", data.uniform_json], ["ccc", [5, 6]]]
r = table.expand_column "bbb" fields=[]
r.should_fail_with Illegal_Argument
r.catch.message . should_equal "The fields parameter cannot be empty."
Test.specify "Can expand with no prefix" <|
table = Table.new [["aaa", [1, 2]], ["bbb", non_uniform_json], ["ccc", [5, 6]]]
group_builder.specify "Can expand with no prefix" <|
table = Table.new [["aaa", [1, 2]], ["bbb", data.non_uniform_json], ["ccc", [5, 6]]]
expected = Table.new [["aaa", [1, 2]], ["last", ["Smith", Nothing]], ["height", [Nothing, 1.9]], ["foo", [Nothing, Nothing]], ["ccc", [5, 6]]]
table.expand_column "bbb" ["last", "height", "foo"] Prefix_Name.None . should_equal expected
Test.specify "Can expand with an explicit column name prefix" <|
table = Table.new [["aaa", [1, 2]], ["bbb", non_uniform_json], ["ccc", [5, 6]]]
group_builder.specify "Can expand with an explicit column name prefix" <|
table = Table.new [["aaa", [1, 2]], ["bbb", data.non_uniform_json], ["ccc", [5, 6]]]
expected = Table.new [["aaa", [1, 2]], ["bbb last", ["Smith", Nothing]], ["bbb height", [Nothing, 1.9]], ["bbb foo", [Nothing, Nothing]], ["ccc", [5, 6]]]
table.expand_column "bbb" ["last", "height", "foo"] Prefix_Name.Column_Name . should_equal expected
Test.specify "Can expand with a custom prefix" <|
table = Table.new [["aaa", [1, 2]], ["bbb", non_uniform_json], ["ccc", [5, 6]]]
group_builder.specify "Can expand with a custom prefix" <|
table = Table.new [["aaa", [1, 2]], ["bbb", data.non_uniform_json], ["ccc", [5, 6]]]
expected = Table.new [["aaa", [1, 2]], ["expanded last", ["Smith", Nothing]], ["expanded height", [Nothing, 1.9]], ["expanded foo", [Nothing, Nothing]], ["ccc", [5, 6]]]
table.expand_column "bbb" ["last", "height", "foo"] (Prefix_Name.Custom "expanded ") . should_equal expected
Test.specify "Can expand with a custom prefix, via from conversion" <|
table = Table.new [["aaa", [1, 2]], ["bbb", non_uniform_json], ["ccc", [5, 6]]]
group_builder.specify "Can expand with a custom prefix, via from conversion" <|
table = Table.new [["aaa", [1, 2]], ["bbb", data.non_uniform_json], ["ccc", [5, 6]]]
expected = Table.new [["aaa", [1, 2]], ["expanded last", ["Smith", Nothing]], ["expanded height", [Nothing, 1.9]], ["expanded foo", [Nothing, Nothing]], ["ccc", [5, 6]]]
table.expand_column "bbb" ["last", "height", "foo"] "expanded " . should_equal expected
Test.group "expand_to_rows" <|
Test.specify "Can expand single values" <|
suite_builder.group "expand_to_rows" group_builder->
group_builder.specify "Can expand single values" <|
values_to_expand = [3, 4]
table = Table.new [["aaa", [1, 2]], ["bbb", values_to_expand], ["ccc", [5, 6]]]
expected = Table.new [["aaa", [1, 2]], ["bbb", [3, 4]], ["ccc", [5, 6]]]
table.expand_to_rows "bbb" . should_equal expected
Test.specify "Can expand Vectors" <|
group_builder.specify "Can expand Vectors" <|
values_to_expand = [[10, 11], [20, 21, 22], [30]]
table = Table.new [["aaa", [1, 2, 3]], ["bbb", values_to_expand], ["ccc", [5, 6, 7]]]
expected = Table.new [["aaa", [1, 1, 2, 2, 2, 3]], ["bbb", [10, 11, 20, 21, 22, 30]], ["ccc", [5, 5, 6, 6, 6, 7]]]
@ -243,31 +257,31 @@ spec =
r . should_equal expected
r.at "bbb" . value_type . should_equal Value_Type.Integer
Test.specify "Can expand Arrays" <|
group_builder.specify "Can expand Arrays" <|
values_to_expand = [[10, 11].to_array, [20, 21, 22].to_array, [30].to_array]
table = Table.new [["aaa", [1, 2, 3]], ["bbb", values_to_expand], ["ccc", [5, 6, 7]]]
expected = Table.new [["aaa", [1, 1, 2, 2, 2, 3]], ["bbb", [10, 11, 20, 21, 22, 30]], ["ccc", [5, 5, 6, 6, 6, 7]]]
table.expand_to_rows "bbb" . should_equal expected
Test.specify "Can expand Lists" <|
group_builder.specify "Can expand Lists" <|
values_to_expand = [[10, 11].to_list, [20, 21, 22].to_list, [30].to_list]
table = Table.new [["aaa", [1, 2, 3]], ["bbb", values_to_expand], ["ccc", [5, 6, 7]]]
expected = Table.new [["aaa", [1, 1, 2, 2, 2, 3]], ["bbb", [10, 11, 20, 21, 22, 30]], ["ccc", [5, 5, 6, 6, 6, 7]]]
table.expand_to_rows "bbb" . should_equal expected
Test.specify "Can expand Pairs" <|
group_builder.specify "Can expand Pairs" <|
values_to_expand = [Pair.new 10 20, Pair.new "a" [30], Pair.new 40 50]
table = Table.new [["aaa", [1, 2, 3]], ["bbb", values_to_expand], ["ccc", [5, 6, 7]]]
expected = Table.new [["aaa", [1, 1, 2, 2, 3, 3]], ["bbb", [10, 20, "a", [30], 40, 50]], ["ccc", [5, 5, 6, 6, 7, 7]]]
table.expand_to_rows "bbb" . should_equal expected
Test.specify "Can expand Ranges" <|
group_builder.specify "Can expand Ranges" <|
values_to_expand = [Range.new 10 12, Range.new 20 27 step=3, Range.new 30 31]
table = Table.new [["aaa", [1, 2, 3]], ["bbb", values_to_expand], ["ccc", [5, 6, 7]]]
expected = Table.new [["aaa", [1, 1, 2, 2, 2, 3]], ["bbb", [10, 11, 20, 23, 26, 30]], ["ccc", [5, 5, 6, 6, 6, 7]]]
table.expand_to_rows "bbb" . should_equal expected
Test.specify "Can expand Date_Ranges" <|
group_builder.specify "Can expand Date_Ranges" <|
range0 = (Date.new 2020 02 28).up_to (Date.new 2020 03 01)
range1 = (Date.new 2020 10 28).up_to (Date.new 2020 11 16) . with_step Date_Period.Week
range2 = (Date.new 2023 07 03).up_to (Date.new 2023 10 03) . with_step Date_Period.Month
@ -277,32 +291,32 @@ spec =
expected = Table.new [["aaa", [1, 1, 2, 2, 2, 3, 3, 3]], ["bbb", values_expanded], ["ccc", [5, 5, 6, 6, 6, 7, 7, 7]]]
table.expand_to_rows "bbb" . should_equal expected
Test.specify "Can expand mixed columns" <|
group_builder.specify "Can expand mixed columns" <|
values_to_expand = [[10, 11], 22.up_to 26, (Date.new 2020 02 28).up_to (Date.new 2020 03 01)]
values_expanded = [10, 11, 22, 23, 24, 25, Date.new 2020 02 28, Date.new 2020 02 29]
table = Table.new [["aaa", [1, 2, 3]], ["bbb", values_to_expand], ["ccc", [5, 6, 7]]]
expected = Table.new [["aaa", [1, 1, 2, 2, 2, 2, 3, 3]], ["bbb", values_expanded], ["ccc", [5, 5, 6, 6, 6, 6, 7, 7]]]
table.expand_to_rows "bbb" . should_equal expected
Test.specify "Respects `at_least_one_row=True`" <|
group_builder.specify "Respects `at_least_one_row=True`" <|
values_to_expand = [[10, 11], [], [30]]
table = Table.new [["aaa", [1, 2, 3]], ["bbb", values_to_expand], ["ccc", [5, 6, 7]]]
expected = Table.new [["aaa", [1, 1, 2, 3]], ["bbb", [10, 11, Nothing, 30]], ["ccc", [5, 5, 6, 7]]]
table.expand_to_rows "bbb" at_least_one_row=True . should_equal expected
Test.specify "Respects `at_least_one_row=False`" <|
group_builder.specify "Respects `at_least_one_row=False`" <|
values_to_expand = [[10, 11], [], [30]]
table = Table.new [["aaa", [1, 2, 3]], ["bbb", values_to_expand], ["ccc", [5, 6, 7]]]
expected = Table.new [["aaa", [1, 1, 3]], ["bbb", [10, 11, 30]], ["ccc", [5, 5, 7]]]
table.expand_to_rows "bbb" . should_equal expected
Test.specify "Missing column" <|
group_builder.specify "Missing column" <|
table = Table.new [["aaa", [1, 2, 3]], ["notbbb", [8, 8, 8]], ["ccc", [5, 6, 7]]]
table.expand_to_rows "bbb" . should_fail_with No_Such_Column
Test.group "XML" <|
Test.specify "Can convert XML_Document and XML_Element to a table" <|
[library_document, library_root].map x->
suite_builder.group "XML" group_builder->
group_builder.specify "Can convert XML_Document and XML_Element to a table" <|
[data.library_document, data.library_root].map x->
t = x.to Table
t.at "Name" . to_vector . should_equal ["Library"]
t.at "@catalog" . to_vector . should_equal ["Fiction"]
@ -314,8 +328,8 @@ spec =
t.at "Children" . at 0 . at 4 . name . should_equal "Magazine"
t.column_names . should_not_contain "Value"
Test.specify "Can expand the Children column to rows" <|
t = library_root.to Table . expand_to_rows "Children"
group_builder.specify "Can expand the Children column to rows" <|
t = data.library_root.to Table . expand_to_rows "Children"
t.at "Name" . to_vector . should_equal (Vector.fill 5 "Library")
t.at "@catalog" . to_vector . should_equal (Vector.fill 5 "Fiction")
t.at "@letter" . to_vector . should_equal (Vector.fill 5 "A")
@ -326,8 +340,8 @@ spec =
t.at "Children" . at 4 . name . should_equal "Magazine"
t.column_names . should_not_contain "Value"
Test.specify "Can expand the child nodes" <|
t = library_root.to Table . expand_to_rows "Children" . expand_column "Children"
group_builder.specify "Can expand the child nodes" <|
t = data.library_root.to Table . expand_to_rows "Children" . expand_column "Children"
t.at "Name" . to_vector . should_equal (Vector.fill 5 "Library")
t.at "@catalog" . to_vector . should_equal (Vector.fill 5 "Fiction")
t.at "@letter" . to_vector . should_equal (Vector.fill 5 "A")
@ -338,7 +352,7 @@ spec =
t.at "Children @series" . to_vector . map trim_if_text . should_equal [Nothing, Nothing, Nothing, Nothing, 'AutoCar']
t.column_names . should_not_contain "Children Children"
Test.specify "Can expand nested child nodes" <|
group_builder.specify "Can expand nested child nodes" <|
xml_string = '''
<a>
<b>
@ -354,15 +368,15 @@ spec =
t = xml.to Table . expand_to_rows "Children" . expand_column "Children" . expand_to_rows "Children Children" . expand_column "Children Children"
t.at "Children Children @id" . to_vector . should_equal ["1", "2", "3", "4"]
Test.specify "Converting a node without child nodes produces a Value column" <|
book = library_root . at 1
group_builder.specify "Converting a node without child nodes produces a Value column" <|
book = data.library_root . at 1
t = book.to Table
t.at "Name" . to_vector . should_equal ["Book"]
t.at "@author" . to_vector . should_equal ["An Author"]
t.column_names . should_not_contain "Children"
t.at "Value" . to_vector . should_equal ["My Book"]
Test.specify "Converting a node without any child nodes does not produce Value or Children columns" <|
group_builder.specify "Converting a node without any child nodes does not produce Value or Children columns" <|
xml = XML_Document.from_text '<foo id="10"></foo>'
t = xml.to Table
t.at "Name" . to_vector . should_equal ["foo"]
@ -370,4 +384,8 @@ spec =
t.column_names . should_not_contain "Children"
t.column_names . should_not_contain "Value"
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
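
Every file in this commit follows the same mechanical rewrite: `Test.group` becomes `suite_builder.group`, `Test.specify` becomes `group_builder.specify`, and the old `main = Test_Suite.run_main spec` entry point becomes an explicit build-and-run pair, as above. A minimal sketch of the new shape, using the names this diff itself establishes (the group name and assertion below are illustrative):

from Standard.Base import all
from Standard.Test_New import all

add_specs suite_builder =
    suite_builder.group "Arithmetic" group_builder->
        group_builder.specify "should add numbers" <|
            (1 + 2) . should_equal 3

main =
    suite = Test.build suite_builder->
        add_specs suite_builder
    suite.run_with_filter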


@ -4,51 +4,72 @@ from Standard.Table import Table, Column, Delimited, Data_Formatter
import Standard.Table.Data.Type.Value_Type.Value_Type
from Standard.Table.Extensions.Table_Conversions import all
from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Util import all
spec =
type Data
Value ~data
c_number self = self.data.at 0
c_party self = self.data.at 1
c_name self = self.data.at 2
c_from self = self.data.at 3
c_to self = self.data.at 4
expected self = self.data.at 5
table self = self.data.at 6
setup = Data.Value <|
c_number = ["Number", [71, 72, 73, 74, 75, 76, 77]]
c_party = ["Party", ["Conservative", "Conservative", "Labour", "Labour", "Conservative", "Conservative", "Conservative"]]
c_name = ["Title", ["Margaret Thatcher", "John Major", "Tony Blair", "Gordon Brown", "David Cameron", "Theresa May", "Boris Johnson"]]
c_from = ["From", [Date.new 1979 05 04, Date.new 1990 11 28, Date.new 1997 05 02, Date.new 2007 06 27, Date.new 2010 05 11, Date.new 2016 07 13, Date.new 2019 07 24]]
c_to = ["To", [Date.new 1990 11 28, Date.new 1997 05 02, Date.new 2007 06 27, Date.new 2010 05 11, Date.new 2016 07 13, Date.new 2019 07 24, Date.new 2022 07 07]]
expected = Table.new [c_number, c_party, c_name, c_from, c_to]
Test.group "File.read (Delimited) should work with Dates" <|
table = (enso_project.data / "prime_ministers.csv").read
Test.specify "should be able to read in a table with dates" <|
table.column_count.should_equal 5
table.row_count.should_equal 7
table.info.at "Column" . to_vector . should_equal ['Number','Party', 'Title', 'From', 'To']
table.info.at "Value Type" . to_vector . should_equal [Value_Type.Integer, Value_Type.Char, Value_Type.Char, Value_Type.Date, Value_Type.Date]
[c_number, c_party, c_name, c_from, c_to, expected, table]
Test.specify "should be able to treat a single value as a Date" <|
from_column = table.at 'From'
add_specs suite_builder =
data = Data.setup
suite_builder.group "File.read (Delimited) should work with Dates" group_builder->
group_builder.specify "should be able to read in a table with dates" <|
data.table.column_count.should_equal 5
data.table.row_count.should_equal 7
data.table.info.at "Column" . to_vector . should_equal ['Number','Party', 'Title', 'From', 'To']
data.table.info.at "Value Type" . to_vector . should_equal [Value_Type.Integer, Value_Type.Char, Value_Type.Char, Value_Type.Date, Value_Type.Date]
group_builder.specify "should be able to treat a single value as a Date" <|
from_column = data.table.at 'From'
from_column.at 6 . year . should_equal 2019
from_column.at 6 . should_equal (Date.new 2019 7 24)
Test.specify "should be able to compare columns and table" <|
table.at 'Number' . should_equal (Column.from_vector c_number.first c_number.second)
table.at 'Party' . should_equal (Column.from_vector c_party.first c_party.second)
table.at 'Title' . should_equal (Column.from_vector c_name.first c_name.second)
table.at 'From' . should_equal (Column.from_vector c_from.first c_from.second)
table.at 'To' . should_equal (Column.from_vector c_to.first c_to.second)
table.should_equal expected
group_builder.specify "should be able to compare columns and data.table" <|
data.table.at 'Number' . should_equal (Column.from_vector data.c_number.first data.c_number.second)
data.table.at 'Party' . should_equal (Column.from_vector data.c_party.first data.c_party.second)
data.table.at 'Title' . should_equal (Column.from_vector data.c_name.first data.c_name.second)
data.table.at 'From' . should_equal (Column.from_vector data.c_from.first data.c_from.second)
data.table.at 'To' . should_equal (Column.from_vector data.c_to.first data.c_to.second)
data.table.should_equal data.expected
Test.group "Should be able to serialise a table with Dates to Text" <|
Test.specify "should serialise back to input" <|
suite_builder.group "Should be able to serialise a table with Dates to Text" group_builder->
group_builder.specify "should serialise back to input" <|
expected_text = normalize_lines <|
(enso_project.data / "prime_ministers.csv").read_text
delimited = Text.from expected format=(Delimited "," line_endings=Line_Ending_Style.Unix)
delimited = Text.from data.expected format=(Delimited "," line_endings=Line_Ending_Style.Unix)
delimited.should_equal expected_text
Test.specify "should serialise dates with format" <|
test_table = Table.new [c_from]
group_builder.specify "should serialise dates with format" <|
test_table = Table.new [data.c_from]
expected_text = 'From\n04.05.1979\n28.11.1990\n02.05.1997\n27.06.2007\n11.05.2010\n13.07.2016\n24.07.2019\n'
data_formatter = Data_Formatter.Value.with_datetime_formats date_formats=["dd.MM.yyyy"]
delimited = Text.from test_table format=(Delimited "," value_formatter=data_formatter line_endings=Line_Ending_Style.Unix)
delimited.should_equal expected_text
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
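
Fixtures that the old `spec` computed directly inside a `Test.group` body move into a `Data` type with a lazy constructor field (`Value ~data`), built once by `setup` and exposed through small accessor methods. Because the field is lazy, the setup work is only forced when a spec actually touches `data`. A condensed sketch of the pattern as it appears above (the column contents are illustrative):

from Standard.Base import all
from Standard.Table import Table
from Standard.Test_New import all

type Data
    Value ~data

    table self = self.data.at 0

    setup = Data.Value <|
        table = Table.new [["x", [1, 2, 3]]]
        [table]

add_specs suite_builder =
    data = Data.setup
    suite_builder.group "Fixture demo" group_builder->
        group_builder.specify "should see the shared table" <|
            data.table.row_count . should_equal 3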


@ -4,49 +4,65 @@ from Standard.Table import Table, Delimited, Column, Data_Formatter
import Standard.Table.Data.Type.Value_Type.Value_Type
from Standard.Table.Extensions.Table_Conversions import all
from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Util import all
spec =
type Data
Value ~data
c_number self = self.data.at 0
c_type self = self.data.at 1
c_date self = self.data.at 2
expected self = self.data.at 3
table self = self.data.at 4
setup = Data.Value <|
c_number = ["Serial number", ["2LMXK1", "2LMXK1", "JEMLP3", "JEMLP3", "BR83GP", "BR83GP"]]
c_type = ["Movement type", [101, 301, 101, 203, 101, 301]]
c_date = ["Posting date", [Date_Time.new 2015 1 5 9 0, Date_Time.new 2015 1 5 14 0, Date_Time.new 2015 1 6 9 0, Date_Time.new 2015 1 7 17 30, Date_Time.new 2011 1 5 9 0, Date_Time.new 2011 1 9 15 30]]
expected = Table.new [c_number, c_type, c_date]
Test.group "File.read (Delimited) should work with Date_Time" <|
table = (enso_project.data / "datetime_sample.csv").read
Test.specify "should be able to read in a table with dates" <|
table.column_count.should_equal 3
table.info.at "Column" . to_vector . should_equal ['Serial number','Movement type', 'Posting date']
table.info.at "Value Type" . to_vector . should_equal [Value_Type.Char, Value_Type.Integer, Value_Type.Date_Time]
table.row_count.should_equal 6
[c_number, c_type, c_date, expected, table]
Test.specify "should be able to treat a single value as a Date_Time" <|
from_column = table.at 'Posting date'
add_specs suite_builder =
data = Data.setup
suite_builder.group "File.read (Delimited) should work with Date_Time" group_builder->
group_builder.specify "should be able to read in a table with dates" <|
data.table.column_count.should_equal 3
data.table.info.at "Column" . to_vector . should_equal ['Serial number','Movement type', 'Posting date']
data.table.info.at "Value Type" . to_vector . should_equal [Value_Type.Char, Value_Type.Integer, Value_Type.Date_Time]
data.table.row_count.should_equal 6
group_builder.specify "should be able to treat a single value as a Date_Time" <|
from_column = data.table.at 'Posting date'
from_column.at 5 . year . should_equal 2011
from_column.at 5 . should_equal (Date_Time.new 2011 1 9 15 30)
Test.specify "should be able to compare columns and table" <|
table.at 'Serial number' . should_equal (Column.from_vector c_number.first c_number.second)
table.at 'Movement type' . should_equal (Column.from_vector c_type.first c_type.second)
table.at 'Posting date' . should_equal (Column.from_vector c_date.first c_date.second)
table.should_equal expected
group_builder.specify "should be able to compare columns and table" <|
data.table.at 'Serial number' . should_equal (Column.from_vector data.c_number.first data.c_number.second)
data.table.at 'Movement type' . should_equal (Column.from_vector data.c_type.first data.c_type.second)
data.table.at 'Posting date' . should_equal (Column.from_vector data.c_date.first data.c_date.second)
data.table.should_equal data.expected
Test.group "Should be able to serialise a table with DateTimes to Text" <|
Test.specify "should serialise back to input" <|
suite_builder.group "Should be able to serialise a table with DateTimes to Text" group_builder->
group_builder.specify "should serialise back to input" <|
expected_text = normalize_lines <|
(enso_project.data / "datetime_sample_normalized_hours.csv").read_text
data_formatter = Data_Formatter.Value . with_datetime_formats datetime_formats=["yyyy-MM-dd HH:mm:ss"]
delimited = Text.from expected format=(Delimited "," line_endings=Line_Ending_Style.Unix value_formatter=data_formatter)
delimited = Text.from data.expected format=(Delimited "," line_endings=Line_Ending_Style.Unix value_formatter=data_formatter)
delimited.should_equal expected_text
Test.specify "should serialise dates with format" <|
test_table = Table.new [c_date]
group_builder.specify "should serialise dates with format" <|
test_table = Table.new [data.c_date]
expected_text = 'Posting date\n05.01.2015 09-00\n05.01.2015 14-00\n06.01.2015 09-00\n07.01.2015 17-30\n05.01.2011 09-00\n09.01.2011 15-30\n'
data_formatter = Data_Formatter.Value . with_datetime_formats datetime_formats=["dd.MM.yyyy HH-mm"]
delimited = Text.from test_table format=(Delimited "," value_formatter=data_formatter line_endings=Line_Ending_Style.Unix)
delimited.should_equal expected_text
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter


@ -6,12 +6,14 @@ import Standard.Base.Errors.Time_Error.Time_Error
from Standard.Table import Column, Table
from Standard.Table.Errors import No_Input_Columns_Selected, Missing_Input_Columns
import Standard.Test.Extensions
from Standard.Test import Problems, Test, Test_Suite
from Standard.Test_New import all
from project.Util import all
spec =
type Data
Value ~table
setup =
column0 = Column.from_vector "dates" [Date.new 2020 12 21, Date.new 2023 4 25]
column1 = Column.from_vector "datetimes" [Date_Time.new 2020 01 10 3 4 5 zone=Time_Zone.utc, Date_Time.new 2020 08 08 5 6 7 zone=Time_Zone.utc]
column2 = Column.from_vector "times" [Time_Of_Day.new 1 2 3, Time_Of_Day.new 10 30 35]
@ -24,193 +26,197 @@ spec =
column9 = Column.from_vector "date_formats" ["yyyyMMdd", "ddMMyyyy"]
column10 = Column.from_vector "time_formats" ["hh:mm:ss", "ss:mm:hh"]
table = Table.new [column0, column1, column2, column3, column4, column5, column6, column7, column8, column9, column10]
Data.Value table
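# A single-fixture variant of the lazy `Data` pattern: the constructor
# field holds the table itself, so no vector indexing or accessor
# methods are needed.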
add_specs suite_builder =
check_unchanged t0 t1 columns = columns.map c->
Test.with_clue c+": " <|
(t0.at c) . should_equal (t1.at c)
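    # `Test.with_clue` attaches the given text (here the column name) to any
    # assertion failure raised inside the block, so a failing comparison in
    # `check_unchanged` reports which column was being checked.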
Test.group "Dates" <|
Test.specify "Date and Date_Time, with format string" <|
data = Data.setup
suite_builder.group "Dates" group_builder->
group_builder.specify "Date and Date_Time, with format string" <|
expected_dates = Column.from_vector "dates" ["20201221", "20230425"]
expected_datetimes = Column.from_vector "datetimes" ["20200110", "20200808"]
actual = table.format ["dates", "datetimes"] "yyyyMMdd"
actual.column_names . should_equal table.column_names
actual = data.table.format ["dates", "datetimes"] "yyyyMMdd"
actual.column_names . should_equal data.table.column_names
actual.at "dates" . should_equal expected_dates
actual.at "datetimes" . should_equal expected_datetimes
check_unchanged table actual ["times", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
check_unchanged data.table actual ["times", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
Test.specify "Date_Time and Time_Of_Day, with format string" <|
group_builder.specify "Date_Time and Time_Of_Day, with format string" <|
expected_datetimes = Column.from_vector "datetimes" ["03:04:05", "05:06:07"]
expected_times = Column.from_vector "times" ["01:02:03", "10:30:35"]
actual = table.format ["datetimes", "times"] "hh:mm:ss"
actual.column_names . should_equal table.column_names
actual = data.table.format ["datetimes", "times"] "hh:mm:ss"
actual.column_names . should_equal data.table.column_names
actual.at "datetimes" . should_equal expected_datetimes
actual.at "times" . should_equal expected_times
check_unchanged table actual ["dates", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
check_unchanged data.table actual ["dates", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
Test.specify "Date and Date_Time, with Date_Time_Formatter" <|
group_builder.specify "Date and Date_Time, with Date_Time_Formatter" <|
expected_dates = Column.from_vector "dates" ["20201221", "20230425"]
expected_datetimes = Column.from_vector "datetimes" ["20200110", "20200808"]
actual = table.format ["dates", "datetimes"] (Date_Time_Formatter.from "yyyyMMdd")
actual.column_names . should_equal table.column_names
actual = data.table.format ["dates", "datetimes"] (Date_Time_Formatter.from "yyyyMMdd")
actual.column_names . should_equal data.table.column_names
actual.at "dates" . should_equal expected_dates
actual.at "datetimes" . should_equal expected_datetimes
check_unchanged table actual ["times", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
check_unchanged data.table actual ["times", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
Test.specify "Date_Time and Time_Of_Day, Date_Time_Formatter" <|
group_builder.specify "Date_Time and Time_Of_Day, Date_Time_Formatter" <|
expected_datetimes = Column.from_vector "datetimes" ["03:04:05", "05:06:07"]
expected_times = Column.from_vector "times" ["01:02:03", "10:30:35"]
actual = table.format ["datetimes", "times"] (Date_Time_Formatter.from "hh:mm:ss")
actual.column_names . should_equal table.column_names
actual = data.table.format ["datetimes", "times"] (Date_Time_Formatter.from "hh:mm:ss")
actual.column_names . should_equal data.table.column_names
actual.at "datetimes" . should_equal expected_datetimes
actual.at "times" . should_equal expected_times
check_unchanged table actual ["dates", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
check_unchanged data.table actual ["dates", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
Test.specify "Date and Date_Time, with format column" <|
group_builder.specify "Date and Date_Time, with format column" <|
expected_dates = Column.from_vector "dates" ["20201221", "25042023"]
expected_datetimes = Column.from_vector "datetimes" ["20200110", "08082020"]
actual = table.format ["dates", "datetimes"] (table.at "date_formats")
actual.column_names . should_equal table.column_names
actual = data.table.format ["dates", "datetimes"] (data.table.at "date_formats")
actual.column_names . should_equal data.table.column_names
actual.at "dates" . should_equal expected_dates
actual.at "datetimes" . should_equal expected_datetimes
check_unchanged table actual ["times", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
check_unchanged data.table actual ["times", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
Test.specify "Date_Time and Time_Of_Day, with format column" <|
group_builder.specify "Date_Time and Time_Of_Day, with format column" <|
expected_datetimes = Column.from_vector "datetimes" ["03:04:05", "07:06:05"]
expected_times = Column.from_vector "times" ["01:02:03", "35:30:10"]
actual = table.format ["datetimes", "times"] (table.at "time_formats")
actual.column_names . should_equal table.column_names
actual = data.table.format ["datetimes", "times"] (data.table.at "time_formats")
actual.column_names . should_equal data.table.column_names
actual.at "datetimes" . should_equal expected_datetimes
actual.at "times" . should_equal expected_times
check_unchanged table actual ["dates", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
check_unchanged data.table actual ["dates", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
Test.specify "Date and Date_Time, with empty format string" <|
group_builder.specify "Date and Date_Time, with empty format string" <|
expected_dates = Column.from_vector "dates" ["2020-12-21", "2023-04-25"]
expected_datetimes = Column.from_vector "datetimes" ['2020-01-10 03:04:05Z[UTC]', '2020-08-08 05:06:07Z[UTC]']
expected_times = Column.from_vector "times" ["01:02:03", "10:30:35"]
actual = table.format ["dates", "datetimes", "times"] ""
actual.column_names . should_equal table.column_names
actual = data.table.format ["dates", "datetimes", "times"] ""
actual.column_names . should_equal data.table.column_names
actual.at "dates" . should_equal expected_dates
actual.at "datetimes" . should_equal expected_datetimes
actual.at "times" . should_equal expected_times
check_unchanged table actual ["bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
check_unchanged data.table actual ["bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
Test.specify "Date and Date_Time, with Nothing format" <|
group_builder.specify "Date and Date_Time, with Nothing format" <|
expected_dates = Column.from_vector "dates" ["2020-12-21", "2023-04-25"]
expected_datetimes = Column.from_vector "datetimes" ['2020-01-10 03:04:05Z[UTC]', '2020-08-08 05:06:07Z[UTC]']
expected_times = Column.from_vector "times" ["01:02:03", "10:30:35"]
actual = table.format ["dates", "datetimes", "times"] Nothing
actual.column_names . should_equal table.column_names
actual = data.table.format ["dates", "datetimes", "times"] Nothing
actual.column_names . should_equal data.table.column_names
actual.at "dates" . should_equal expected_dates
actual.at "datetimes" . should_equal expected_datetimes
actual.at "times" . should_equal expected_times
check_unchanged table actual ["bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
check_unchanged data.table actual ["bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
Test.group "Boolean" <|
Test.specify "Booleans, with format string" <|
suite_builder.group "Boolean" group_builder->
group_builder.specify "Booleans, with format string" <|
expected_bools = Column.from_vector "bools" ["tt", "ff"]
actual = table.format ["bools"] "tt|ff"
actual.column_names . should_equal table.column_names
actual = data.table.format ["bools"] "tt|ff"
actual.column_names . should_equal data.table.column_names
actual.at "bools" . should_equal expected_bools
check_unchanged table actual ["dates", "datetimes", "times", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
check_unchanged data.table actual ["dates", "datetimes", "times", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
Test.specify "Booleans, with column" <|
group_builder.specify "Booleans, with column" <|
expected_bools = Column.from_vector "bools" ["t", "F"]
actual = table.format ["bools"] (table.at "bool_formats")
actual.column_names . should_equal table.column_names
actual = data.table.format ["bools"] (data.table.at "bool_formats")
actual.column_names . should_equal data.table.column_names
actual.at "bools" . should_equal expected_bools
check_unchanged table actual ["dates", "datetimes", "times", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
check_unchanged data.table actual ["dates", "datetimes", "times", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
Test.specify "Booleans, with format empty format string" <|
group_builder.specify "Booleans, with format empty format string" <|
expected_bools = Column.from_vector "bools" ["True", "False"]
actual = table.format ["bools"] ""
actual.column_names . should_equal table.column_names
actual = data.table.format ["bools"] ""
actual.column_names . should_equal data.table.column_names
actual.at "bools" . should_equal expected_bools
check_unchanged table actual ["dates", "datetimes", "times", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
check_unchanged data.table actual ["dates", "datetimes", "times", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
Test.specify "Booleans, with format Nothing format" <|
group_builder.specify "Booleans, with format Nothing format" <|
expected_bools = Column.from_vector "bools" ["True", "False"]
actual = table.format ["bools"] Nothing
actual.column_names . should_equal table.column_names
actual = data.table.format ["bools"] Nothing
actual.column_names . should_equal data.table.column_names
actual.at "bools" . should_equal expected_bools
check_unchanged table actual ["dates", "datetimes", "times", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
check_unchanged data.table actual ["dates", "datetimes", "times", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
Test.group "Numeric" <|
Test.specify "Ints and floats, with format string" <|
suite_builder.group "Numeric" group_builder->
group_builder.specify "Ints and floats, with format string" <|
expected_ints = Column.from_vector "ints" ["100,000,000.00", "2,222.00"]
expected_floats = Column.from_vector "floats" ["200,000,000.90", "4,444.30"]
actual = table.format ["ints", "floats"] "#,##0.00"
actual.column_names . should_equal table.column_names
actual = data.table.format ["ints", "floats"] "#,##0.00"
actual.column_names . should_equal data.table.column_names
actual.at "ints" . should_equal expected_ints
actual.at "floats" . should_equal expected_floats
check_unchanged table actual ["dates", "datetimes", "times", "bools", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
check_unchanged data.table actual ["dates", "datetimes", "times", "bools", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
Test.specify "Ints and floats, with column" <|
group_builder.specify "Ints and floats, with column" <|
expected_ints = Column.from_vector "ints" ["100,000,000.00", "2,222"]
expected_floats = Column.from_vector "floats" ["200,000,000.90", "4,444"]
actual = table.format ["ints", "floats"] (table.at "numeric_formats")
actual.column_names . should_equal table.column_names
actual = data.table.format ["ints", "floats"] (data.table.at "numeric_formats")
actual.column_names . should_equal data.table.column_names
actual.at "ints" . should_equal expected_ints
actual.at "floats" . should_equal expected_floats
check_unchanged table actual ["dates", "datetimes", "times", "bools", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
check_unchanged data.table actual ["dates", "datetimes", "times", "bools", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
Test.specify "Ints and floats, with empty format string" <|
group_builder.specify "Ints and floats, with empty format string" <|
expected_ints = Column.from_vector "ints" ["100000000", "2222"]
expected_floats = Column.from_vector "floats" ['2.000000009E8', '4444.3']
actual = table.format ["ints", "floats"] ""
actual.column_names . should_equal table.column_names
actual = data.table.format ["ints", "floats"] ""
actual.column_names . should_equal data.table.column_names
actual.at "ints" . should_equal expected_ints
actual.at "floats" . should_equal expected_floats
check_unchanged table actual ["dates", "datetimes", "times", "bools", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
check_unchanged data.table actual ["dates", "datetimes", "times", "bools", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
Test.specify "Ints and floats, with Nothing format" <|
group_builder.specify "Ints and floats, with Nothing format" <|
expected_ints = Column.from_vector "ints" ["100000000", "2222"]
expected_floats = Column.from_vector "floats" ['2.000000009E8', '4444.3']
actual = table.format ["ints", "floats"] Nothing
actual.column_names . should_equal table.column_names
actual = data.table.format ["ints", "floats"] Nothing
actual.column_names . should_equal data.table.column_names
actual.at "ints" . should_equal expected_ints
actual.at "floats" . should_equal expected_floats
check_unchanged table actual ["dates", "datetimes", "times", "bools", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
check_unchanged data.table actual ["dates", "datetimes", "times", "bools", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
Test.group "All types" <|
Test.specify "Format all columns" <|
suite_builder.group "All types" group_builder->
group_builder.specify "Format all columns" <|
expected_dates = Column.from_vector "dates" ["2020-12-21", "2023-04-25"]
expected_datetimes = Column.from_vector "datetimes" ['2020-01-10 03:04:05Z[UTC]', '2020-08-08 05:06:07Z[UTC]']
expected_times = Column.from_vector "times" ["01:02:03", "10:30:35"]
expected_ints = Column.from_vector "ints" ["100000000", "2222"]
expected_floats = Column.from_vector "floats" ['2.000000009E8', '4444.3']
expected_bools = Column.from_vector "bools" ["True", "False"]
actual = table.format ["dates", "datetimes", "times", "bools", "ints", "floats"] ""
actual.column_names . should_equal table.column_names
actual = data.table.format ["dates", "datetimes", "times", "bools", "ints", "floats"] ""
actual.column_names . should_equal data.table.column_names
actual.at "dates" . should_equal expected_dates
actual.at "datetimes" . should_equal expected_datetimes
actual.at "times" . should_equal expected_times
actual.at "bools" . should_equal expected_bools
actual.at "ints" . should_equal expected_ints
actual.at "floats" . should_equal expected_floats
check_unchanged table actual ["strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
check_unchanged data.table actual ["strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
Test.group "Column selectors" <|
Test.specify "Date and Date_Time, with regex column selector" <|
suite_builder.group "Column selectors" group_builder->
group_builder.specify "Date and Date_Time, with regex column selector" <|
expected_dates = Column.from_vector "dates" ["20201221", "20230425"]
expected_datetimes = Column.from_vector "datetimes" ["20200110", "20200808"]
actual = table.format "date(|time)s".to_regex "yyyyMMdd"
actual.column_names . should_equal table.column_names
actual = data.table.format "date(|time)s".to_regex "yyyyMMdd"
actual.column_names . should_equal data.table.column_names
actual.at "dates" . should_equal expected_dates
actual.at "datetimes" . should_equal expected_datetimes
check_unchanged table actual ["times", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
check_unchanged data.table actual ["times", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
Test.specify "Date and Date_Time, with index column selector" <|
group_builder.specify "Date and Date_Time, with index column selector" <|
expected_dates = Column.from_vector "dates" ["20201221", "20230425"]
expected_datetimes = Column.from_vector "datetimes" ["20200110", "20200808"]
actual = table.format [0, 1] "yyyyMMdd"
actual.column_names . should_equal table.column_names
actual = data.table.format [0, 1] "yyyyMMdd"
actual.column_names . should_equal data.table.column_names
actual.at "dates" . should_equal expected_dates
actual.at "datetimes" . should_equal expected_datetimes
check_unchanged table actual ["times", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
check_unchanged data.table actual ["times", "bools", "ints", "floats", "strings", "date_formats", "time_formats", "numeric_formats", "bool_formats"]
Test.group "Locales" <|
Test.specify "Date with locale" <|
suite_builder.group "Locales" group_builder->
group_builder.specify "Date with locale" <|
input = Table.new [Column.from_vector "values" [Date.new 2020 6 21, Date.new 2023 4 25]]
expected_default = Table.new [Column.from_vector "values" ["21. June 2020", "25. April 2023"]]
expected_gb = Table.new [Column.from_vector "values" ["21. June 2020", "25. April 2023"]]
@ -220,7 +226,7 @@ spec =
input.format ["values"] (Date_Time_Formatter.from "d. MMMM yyyy" Locale.uk) . should_equal expected_gb
input.format ["values"] (Date_Time_Formatter.from "d. MMMM yyyy" Locale.france) . should_equal expected_fr
Test.specify "Column with locale" <|
group_builder.specify "Column with locale" <|
input = Column.from_vector "values" [100000000, 2222, 3]
formats = Column.from_vector "formats" ["#,##0.00", "0.00", "0"]
output = Column.from_vector "values" ["100000000,00", "2222,00", "3"]
@ -228,23 +234,27 @@ spec =
expected = Table.new [output, formats]
table.format ["values"] (table.at "formats") locale=(Locale.new "fr") . should_equal expected
Test.group "Errors" <|
Test.specify "Bad formats" <|
table.format ["dates", "datetimes"] "jjjjjj" . should_fail_with Date_Time_Format_Parse_Error
table.format ["ints", "floats"] "#.##0,00" . should_fail_with Illegal_Argument
table.format ["bools"] "||||" . should_fail_with Illegal_Argument
suite_builder.group "Errors" group_builder->
group_builder.specify "Bad formats" <|
data.table.format ["dates", "datetimes"] "jjjjjj" . should_fail_with Date_Time_Format_Parse_Error
data.table.format ["ints", "floats"] "#.##0,00" . should_fail_with Illegal_Argument
data.table.format ["bools"] "||||" . should_fail_with Illegal_Argument
Test.specify "Format all columns with a type-specific format" <|
table.format ["dates", "datetimes", "times", "bools", "ints", "floats"] "yyyyMMdd" . should_fail_with Time_Error
table.format ["dates", "datetimes", "times", "bools", "ints", "floats"] "#,##0.00" . should_fail_with Illegal_Argument
table.format ["dates", "datetimes", "times", "bools", "ints", "floats"] "t|f" . should_fail_with Time_Error
group_builder.specify "Format all columns with a type-specific format" <|
data.table.format ["dates", "datetimes", "times", "bools", "ints", "floats"] "yyyyMMdd" . should_fail_with Time_Error
data.table.format ["dates", "datetimes", "times", "bools", "ints", "floats"] "#,##0.00" . should_fail_with Illegal_Argument
data.table.format ["dates", "datetimes", "times", "bools", "ints", "floats"] "t|f" . should_fail_with Time_Error
Test.specify "Missing column" <|
table.format ["abc", "dates"] . should_fail_with Missing_Input_Columns
group_builder.specify "Missing column" <|
data.table.format ["abc", "dates"] . should_fail_with Missing_Input_Columns
Problems.expect_only_warning Missing_Input_Columns <|
table.format ["abc", "dates"] error_on_missing_columns=False . should_be_a Table
data.table.format ["abc", "dates"] error_on_missing_columns=False . should_be_a Table
Test.specify "No columns selected" <|
table.format [] . should_fail_with No_Input_Columns_Selected
group_builder.specify "No columns selected" <|
data.table.format [] . should_fail_with No_Input_Columns_Selected
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter group_filter="Errors"
main = Test_Suite.run_main spec
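
This is the one runner in the commit that passes a filter: judging by the diff, `run_with_filter` takes an optional `group_filter` restricting execution to matching groups, while the bare call used in the other files runs the whole suite. A sketch of the filtered form (only the keyword is attested here; its exact matching semantics are not shown in this diff):

main =
    suite = Test.build suite_builder->
        add_specs suite_builder
    # Omit `group_filter` to run every group in the suite.
    suite.run_with_filter group_filter="Errors"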


@ -16,8 +16,7 @@ import Standard.Database.Extensions.Upload_Database_Table
import Standard.Database.Extensions.Upload_In_Memory_Table
from Standard.Database import Database, SQLite, In_Memory
from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Util import all
@ -42,7 +41,11 @@ type My_Comparator
Comparable.from (_:My) = My_Comparator
spec =
type Data
Value ~varied_type_table
setup =
make_varied_type_table =
strs = ["strs", ["a", "b", "c", Nothing]]
ints = ["ints", [Nothing, 1, 2, 4]]
@ -57,10 +60,13 @@ spec =
just_nulls = ["just_nulls", [Nothing, Nothing, Nothing, Nothing]]
Table.new [strs, ints, doubles, doubles_and_ints, custom_objects, dates, times, datetimes, mixed, mixed_dates, just_nulls]
varied_type_table = make_varied_type_table
Data.Value make_varied_type_table
Test.group "Construction" <|
Test.specify "should allow creating a table from rows" <|
add_specs suite_builder =
suite_builder.group "Construction" group_builder->
data = Data.setup
group_builder.specify "should allow creating a table from rows" <|
header = ['foo', 'bar']
rows = [[1, False], [2, True], [3, False]]
r = Table.from_rows header rows
@ -73,7 +79,7 @@ spec =
r2.at "foo" . to_vector . should_equal []
r2.at "bar" . to_vector . should_equal []
Test.specify "should allow creating a table from columns" <|
group_builder.specify "should allow creating a table from columns" <|
r = Table.new [["foo", [1, 2, 3]], ["bar", [False, True, False]]]
r.row_count.should_equal 3
r.at "foo" . to_vector . should_equal [1, 2, 3]
@ -84,7 +90,7 @@ spec =
r2.at "foo" . to_vector . should_equal []
r2.at "bar" . to_vector . should_equal []
Test.specify "should handle error scenarios gracefully" <|
group_builder.specify "should handle error scenarios gracefully" <|
Table.new [["X", [1,2,3]], ["Y", [4]]] . should_fail_with Illegal_Argument
Table.new [["X", [1]], ["X", [2]]] . should_fail_with Illegal_Argument
Table.new [["X", [1]], ["Y", [2], "Z"]] . should_fail_with Illegal_Argument
@ -99,25 +105,25 @@ spec =
Table.new [["X", [1,2,3]], ["", [4,5,6]]] . should_fail_with Invalid_Column_Names
Test.specify "should be internally guarded against creating a table without columns" <|
group_builder.specify "should be internally guarded against creating a table without columns" <|
Test.expect_panic_with (Java_Table.new []) IllegalArgumentException
Test.specify "should correctly infer storage types" <|
varied_type_table.at "strs" . value_type . should_equal Value_Type.Char
varied_type_table.at "ints" . value_type . should_equal Value_Type.Integer
varied_type_table.at "doubles" . value_type . should_equal Value_Type.Float
varied_type_table.at "doubles_and_ints" . value_type . should_equal Value_Type.Float
varied_type_table.at "custom_objects" . value_type . should_equal Value_Type.Mixed
varied_type_table.at "dates" . value_type . should_equal Value_Type.Date
varied_type_table.at "times" . value_type . should_equal Value_Type.Time
varied_type_table.at "datetimes" . value_type . should_equal Value_Type.Date_Time
varied_type_table.at "mixed" . value_type . should_equal Value_Type.Mixed
varied_type_table.at "mixed_dates" . value_type . should_equal Value_Type.Mixed
varied_type_table.at "just_nulls" . value_type . should_equal Value_Type.Mixed
group_builder.specify "should correctly infer storage types" <|
data.varied_type_table.at "strs" . value_type . should_equal Value_Type.Char
data.varied_type_table.at "ints" . value_type . should_equal Value_Type.Integer
data.varied_type_table.at "doubles" . value_type . should_equal Value_Type.Float
data.varied_type_table.at "doubles_and_ints" . value_type . should_equal Value_Type.Float
data.varied_type_table.at "custom_objects" . value_type . should_equal Value_Type.Mixed
data.varied_type_table.at "dates" . value_type . should_equal Value_Type.Date
data.varied_type_table.at "times" . value_type . should_equal Value_Type.Time
data.varied_type_table.at "datetimes" . value_type . should_equal Value_Type.Date_Time
data.varied_type_table.at "mixed" . value_type . should_equal Value_Type.Mixed
data.varied_type_table.at "mixed_dates" . value_type . should_equal Value_Type.Mixed
data.varied_type_table.at "just_nulls" . value_type . should_equal Value_Type.Mixed
pending_python_missing = if Polyglot.is_language_installed "python" . not then
"Can't run Python tests, Python is not installed."
Test.specify "should work with polyglot values coming from Python" pending=pending_python_missing <|
group_builder.specify "should work with polyglot values coming from Python" pending=pending_python_missing <|
enso_dates = ["enso_dates", [Date.new 2022 8 27, Date.new 1999 1 1]]
py_dates = ["py_dates", [py_make_date 2022 8 27, py_make_date 1999 1 1]]
py_objects = ["py_objects", [py_make_object "a" "b", py_make_object "foo" "bar"]]
@ -127,7 +133,7 @@ spec =
(table.at "enso_dates" == table.at "py_dates").to_vector . should_equal [True, True]
Test.specify "should work with polyglot values coming from JS" <|
group_builder.specify "should work with polyglot values coming from JS" <|
enso_dates = ["enso_dates", [Date.new 2022 8 27, Date.new 1999 1 1]]
js_dates = ["js_dates", [js_make_date 2022 8 27, js_make_date 1999 1 1]]
js_objects = ["js_objects", [js_make_object "a" "b", js_make_object "foo" "bar"]]
@ -142,14 +148,14 @@ spec =
(js_converted_dates == table.at "enso_dates").to_vector . should_equal [True, True]
(enso_date_times == table.at "js_dates").to_vector . should_equal [True, True]
Test.specify "should work with a Text value split into lines" <|
group_builder.specify "should work with a Text value split into lines" <|
## This test verifies an issue with passing through a `List<String>` to the table.
words = 'The\nquick\nbrown\nfox\njumps\nover\nthe\nlazy\ndog'.lines
table = Table.new [["words", words]]
table.at "words" . value_type . should_equal Value_Type.Char
table.at "words" . to_vector . should_equal words
Test.specify "should handle Unicode normalization when accessing table columns" <|
group_builder.specify "should handle Unicode normalization when accessing table columns" <|
col1 = ['s\u0301ciana', [1, 2, 3]]
col2 = ['café', [4, 5, 6]]
t = Table.new [col1, col2]
@ -163,8 +169,8 @@ spec =
r2 = Table.new [['ściana', [1, 2, 3]], ['s\u0301ciana', [4, 5, 6]]]
r2.should_fail_with Illegal_Argument
Test.group "Vector conversion" <|
Test.specify "should allow converting columns to valid vectors" <|
suite_builder.group "Vector conversion" group_builder->
group_builder.specify "should allow converting columns to valid vectors" <|
col_1 = Column.from_vector 'x' [1, 2, 3]
col_1.to_vector.reduce (+) . should_equal 6
@ -174,8 +180,8 @@ spec =
col_3 = Column.from_vector 'z' [False, True, False]
col_3.to_vector.map .not . should_equal [True, False, True]
Test.group "Mapping Operations" <|
Test.specify "should allow mapping a function over a column" <|
suite_builder.group "Mapping Operations" group_builder->
group_builder.specify "should allow mapping a function over a column" <|
c_str = Column.from_vector 'x' ['a', 'b', Nothing, 'b']
c_str.map (+ "x") . to_vector . should_equal ['ax', 'bx', Nothing, 'bx']
c_int = Column.from_vector 'x' [1, 2, 1, 5, 1]
@ -187,13 +193,13 @@ spec =
c_any = Column.from_vector 'x' [My.Data 1 6, My.Data 6 3, My.Data 2 5, My.Data 3 4, My.Data 200 300]
c_any.map (_.frobnicate) . to_vector . should_equal [My.Data 6 1, My.Data 3 6, My.Data 5 2, My.Data 4 3, My.Data 300 200]
Test.specify "should correctly handle storage of results" <|
group_builder.specify "should correctly handle storage of results" <|
c_int = Column.from_vector 'year' [2022, 2000, 1999]
r = c_int . map Date_Time.new
r.to_vector . should_equal [Date_Time.new 2022, Date_Time.new 2000, Date_Time.new 1999]
r.value_type . should_equal Value_Type.Date_Time
Test.specify "should allow zipping columns with a custom function" <|
group_builder.specify "should allow zipping columns with a custom function" <|
b = Column.from_vector 'w' [6.3, 3.1, 5.2, 4.6, 8.0]
a = Column.from_vector 'z' ['foo', 'bar', 'baz', 'spam', 'eggs']
c = a.zip b x-> y-> x + y.to_text
@ -212,7 +218,7 @@ spec =
r2.to_vector . should_equal [Time_Of_Day.new 12 0, Time_Of_Day.new 13 30, Time_Of_Day.new 0 45]
r2.value_type . should_equal Value_Type.Time
Test.specify "should handle vectorized equality and fall back on non-vectorized if needed" <|
group_builder.specify "should handle vectorized equality and fall back on non-vectorized if needed" <|
c_str = Column.from_vector 'x' ['a', 'b', Nothing, 'b']
(c_str == 'b').to_vector.should_equal [False, True, Nothing, True]
c_int = Column.from_vector 'x' [1, 2, 1, 5, 1]
@ -224,7 +230,7 @@ spec =
c_any = Column.from_vector 'x' [My.Data 1 6, My.Data 6 3, My.Data 2 5, My.Data 3 4, My.Data 200 300]
(c_any == My.Data 7 0).to_vector.should_equal [True, False, True, True, False]
Test.specify "should switch between maps and zips based on argument type" <|
group_builder.specify "should switch between maps and zips based on argument type" <|
a = Column.from_vector 'x' [0, 1, 7, 3, 6]
b = Column.from_vector 'w' [6.3, 3.1, 5.2, Nothing, 8]
(a + 3.2).to_vector.should_equal [3.2, 4.2, 10.2, 6.2, 9.2]
@ -236,7 +242,7 @@ spec =
both = gt_const && gt_b
both.to_vector.should_equal [False, False, True, False, False]
Test.specify "should handle Text operations" <|
group_builder.specify "should handle Text operations" <|
a = Column.from_vector 'a' ["abab", "abc", Nothing, "bca", "acca"]
nils = [Nothing, Nothing, Nothing, Nothing, Nothing]
@ -254,7 +260,7 @@ spec =
a.contains c . to_vector . should_equal [True, True, Nothing, Nothing, True]
a.contains Nothing . to_vector . should_equal nils
Test.specify "should take Unicode normalization into account in Text operations" <|
group_builder.specify "should take Unicode normalization into account in Text operations" <|
x = Column.from_vector 'x' ['s', 'ś', 's\u0301']
y = Column.from_vector 'y' ['s\u0301', 's\u0301', 'ś']
@ -276,35 +282,35 @@ spec =
z.contains 'ś' . to_vector . should_equal [True, True, False, True, True]
z.contains 's\u0301' . to_vector . should_equal [True, True, False, True, True]
Test.group "Masking Tables" <|
Test.specify "should allow selecting table rows based on a boolean column" <|
suite_builder.group "Masking Tables" group_builder->
group_builder.specify "should allow selecting table rows based on a boolean column" <|
df = (enso_project.data / "simple_empty.csv").read
r = df.filter (Column.from_vector 'x' [True, False, False, True])
r.at "a" . to_vector . should_equal ["1", "10"]
r.at "b" . to_vector . should_equal [2, 11]
r.at "c" . to_vector . should_equal [Nothing, 12]
Test.specify "should treat NA values in the mask as false and extend the mask with NAs" <|
group_builder.specify "should treat NA values in the mask as false and extend the mask with NAs" <|
df = (enso_project.data / "simple_empty.csv").read
r = df.filter (Column.from_vector 'x' [Nothing, True, False])
r.at "a" . to_vector . should_equal ["4"]
r.at "b" . to_vector . should_equal [Nothing]
r.at "c" . to_vector . should_equal [6]
Test.specify "should work correctly if a mask is bigger than the table itself" <|
group_builder.specify "should work correctly if a mask is bigger than the table itself" <|
df = (enso_project.data / "simple_empty.csv").read
r = df.filter (Column.from_vector 'x' [True, False, False, False, True])
r.at "a" . to_vector . should_equal ["1"]
r.at "b" . to_vector . should_equal [2]
r.at "c" . to_vector . should_equal [Nothing]
Test.group "Counting Values" <|
Test.specify "should count missing and non-missing values" <|
suite_builder.group "Counting Values" group_builder->
group_builder.specify "should count missing and non-missing values" <|
col = Column.from_vector 'x' [1, Nothing, 2]
col.length . should_equal 3
col.count . should_equal 2
col.count_nothing . should_equal 1
Test.group "Dropping Missing Values" <|
Test.specify "should correctly handle NaNs with mixed type columns" <|
suite_builder.group "Dropping Missing Values" group_builder->
group_builder.specify "should correctly handle NaNs with mixed type columns" <|
t = Table.new [["X", [1, 2, 3, 4, 5]], ["Y", ["A", "", Nothing, Number.nan, 0]]]
t1 = t.filter_blank_rows when=Blank_Selector.Any_Cell treat_nans_as_blank=False
t1.at "X" . to_vector . should_equal [1, 4, 5]
@ -323,8 +329,8 @@ spec =
t4 = t3.remove_blank_columns treat_nans_as_blank=True
t4.columns . map .name . should_equal ["X"]
Test.group "Info" <|
Test.specify "should return Table information" <|
suite_builder.group "Info" group_builder->
group_builder.specify "should return Table information" <|
a = ["strs", ["a", "b", Nothing, "a"]]
b = ["ints", [1, 2, Nothing, Nothing]]
c = ["objs", [1, "a", "c", Any]]
@ -334,15 +340,15 @@ spec =
i.at "Items Count" . to_vector . should_equal [3, 2, 4]
i.at "Value Type" . to_vector . should_equal [Value_Type.Char, Value_Type.Integer, Value_Type.Mixed]
Test.group "Sorting Tables" <|
Test.specify 'should respect defined comparison operations for custom types' <|
suite_builder.group "Sorting Tables" group_builder->
group_builder.specify 'should respect defined comparison operations for custom types' <|
c_1 = ['id', [1, 2, 3, 4, 5, 6]]
c_2 = ['val', [My.Data 1 2, My.Data 3 4, My.Data 2 1, My.Data 5 2, My.Data 7 0, My.Data 4 -1]]
df = Table.new [c_1, c_2]
r = df.order_by (['val'])
r.at 'id' . to_vector . should_equal [1,3,6,2,4,5]
Test.specify 'should correctly reorder all kinds of columns and leave the original columns untouched' <|
group_builder.specify 'should correctly reorder all kinds of columns and leave the original columns untouched' <|
ord = [0, 3, 2, 4, 1]
ints = [1, 2, 3, 4, 5]
reals = [1.3, 4.6, 3.2, 5.2, 1.6]
@ -401,8 +407,8 @@ spec =
r6 = df.order_by (['mixed_dates'])
r6 . should_fail_with Incomparable_Values
Test.group "Sorting Columns" <|
Test.specify 'should sort columns with specified ordering and missing placement' <|
suite_builder.group "Sorting Columns" group_builder->
group_builder.specify 'should sort columns with specified ordering and missing placement' <|
c = Column.from_vector 'foo' [1, 7, Nothing, 4, 8, Nothing]
r_1 = c.sort
@ -414,13 +420,13 @@ spec =
r_3 = c.sort Sort_Direction.Descending missing_last=False
r_3.to_vector.should_equal [Nothing,Nothing,8,7,4,1]
Test.specify 'should respect defined comparison operations for custom types' <|
group_builder.specify 'should respect defined comparison operations for custom types' <|
c = Column.from_vector 'foo' [My.Data 1 2, My.Data 3 4, My.Data 2 1, My.Data 5 2, My.Data 7 0, My.Data 4 -1]
r = c.sort
r.to_vector.should_equal [My.Data 1 2, My.Data 2 1, My.Data 4 -1, My.Data 3 4, My.Data 5 2, My.Data 7 0]
Test.specify 'should allow passing a custom comparator' <|
group_builder.specify 'should allow passing a custom comparator' <|
c = Column.from_vector 'foo' [My.Data 1 2, My.Data 2 5, My.Data 3 4, My.Data 6 3, Nothing, My.Data 1 0]
cmp a b = Ordering.compare (a.x-a.y).abs (b.x-b.y).abs
r = c.sort by=cmp
@ -433,12 +439,12 @@ spec =
r3 = d.sort by=cmp2 missing_last=False
r3.to_vector.should_equal [Nothing,5,4,3,2,1]
Test.specify 'should handle Unicode characters correctly' <|
group_builder.specify 'should handle Unicode characters correctly' <|
c = Column.from_vector 'c' ['z', 'a', 'd', 'f', 's', 'e\u0301', 'ś', 'ą', 's\u0301', 'w', 'b']
c.sort.to_vector . should_equal ['a', 'ą', 'b', 'd', 'e\u0301', 'f', 's', 's\u0301', 'ś', 'w', 'z']
Test.group "Slicing Tables" <|
Test.specify 'should allow taking first n rows' <|
suite_builder.group "Slicing Tables" group_builder->
group_builder.specify 'should allow taking first n rows' <|
i_1 = ['ix', [1, 2, 3]]
c_1 = ['col', [5, 6, 7]]
c_2 = ['col2', ["a", Nothing, "c"]]
@ -461,7 +467,7 @@ spec =
t_1.at 'col' . take (First 2) . to_vector . should_equal (t_1.at 'col' . to_vector . take (First 2))
t_1.at 'col' . take 2 . to_vector . should_equal (t_1.at 'col' . to_vector . take 2)
Test.specify "should allow taking the last n rows" <|
group_builder.specify "should allow taking the last n rows" <|
i_1 = ['ix', [1, 2, 3]]
c_1 = ['col1', [5, 6, 7]]
c_2 = ['col2', ["a", Nothing, "c"]]
@ -477,7 +483,7 @@ spec =
t_1.at 'col1' . take (Last 2) . to_vector . should_equal (t_1.at 'col1' . to_vector . take (Last 2))
Test.specify "should allow taking/dropping a prefix of rows that satisfy a predicate" <|
group_builder.specify "should allow taking/dropping a prefix of rows that satisfy a predicate" <|
t1 = Table.new [["X", [1, 2, 3, 4, 5, 5]], ["Y", [9, 8, 7, 2, 10, 5]]]
t2 = t1.take (Index_Sub_Range.While row-> row.to_vector.compute Statistic.Sum == 10)
@ -500,7 +506,7 @@ spec =
t6.at "X" . to_vector . should_equal []
t6.at "Y" . to_vector . should_equal []
Test.specify "should allow reversing the table" <|
group_builder.specify "should allow reversing the table" <|
i_1 = ['ix', [1, 2, 3]]
c_1 = ['col1', [5, 6, 7]]
c_2 = ['col2', ["a", Nothing, "c"]]
@ -519,8 +525,8 @@ spec =
t_2.at 'col2' . to_vector . should_equal (expected.at 'col2' . to_vector)
t_2.at 'col3' . to_vector . should_equal (expected.at 'col3' . to_vector)
Test.group "fill"
Test.specify "should allow to fill_nothing from a value" <|
suite_builder.group "fill" group_builder->
group_builder.specify "should allow to fill_nothing from a value" <|
col0 = Column.from_vector "col0" [0, Nothing, 4, 5, Nothing, Nothing]
col1 = Column.from_vector "col1" [Nothing, 200, Nothing, 400, 500, Nothing]
default = 1000
@ -529,7 +535,7 @@ spec =
actual.at "col0" . to_vector . should_equal [0, 1000, 4, 5, 1000, 1000]
actual.at "col1" . to_vector . should_equal [1000, 200, 1000, 400, 500, 1000]
Test.specify "should allow to fill_nothing from other columns" <|
group_builder.specify "should allow to fill_nothing from other columns" <|
col0 = Column.from_vector "col0" [0, Nothing, 4, 5, Nothing, Nothing]
col1 = Column.from_vector "col1" [Nothing, 200, Nothing, 400, 500, Nothing]
defaults = Column.from_vector "def0" [1, 2, 10, 20, Nothing, 30]
@ -538,7 +544,7 @@ spec =
actual.at "col0" . to_vector . should_equal [0, 2, 4, 5, Nothing, 30]
actual.at "col1" . to_vector . should_equal [1, 200, 10, 400, 500, 30]
Test.specify "should allow to fill_empty from a value" <|
group_builder.specify "should allow to fill_empty from a value" <|
col0 = Column.from_vector "col0" ["0", Nothing, "4", "5", Nothing, Nothing]
col1 = Column.from_vector "col1" [Nothing, "200", Nothing, "400", "500", Nothing]
default = "1000"
@ -547,7 +553,7 @@ spec =
actual.at "col0" . to_vector . should_equal ["0", "1000", "4", "5", "1000", "1000"]
actual.at "col1" . to_vector . should_equal ["1000", "200", "1000", "400", "500", "1000"]
Test.specify "should allow to fill_empty from other columns" <|
group_builder.specify "should allow to fill_empty from other columns" <|
col0 = Column.from_vector "col0" ["0", Nothing, "4", "5", Nothing, Nothing]
col1 = Column.from_vector "col1" [Nothing, "200", Nothing, "400", "500", Nothing]
defaults = Column.from_vector "def0" ["1", "2", "10", "20", Nothing, "30"]
@ -556,7 +562,7 @@ spec =
actual.at "col0" . to_vector . should_equal ["0", "2", "4", "5", Nothing, "30"]
actual.at "col1" . to_vector . should_equal ["1", "200", "10", "400", "500", "30"]
Test.specify "fill_nothing should leave other columns alone" <|
group_builder.specify "fill_nothing should leave other columns alone" <|
col0 = Column.from_vector "col0" [0, Nothing, 4, 5, Nothing, Nothing]
col_between = Column.from_vector "col_between" [3, 4, 5, 6, 7, 8]
col1 = Column.from_vector "col1" [Nothing, 200, Nothing, 400, 500, Nothing]
@ -568,7 +574,7 @@ spec =
actual.at "col1" . to_vector . should_equal [1000, 200, 1000, 400, 500, 1000]
actual.column_names . should_equal ["col0", "col_between", "col1"]
Test.specify "fill_nothing should work with integer column selectors" <|
group_builder.specify "fill_nothing should work with integer column selectors" <|
col0 = Column.from_vector "col0" [0, Nothing, 4, 5, Nothing, Nothing]
col_between = Column.from_vector "col_between" [3, 4, 5, 6, 7, 8]
col1 = Column.from_vector "col1" [Nothing, 200, Nothing, 400, 500, Nothing]
@ -580,11 +586,11 @@ spec =
actual.at "col1" . to_vector . should_equal [1000, 200, 1000, 400, 500, 1000]
actual.column_names . should_equal ["col0", "col_between", "col1"]
Test.group "Use First Row As Names" <|
suite_builder.group "Use First Row As Names" group_builder->
expect_column_names names table =
table.columns . map .name . should_equal names frames_to_skip=2
Test.specify "should work happily with mixed types" <|
group_builder.specify "should work happily with mixed types" <|
c_0 = ['A', ["H", "B", "C"]]
c_1 = ['B', [Date.new 1980, Date.new 1979, Date.new 2000]]
c_2 = ['x', [1, 2, 3]]
@ -593,7 +599,7 @@ spec =
table = Table.new [c_0, c_1, c_2, c_3, c_4]
expect_column_names ["H", "1980-01-01", "1", "5.3", "True"] table.use_first_row_as_names
Test.specify "should correctly handle problems: invalid names ''" <|
group_builder.specify "should correctly handle problems: invalid names ''" <|
c_0 = ['A', ["", "B", "C"]]
c_1 = ['B', [Date.new 1980, Date.new 1979, Date.new 2000]]
c_2 = ['x', [1, 2, 3]]
@ -605,7 +611,7 @@ spec =
problems = [Invalid_Column_Names.Error [""]]
Problems.test_problem_handling action problems tester
Test.specify "should correctly handle problems: invalid names Nothing" <|
group_builder.specify "should correctly handle problems: invalid names Nothing" <|
c_0 = ['A', ["A", "B", "C"]]
c_1 = ['B', [Date.new 1980, Date.new 1979, Date.new 2000]]
c_2 = ['x', [Nothing, 2, 3]]
@ -617,7 +623,7 @@ spec =
problems = [Invalid_Column_Names.Error [Nothing]]
Problems.test_problem_handling action problems tester
Test.specify "should correctly handle problems: multiple invalid names" <|
group_builder.specify "should correctly handle problems: multiple invalid names" <|
c_0 = ['A', ["", "B", "C"]]
c_1 = ['B', [Date.new 1980, Date.new 1979, Date.new 2000]]
c_2 = ['x', [Nothing, 2, 3]]
@ -629,7 +635,7 @@ spec =
problems = [Invalid_Column_Names.Error ["", Nothing]]
Problems.test_problem_handling action problems tester
Test.specify "should correctly handle problems: duplicate names" <|
group_builder.specify "should correctly handle problems: duplicate names" <|
c_0 = ['A', ["A", "B", "C"]]
c_1 = ['B', ["A", "B", "C"]]
c_2 = ['x', ["A", "B", "C"]]
@ -640,8 +646,8 @@ spec =
problems = [Duplicate_Output_Column_Names.Error ["A", "A", "A"]]
Problems.test_problem_handling action problems tester
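The problem-handling specs above all funnel through one helper pattern. A minimal sketch, as it would sit inside the spec file above (imports as in that file), assuming `Problems.test_problem_handling` runs the curried action under each `on_problems` mode and compares the reported problems with the expected list:

    # `action` is curried over the problem-handling strategy, `tester`
    # validates the successful value, `problems` lists the expected errors.
    table = Table.new [["A", ["", "B", "C"]], ["x", [1, 2, 3]]]
    action = table.use_first_row_as_names on_problems=_
    tester t = t.row_count . should_equal 2
    problems = [Invalid_Column_Names.Error [""]]
    Problems.test_problem_handling action problems tester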
Test.group "[In-Memory] Table.aggregate" <|
Test.specify "should return columns with correct types" <|
suite_builder.group "[In-Memory] Table.aggregate" group_builder->
group_builder.specify "should return columns with correct types" <|
dates = ["dates", [Date.new 1999, Date.new 2000, Date.new 2000, Date.new 2000]]
texts = ["texts", ["a", "bb", "a", "bb"]]
mixed = ["mixed", [1, "a", "a", 1]]
@ -666,7 +672,7 @@ spec =
t4.info.at "Column" . to_vector . should_equal ["mixed", "Sum ints", "Sum floats"]
t4.info.at "Value Type" . to_vector . should_equal [Value_Type.Mixed, Value_Type.Float, Value_Type.Float]
Test.specify "should take Unicode normalization into account when grouping by Text" <|
group_builder.specify "should take Unicode normalization into account when grouping by Text" <|
texts = ["texts", ['ściana', 'ściana', 'łąka', 's\u0301ciana', 'ła\u0328ka', 'sciana']]
ints = ["ints", [1, 2, 4, 8, 16, 32]]
table = Table.new [texts, ints]
@ -677,7 +683,7 @@ spec =
r2 = table.aggregate [Count_Distinct "texts"]
r2.at "Count Distinct texts" . to_vector . should_equal [3]
Test.specify "should be able to aggregate over enso Types" <|
group_builder.specify "should be able to aggregate over enso Types" <|
weekday_table = Table.new [["days", [Day_Of_Week.Monday, Day_Of_Week.Monday, Day_Of_Week.Monday, Day_Of_Week.Tuesday, Day_Of_Week.Sunday]], ["group", [1,1,2,1,2]]]
r1 = weekday_table.aggregate [Group_By "days"] . order_by "days"
@ -692,8 +698,8 @@ spec =
more guarantees: preserving the order of rows and always selecting the first
row among those sharing the same distinctness key. For database tests (to be
added later) we cannot rely on ordering.
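A concrete illustration of those guarantees, mirroring the specs in the group below:

    # In-memory distinct keeps the first row of each key group and
    # preserves the original ordering of rows.
    t = Table.new [["A", ["a", "b", "a"]], ["B", [1, 2, 3]]]
    r = t.distinct ["A"]
    r.at "A" . to_vector . should_equal ["a", "b"]
    r.at "B" . to_vector . should_equal [1, 2]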
Test.group "[In-Memory] Table.distinct" <|
Test.specify "should allow to select distinct rows based on a subset of columns, returning the first row from each group" <|
suite_builder.group "[In-Memory] Table.distinct" group_builder->
group_builder.specify "should allow to select distinct rows based on a subset of columns, returning the first row from each group" <|
a = ["A", ["a", "a", "a", "a", "a", "a"]]
b = ["B", [1, 1, 2, 2, 1, 2]]
c = ["C", [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]]
@ -704,7 +710,7 @@ spec =
r2.at "B" . to_vector . should_equal [1, 2]
r2.at "C" . to_vector . should_equal [0.1, 0.3]
Test.specify "should handle nulls correctly and preserve original ordering" <|
group_builder.specify "should handle nulls correctly and preserve original ordering" <|
a = ["A", ["a", Nothing, "b", "a", "b", Nothing, "a", "b"]]
b = ["B", [1, 2, 3, 4, 5, 6, 7, 8]]
t = Table.new [a, b]
@ -712,11 +718,11 @@ spec =
r.at "A" . to_vector . should_equal ["a", Nothing, "b"]
r.at "B" . to_vector . should_equal [1, 2, 3]
Test.specify "should handle Unicode normalization of keys correctly" <|
group_builder.specify "should handle Unicode normalization of keys correctly" <|
t1 = Table.new [["X", ['ś', 's\u0301', 's', 'ś']]]
t1.distinct . at "X" . to_vector . should_equal ['ś', 's']
Test.specify "should allow to control case-sensitivity of keys, correctly handling Unicode folding" <|
group_builder.specify "should allow to control case-sensitivity of keys, correctly handling Unicode folding" <|
x = ["X", ['A', 'a', 'enso', 'śledź', 'Enso', 'A', 's\u0301ledz\u0301']]
y = ["Y", [1, 2, 3, 4, 5, 6, 7]]
t1 = Table.new [x, y]
@ -731,7 +737,7 @@ spec =
t2 = Table.new [["X", ["łąka", "STRASSE", "Straße", "ffi", "ŁĄka", "ffi"]]]
t2.distinct case_sensitivity=Case_Sensitivity.Insensitive . at "X" . to_vector . should_equal ["łąka", "STRASSE", "ffi"]
Test.specify "should report a warning if the key contains floating point values" <|
group_builder.specify "should report a warning if the key contains floating point values" <|
t1 = Table.new [["X", [3.0, 1.0, 2.0, 2.0, 1.0]]]
action1 = t1.distinct on_problems=_
tester1 table =
@ -746,15 +752,17 @@ spec =
problems2 = [Floating_Point_Equality.Error "X"]
Problems.test_problem_handling action2 problems2 tester2
Test.specify "should be able to create distinct on Enso objects" <|
group_builder.specify "should be able to create distinct on Enso objects" <|
t = Table.new [["X", [My.Data 1 2, My.Data 3 4, My.Data 1 2]]]
t.distinct ["X"] . at "X" . to_vector . should_equal [My.Data 1 2, My.Data 3 4]
t2 = Table.new [["X", [Day_Of_Week.Monday, Day_Of_Week.Tuesday, Day_Of_Week.Monday, Day_Of_Week.Monday, Day_Of_Week.Tuesday, Day_Of_Week.Wednesday]]]
t2.distinct ["X"] . at "X" . to_vector . should_equal [Day_Of_Week.Monday, Day_Of_Week.Tuesday, Day_Of_Week.Wednesday]
Test.group "[In-Memory] Table.filter" <|
Test.specify "by a custom predicate" <|
suite_builder.group "[In-Memory] Table.filter" group_builder->
data = Data.setup
group_builder.specify "by a custom predicate" <|
t = Table.new [["ix", [1, 2, 3, 4, 5]], ["X", [5, 0, 4, 5, 1]]]
t1 = t.filter "X" (x -> x % 2 == 0)
t1.at "ix" . to_vector . should_equal [2, 3]
@ -768,7 +776,7 @@ spec =
t3.at "ix" . to_vector . should_equal [1, 4, 5]
t3.at "X" . to_vector . should_equal [5, 5, 1]
Test.specify "by custom object comparisons" <|
group_builder.specify "by custom object comparisons" <|
t = Table.new [["ix", [1, 2, 3, 4, 5]], ["X", [My.Data 1 2, My.Data 300 400, My.Data 100 200, My.Data 5 6, My.Data 7 8]]]
t1 = t.filter "X" (Filter_Condition.Between (My.Data 10 20) (My.Data 300 400))
t1.at "ix" . to_vector . should_equal [2, 3]
@ -785,7 +793,7 @@ spec =
t.filter "X" (Filter_Condition.Less than=c) . at "X" . to_vector . should_equal [My.Data 100 200, My.Data 5 6]
t.filter "X" (Filter_Condition.Greater than=c) . at "X" . to_vector . should_equal []
Test.specify "by a boolean mask of varying length" <|
group_builder.specify "by a boolean mask of varying length" <|
t = Table.new [["A", [1, 2, 3]], ["B", [4, 5, 6]]]
t1 = t.filter (Column.from_vector "f" [False, True])
@ -796,7 +804,7 @@ spec =
t2.at "A" . to_vector . should_equal [2, 3]
t2.at "B" . to_vector . should_equal [5, 6]
Test.specify "by an Is_In check, on various types of columns" <|
group_builder.specify "by an Is_In check, on various types of columns" <|
ins = Table.new <|
str = ["str", ["c", "b", Nothing, Nothing]]
int = ["int", [1, 2, 3, 3]]
@ -809,29 +817,29 @@ spec =
nulls = ["nulls", [Nothing, Nothing, Nothing, 0]]
custom = ["custom", [2, My.Data 2 1, Nothing, Nothing]]
[str, int, int2, dbl, dates, dts, tod, mix, nulls, custom]
varied_type_table.filter "strs" (Filter_Condition.Is_In (ins.at "str")) . at "strs" . to_vector . should_equal ["b", "c"]
varied_type_table.filter "strs" (Filter_Condition.Is_In (ins.at "str" . to_vector)) . at "strs" . to_vector . should_equal ["b", "c"]
varied_type_table.filter "ints" (Filter_Condition.Is_In (ins.at "int")) . at "ints" . to_vector . should_equal [1, 2]
varied_type_table.filter "ints" (Filter_Condition.Is_In (ins.at "int" . to_vector)) . at "ints" . to_vector . should_equal [1, 2]
varied_type_table.filter "ints" (Filter_Condition.Is_In (ins.at "int2")) . at "ints" . to_vector . should_equal [1]
varied_type_table.filter "ints" (Filter_Condition.Is_In (ins.at "int2" . to_vector)) . at "ints" . to_vector . should_equal [1]
varied_type_table.filter "doubles" (Filter_Condition.Is_In (ins.at "dbl")) . at "doubles" . to_vector . should_equal [0.0]
varied_type_table.filter "doubles" (Filter_Condition.Is_In (ins.at "dbl" . to_vector)) . at "doubles" . to_vector . should_equal [0.0]
varied_type_table.filter "dates" (Filter_Condition.Is_In (ins.at "dates")) . at "dates" . to_vector . should_equal [Date.new 2000, Date.new 1999 1 1]
varied_type_table.filter "dates" (Filter_Condition.Is_In (ins.at "dates" . to_vector)) . at "dates" . to_vector . should_equal [Date.new 2000, Date.new 1999 1 1]
varied_type_table.filter "datetimes" (Filter_Condition.Is_In (ins.at "dts")) . at "datetimes" . to_vector . should_equal [Date_Time.new 2022 8 27 11 22 25]
varied_type_table.filter "datetimes" (Filter_Condition.Is_In (ins.at "dts" . to_vector)) . at "datetimes" . to_vector . should_equal [Date_Time.new 2022 8 27 11 22 25]
varied_type_table.filter "times" (Filter_Condition.Is_In (ins.at "tod")) . at "times" . to_vector . should_equal [Time_Of_Day.new 18 00]
varied_type_table.filter "times" (Filter_Condition.Is_In (ins.at "tod" . to_vector)) . at "times" . to_vector . should_equal [Time_Of_Day.new 18 00]
varied_type_table.filter "mixed" (Filter_Condition.Is_In [42, "a", 1, Nothing, Date.new 2022 8 27, Date_Time.new 2022 8 27]) . at "mixed" . to_vector . should_equal [1, "a", Date.new 2022 8 27]
varied_type_table.filter "mixed" (Filter_Condition.Is_In (ins.at "mix")) . at "mixed" . to_vector . should_equal [1]
varied_type_table.filter "mixed" (Filter_Condition.Is_In (ins.at "mix" . to_vector)) . at "mixed" . to_vector . should_equal [1]
varied_type_table.filter "just_nulls" (Filter_Condition.Is_In []) . at "just_nulls" . to_vector . should_equal []
varied_type_table.filter "just_nulls" (Filter_Condition.Is_In (ins.at "nulls")) . at "just_nulls" . to_vector . should_equal []
varied_type_table.filter "just_nulls" (Filter_Condition.Is_In (ins.at "nulls" . to_vector)) . at "just_nulls" . to_vector . should_equal []
varied_type_table.filter "just_nulls" (Filter_Condition.Is_In [0]) . at "just_nulls" . to_vector . should_equal []
varied_type_table.filter "custom_objects" (Filter_Condition.Is_In (ins.at "custom")) . at "custom_objects" . to_vector . should_equal [My.Data 1 2]
varied_type_table.filter "custom_objects" (Filter_Condition.Is_In (ins.at "custom" . to_vector)) . at "custom_objects" . to_vector . should_equal [My.Data 1 2]
data.varied_type_table.filter "strs" (Filter_Condition.Is_In (ins.at "str")) . at "strs" . to_vector . should_equal ["b", "c"]
data.varied_type_table.filter "strs" (Filter_Condition.Is_In (ins.at "str" . to_vector)) . at "strs" . to_vector . should_equal ["b", "c"]
data.varied_type_table.filter "ints" (Filter_Condition.Is_In (ins.at "int")) . at "ints" . to_vector . should_equal [1, 2]
data.varied_type_table.filter "ints" (Filter_Condition.Is_In (ins.at "int" . to_vector)) . at "ints" . to_vector . should_equal [1, 2]
data.varied_type_table.filter "ints" (Filter_Condition.Is_In (ins.at "int2")) . at "ints" . to_vector . should_equal [1]
data.varied_type_table.filter "ints" (Filter_Condition.Is_In (ins.at "int2" . to_vector)) . at "ints" . to_vector . should_equal [1]
data.varied_type_table.filter "doubles" (Filter_Condition.Is_In (ins.at "dbl")) . at "doubles" . to_vector . should_equal [0.0]
data.varied_type_table.filter "doubles" (Filter_Condition.Is_In (ins.at "dbl" . to_vector)) . at "doubles" . to_vector . should_equal [0.0]
data.varied_type_table.filter "dates" (Filter_Condition.Is_In (ins.at "dates")) . at "dates" . to_vector . should_equal [Date.new 2000, Date.new 1999 1 1]
data.varied_type_table.filter "dates" (Filter_Condition.Is_In (ins.at "dates" . to_vector)) . at "dates" . to_vector . should_equal [Date.new 2000, Date.new 1999 1 1]
data.varied_type_table.filter "datetimes" (Filter_Condition.Is_In (ins.at "dts")) . at "datetimes" . to_vector . should_equal [Date_Time.new 2022 8 27 11 22 25]
data.varied_type_table.filter "datetimes" (Filter_Condition.Is_In (ins.at "dts" . to_vector)) . at "datetimes" . to_vector . should_equal [Date_Time.new 2022 8 27 11 22 25]
data.varied_type_table.filter "times" (Filter_Condition.Is_In (ins.at "tod")) . at "times" . to_vector . should_equal [Time_Of_Day.new 18 00]
data.varied_type_table.filter "times" (Filter_Condition.Is_In (ins.at "tod" . to_vector)) . at "times" . to_vector . should_equal [Time_Of_Day.new 18 00]
data.varied_type_table.filter "mixed" (Filter_Condition.Is_In [42, "a", 1, Nothing, Date.new 2022 8 27, Date_Time.new 2022 8 27]) . at "mixed" . to_vector . should_equal [1, "a", Date.new 2022 8 27]
data.varied_type_table.filter "mixed" (Filter_Condition.Is_In (ins.at "mix")) . at "mixed" . to_vector . should_equal [1]
data.varied_type_table.filter "mixed" (Filter_Condition.Is_In (ins.at "mix" . to_vector)) . at "mixed" . to_vector . should_equal [1]
data.varied_type_table.filter "just_nulls" (Filter_Condition.Is_In []) . at "just_nulls" . to_vector . should_equal []
data.varied_type_table.filter "just_nulls" (Filter_Condition.Is_In (ins.at "nulls")) . at "just_nulls" . to_vector . should_equal []
data.varied_type_table.filter "just_nulls" (Filter_Condition.Is_In (ins.at "nulls" . to_vector)) . at "just_nulls" . to_vector . should_equal []
data.varied_type_table.filter "just_nulls" (Filter_Condition.Is_In [0]) . at "just_nulls" . to_vector . should_equal []
data.varied_type_table.filter "custom_objects" (Filter_Condition.Is_In (ins.at "custom")) . at "custom_objects" . to_vector . should_equal [My.Data 1 2]
data.varied_type_table.filter "custom_objects" (Filter_Condition.Is_In (ins.at "custom" . to_vector)) . at "custom_objects" . to_vector . should_equal [My.Data 1 2]
t2 = Table.new [["ints", [1, 2, 3]], ["doubles", [1.2, 0.0, 1.0]]]
t2.filter "ints" (Filter_Condition.Is_In [2.0, 1.5, 3, 4]) . at "ints" . to_vector . should_equal [2, 3]
@ -874,8 +882,8 @@ spec =
t2.filter "Y" (Filter_Condition.Is_In in_vector) . at "Y" . to_vector . should_equal expected_neg_vector
t2.filter "Y" (Filter_Condition.Is_In in_column) . at "Y" . to_vector . should_equal expected_neg_vector
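Each `Is_In` assertion above appears in two flavours because the condition accepts either a `Column` or a plain `Vector`. A minimal sketch of the pairing:

    # Both forms should select the same rows.
    t = Table.new [["X", [1, 2, 3]]]
    ins = Column.from_vector "in" [2, 3]
    t.filter "X" (Filter_Condition.Is_In ins) . at "X" . to_vector . should_equal [2, 3]
    t.filter "X" (Filter_Condition.Is_In (ins.to_vector)) . at "X" . to_vector . should_equal [2, 3]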
Test.group "[In-Memory-specific] Table.join" <|
Test.specify "should correctly report unsupported cross-backend joins" <|
suite_builder.group "[In-Memory-specific] Table.join" group_builder->
group_builder.specify "should correctly report unsupported cross-backend joins" <|
t = Table.new [["X", [1, 2, 3]]]
Panic.recover Type_Error (t.join 42) . should_fail_with Type_Error
@ -886,8 +894,8 @@ spec =
r.should_fail_with Illegal_Argument
r.catch.message . contains "cross-backend" . should_be_true
Test.group "[In-Memory-specific] Table.set" <|
Test.specify "should allow using vector and range for a new column" <|
suite_builder.group "[In-Memory-specific] Table.set" group_builder->
group_builder.specify "should allow using vector and range for a new column" <|
t = Table.new [["X", [1, 2, 3]]]
t_vec = t.set [10, 20, 30]
@ -906,7 +914,7 @@ spec =
t_month_range.column_names.should_equal ["X", "Month"]
t_month_range.at "Month" . to_vector . should_equal [Date.new 2020 1 1, Date.new 2020 2 1, Date.new 2020 3 1]
Test.specify "should fail if there is a length mismatch on a new column" <|
group_builder.specify "should fail if there is a length mismatch on a new column" <|
t = Table.new [["X", [1, 2, 3]]]
c = Column.from_vector "Column" [10, 20]
@ -915,7 +923,11 @@ spec =
t.set (100.up_to 102) . should_fail_with Row_Count_Mismatch
t.set ((Date.new 2020 1 1).up_to (Date.new 2020 1 3)) . should_fail_with Row_Count_Mismatch
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
## JS indexes months from 0, so we need to subtract 1.
foreign js js_make_date year month day = """
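Every migrated file converges on the same shape. A minimal self-contained sketch of the builder API (the group and spec bodies are placeholders):

    from Standard.Base import all
    from Standard.Test_New import all

    add_specs suite_builder =
        suite_builder.group "Example group" group_builder->
            group_builder.specify "an example spec" <|
                (1 + 1) . should_equal 2

    main =
        suite = Test.build suite_builder->
            add_specs suite_builder
        suite.run_with_filter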

View File

@ -4,49 +4,66 @@ from Standard.Table import Table, Delimited, Column, Data_Formatter
import Standard.Table.Data.Type.Value_Type.Value_Type
from Standard.Table.Extensions.Table_Conversions import all
from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all
from project.Util import all
spec =
type Data
Value ~data
c_number self = self.data.at 0
c_type self = self.data.at 1
c_time self = self.data.at 2
expected self = self.data.at 3
table self = self.data.at 4
setup = Data.Value <|
c_number = ["Serial number", ["2LMXK1", "2LMXK1", "JEMLP3", "JEMLP3", "BR83GP", "BR83GP"]]
c_type = ["Movement type", [101, 301, 101, 203, 101, 301]]
c_time = ["Posting time", [Time_Of_Day.new 9 0, Time_Of_Day.new 14 0 12, Time_Of_Day.new 9 0, Time_Of_Day.new 17 30, Time_Of_Day.new 9 0 4, Time_Of_Day.new 15 30]]
expected = Table.new [c_number, c_type, c_time]
Test.group "File.read (Delimited) should work with Time_Of_Days" <|
table = (enso_project.data / "time_of_day_sample.csv").read
Test.specify "should be able to read in a table with dates" <|
table.column_count.should_equal 3
table.info.at "Column" . to_vector . should_equal ['Serial number','Movement type', 'Posting time']
table.info.at "Value Type" . to_vector . should_equal [Value_Type.Char, Value_Type.Integer, Value_Type.Time]
table.row_count.should_equal 6
[c_number, c_type, c_time, expected, table]
Test.specify "should be able to treat a single value as a Time_Of_Days" <|
from_column = table.at 'Posting time'
add_specs suite_builder =
data = Data.setup
suite_builder.group "File.read (Delimited) should work with Time_Of_Days" group_builder->
group_builder.specify "should be able to read in a table with dates" <|
data.table.column_count.should_equal 3
data.table.info.at "Column" . to_vector . should_equal ['Serial number','Movement type', 'Posting time']
data.table.info.at "Value Type" . to_vector . should_equal [Value_Type.Char, Value_Type.Integer, Value_Type.Time]
data.table.row_count.should_equal 6
group_builder.specify "should be able to treat a single value as a Time_Of_Days" <|
from_column = data.table.at 'Posting time'
from_column.at 5 . hour . should_equal 15
from_column.at 5 . minute . should_equal 30
from_column.at 5 . should_equal (Time_Of_Day.new 15 30)
Test.specify "should be able to compare columns and table" <|
table.at 'Serial number' . should_equal (Column.from_vector c_number.first c_number.second)
table.at 'Movement type' . should_equal (Column.from_vector c_type.first c_type.second)
table.at 'Posting time' . should_equal (Column.from_vector c_time.first c_time.second)
table.should_equal expected
group_builder.specify "should be able to compare columns and table" <|
data.table.at 'Serial number' . should_equal (Column.from_vector data.c_number.first data.c_number.second)
data.table.at 'Movement type' . should_equal (Column.from_vector data.c_type.first data.c_type.second)
data.table.at 'Posting time' . should_equal (Column.from_vector data.c_time.first data.c_time.second)
data.table.should_equal data.expected
Test.group "Should be able to serialise a table with Time_Of_Days to Text" <|
Test.specify "should serialise back to input" <|
suite_builder.group "Should be able to serialise a table with Time_Of_Days to Text" group_builder->
group_builder.specify "should serialise back to input" <|
expected_text = normalize_lines <|
(enso_project.data / "time_of_day_sample_normalized_hours.csv").read_text
delimited = Text.from expected format=(Delimited "," line_endings=Line_Ending_Style.Unix)
delimited = Text.from data.expected format=(Delimited "," line_endings=Line_Ending_Style.Unix)
delimited.should_equal expected_text
Test.specify "should serialise dates with format" <|
test_table = Table.new [c_time]
group_builder.specify "should serialise dates with format" <|
test_table = Table.new [data.c_time]
expected_text = 'Posting time\n09-00-00\n14-00-12\n09-00-00\n17-30-00\n09-00-04\n15-30-00\n'
data_formatter = Data_Formatter.Value . with_datetime_formats time_formats=["HH-mm-ss"]
delimited = Text.from test_table format=(Delimited "," value_formatter=data_formatter line_endings=Line_Ending_Style.Unix)
delimited.should_equal expected_text
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
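The `Data` wrapper above is the recurring fixture pattern of this migration: the `~data` constructor field is lazy, so the shared table is only computed once a spec first touches an accessor, not at module load time. A stripped-down sketch (field names illustrative):

    from Standard.Base import all
    from Standard.Table import Table

    type Data
        Value ~data

        table self = self.data.at 0

        setup = Data.Value <|
            t = Table.new [["X", [1, 2, 3]]]
            [t]

    # Inside add_specs: data = Data.setup, then specs read data.table.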

View File

@ -1,6 +1,6 @@
from Standard.Base import all hiding IO
from Standard.Test import Test_Suite
from Standard.Test_New import Test
import project.Database
import project.Formatting
@ -8,9 +8,11 @@ import project.Helpers
import project.In_Memory
import project.IO
main = Test_Suite.run_main <|
In_Memory.Main.spec
IO.Main.spec
Formatting.Main.spec
Database.Main.spec
Helpers.Main.spec
main =
suite = Test.build suite_builder->
In_Memory.Main.add_specs suite_builder
IO.Main.add_specs suite_builder
Formatting.Main.add_specs suite_builder
Database.Main.add_specs suite_builder
Helpers.Main.add_specs suite_builder
suite.run_with_filter

View File

@ -5,13 +5,11 @@ import Standard.Database.Data.Column.Column as Database_Column
import Standard.Table.Data.Table.Table as In_Memory_Table
import Standard.Table.Data.Column.Column as In_Memory_Column
from Standard.Test import Test
import Standard.Test.Extensions
import Standard.Test.Test_Result.Test_Result
from Standard.Test_New import all
polyglot java import org.enso.base_test_helpers.FileSystemHelper
In_Memory_Table.should_equal : Any -> Integer -> Test_Result
In_Memory_Table.should_equal : Any -> Integer -> Any
In_Memory_Table.should_equal self expected frames_to_skip=0 =
loc = Meta.get_source_location 1+frames_to_skip
case expected of
@ -26,7 +24,7 @@ In_Memory_Table.should_equal self expected frames_to_skip=0 =
Test.fail msg
_ -> Test.fail "Got a Table, but expected a "+expected.to_display_text+' (at '+loc+').'
In_Memory_Column.should_equal : Any -> Integer -> Test_Result
In_Memory_Column.should_equal : Any -> Integer -> Any
In_Memory_Column.should_equal self expected frames_to_skip=0 =
loc = Meta.get_source_location 1+frames_to_skip
case expected of
@ -38,13 +36,13 @@ In_Memory_Column.should_equal self expected frames_to_skip=0 =
self.to_vector.should_equal expected.to_vector 2+frames_to_skip
_ -> Test.fail "Got a Column, but expected a "+expected.to_display_text+' (at '+loc+').'
Database_Table.should_equal : Database_Table -> Integer -> Test_Result
Database_Table.should_equal : Database_Table -> Integer -> Any
Database_Table.should_equal self expected frames_to_skip=0 =
t0 = self.read
t1 = expected.read
t0 . should_equal t1 frames_to_skip
Database_Column.should_equal : Database_Column -> Integer -> Test_Result
Database_Column.should_equal : Database_Column -> Integer -> Any
Database_Column.should_equal self expected frames_to_skip=0 =
t0 = self.read
t1 = expected.read