Fixing Database tests and Snowflake Dialect - part 2 out of ... (#10319)

- Part of #9486
- Fixing our tests to not rely on deterministic ordering of created Tables in Database backends
- Before, SQLite and Postgres used to mostly return rows in the order in which they were inserted, but Snowflake does not.
- Fixing various parts of Snowflake dialect.
This commit is contained in:
Radosław Waśko 2024-06-27 16:54:00 +02:00 committed by GitHub
parent 410298e173
commit db4f7ab3b5
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
28 changed files with 568 additions and 700 deletions

View File

@ -20,6 +20,7 @@ import project.Dialect.Dialect
import project.Internal.Connection.Entity_Naming_Properties.Entity_Naming_Properties
import project.Internal.Hidden_Table_Registry
import project.Internal.IR.Context.Context
import project.Internal.IR.From_Spec.From_Spec
import project.Internal.IR.Query.Query
import project.Internal.IR.SQL_Expression.SQL_Expression
import project.Internal.SQL_Type_Reference.SQL_Type_Reference
@ -442,6 +443,22 @@ type Connection
ref = self.hidden_table_registry.make_reference table_name
make_table_for_name self table_name table_name ref
## PRIVATE
ADVANCED
Creates a `DB_Table` that is not backed by an existing table in the
Database, but is created in a query by constructing a `VALUES` expression.
We limit these tables to at most 256 cells to avoid creating queries
that are too large. If you need a larger table, create a temporary table instead.
Note that the types of columns in the created table will depend on how
the Database interprets the provided values and may not reflect the types
of the source table. If you need a more sophisticated type mapping
mechanism, use `create_table` instead.
create_literal_table self (source : Table) (alias : Text) -> DB_Table =
DB_Table_Module.make_literal_table self (source.columns.map .to_vector) source.column_names alias
## PRIVATE
make_table_types_selector : Connection -> Widget
make_table_types_selector connection =

View File

@ -1069,38 +1069,8 @@ type DB_Table
- column_names: The names of the columns of the new table.
make_table_from_vectors : Vector (Vector Any) -> Vector Text -> DB_Table
make_table_from_vectors self column_vectors column_names =
Runtime.assert (column_vectors.length == column_names.length) "column_vectors and column_names must have the same length"
# Assume the columns are all the same length; if not, it will be an error anyway.
total_size = if column_vectors.is_empty || column_vectors.at 0 . is_empty then 0 else
column_vectors.length * (column_vectors.at 0 . length)
if total_size == 0 then Error.throw (Illegal_Argument.Error "Vectors cannot be empty") else
if total_size > MAX_LITERAL_ELEMENT_COUNT then Error.throw (Illegal_Argument.Error "Too many elements for table literal ("+total_size.to_text+"): materialize a table into the database instead") else
type_mapping = self.connection.dialect.get_type_mapping
values_to_type_ref column_vector =
value_type = Value_Type_Helpers.find_common_type_for_arguments column_vector
sql_type = case value_type of
Nothing -> SQL_Type.null
_ -> type_mapping.value_type_to_sql value_type Problem_Behavior.Ignore
SQL_Type_Reference.from_constant sql_type
literal_table_name = self.connection.base_connection.table_naming_helper.generate_random_table_name "enso-literal-"
from_spec = From_Spec.Literal_Values column_vectors column_names literal_table_name
context = Context.for_subquery from_spec
internal_columns = 0.up_to column_vectors.length . map i->
column_vector = column_vectors.at i
column_name = column_names.at i
type_ref = values_to_type_ref column_vector.to_vector
generated_literal_column_name = "column"+(i+1).to_text
sql_expression = SQL_Expression.Column literal_table_name generated_literal_column_name
Internal_Column.Value column_name type_ref sql_expression
DB_Table.Value literal_table_name self.connection internal_columns context
literal_table_name = self.connection.base_connection.table_naming_helper.generate_random_table_name "enso-literal-"
make_literal_table self.connection column_vectors column_names literal_table_name
## PRIVATE
@ -3073,3 +3043,35 @@ Table_Ref.from (that:DB_Table) = Table_Ref.Value that
## PRIVATE
The largest dataset that can be used to make a literal table, expressed in number of elements.
MAX_LITERAL_ELEMENT_COUNT = 256
## PRIVATE
make_literal_table connection column_vectors column_names alias =
Runtime.assert (column_vectors.length == column_names.length) "column_vectors and column_names must have the same length"
# Assume the columns are all the same length; if not, it will be an error anyway.
total_size = if column_vectors.is_empty || column_vectors.at 0 . is_empty then 0 else
column_vectors.length * (column_vectors.at 0 . length)
if total_size == 0 then Error.throw (Illegal_Argument.Error "Vectors cannot be empty") else
if total_size > MAX_LITERAL_ELEMENT_COUNT then Error.throw (Illegal_Argument.Error "Too many elements for table literal ("+total_size.to_text+"): materialize a table into the database instead") else
type_mapping = connection.dialect.get_type_mapping
values_to_type_ref column_vector =
value_type = Value_Type_Helpers.find_common_type_for_arguments column_vector
sql_type = case value_type of
Nothing -> SQL_Type.null
_ -> type_mapping.value_type_to_sql value_type Problem_Behavior.Ignore
SQL_Type_Reference.from_constant sql_type
from_spec = From_Spec.Literal_Values column_vectors column_names alias
context = Context.for_subquery from_spec
internal_columns = 0.up_to column_vectors.length . map i->
column_vector = column_vectors.at i
column_name = column_names.at i
type_ref = values_to_type_ref column_vector.to_vector
sql_expression = SQL_Expression.Column alias column_name
Internal_Column.Value column_name type_ref sql_expression
DB_Table.Value alias connection internal_columns context

View File

@ -29,7 +29,9 @@ type Internal_Dialect
identifier name in such a way that it can be used in the query; that
usually consists of wrapping the name in quotes and escaping any quotes
within it.
Value (operation_map:(Map Text (Vector (SQL_Builder->SQL_Builder)))) (wrap_identifier_raw:(Text->Text))
- make_table_literal: A function that generates a SQL expression for a
table literal.
Value (operation_map:(Map Text (Vector (SQL_Builder->SQL_Builder)))) (wrap_identifier_raw:(Text->Text)) (make_table_literal : Vector (Vector Text) -> Vector Text -> Text -> SQL_Builder)
## PRIVATE
wrap_identifier : Text -> SQL_Builder
@ -45,7 +47,7 @@ type Internal_Dialect
extend_with : Vector Any -> Internal_Dialect
extend_with self mappings =
new_map = mappings.fold self.operation_map (m -> el -> m.insert (el.at 0) (el.at 1))
Internal_Dialect.Value new_map self.wrap_identifier_raw
Internal_Dialect.Value new_map self.wrap_identifier_raw self.make_table_literal
## PRIVATE
Checks if an operation is supported by the dialect.
@ -53,6 +55,10 @@ type Internal_Dialect
is_supported self operation =
self.operation_map.contains_key operation
## PRIVATE
override_make_table_literal self (make_table_literal : Vector (Vector Text) -> Vector Text -> Text -> SQL_Builder) -> Internal_Dialect =
Internal_Dialect.Value self.operation_map self.wrap_identifier_raw make_table_literal
## PRIVATE
A helper function to create a binary operator.
@ -209,7 +215,7 @@ base_dialect =
types = [simple_cast]
windows = [["ROW_NUMBER", make_row_number], ["ROW_NUMBER_IN_GROUP", make_row_number_in_group]]
base_map = Map.from_vector (arith + logic + compare + functions + agg + counts + text + nulls + contains + types + windows)
Internal_Dialect.Value base_map wrap_in_quotes
Internal_Dialect.Value base_map wrap_in_quotes (default_make_table_literal wrap_in_quotes)
## PRIVATE
is_empty = lift_unary_op "IS_EMPTY" arg->
@ -357,8 +363,7 @@ generate_from_part dialect from_spec = case from_spec of
SQL_Builder.code raw_sql . paren ++ alias dialect as_name
From_Spec.Literal_Values vecs column_names as_name ->
Runtime.assert (vecs.length == column_names.length) "Vectors and column names must have the same length"
values = SQL_Builder.join ", " (vecs.transpose.map (vec-> SQL_Builder.join ", " (vec.map SQL_Builder.interpolation) . paren))
SQL_Builder.code "(VALUES " ++ values ++ ")" ++ alias dialect as_name
dialect.make_table_literal vecs column_names as_name
From_Spec.Join kind left_spec right_spec on ->
left = generate_from_part dialect left_spec
right = generate_from_part dialect right_spec
@ -376,6 +381,14 @@ generate_from_part dialect from_spec = case from_spec of
sub = generate_query dialect (Query.Select columns context)
sub.paren ++ alias dialect as_name
## PRIVATE
default_make_table_literal wrap_identifier vecs column_names as_name =
values = SQL_Builder.join ", " (vecs.transpose.map (vec-> SQL_Builder.join ", " (vec.map SQL_Builder.interpolation) . paren))
wrap_identifier_as_builder n =
SQL_Builder.code (wrap_identifier n)
structure = (wrap_identifier_as_builder as_name) ++ (SQL_Builder.join ", " (column_names.map wrap_identifier_as_builder) . paren)
SQL_Builder.code "(VALUES " ++ values ++ ") AS " ++ structure
## PRIVATE
fold_case = lift_unary_op "FOLD_CASE" arg->
SQL_Builder.code "LOWER(UPPER(" ++ arg ++ "))"

View File

@ -1,4 +1,5 @@
from Standard.Base import all
import Standard.Base.Data.Numbers.Positive_Integer
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
from Standard.Table import Bits, Value_Type
@ -43,7 +44,9 @@ type Postgres_Type_Mapping
Value_Type.Char size variable ->
case variable of
True ->
is_unbounded = size.is_nothing || (size == max_precision)
is_unbounded = case size of
Nothing -> True
Positive_Integer.Value integer -> integer == max_precision
case is_unbounded of
True -> SQL_Type.Value Types.VARCHAR "text"
False -> SQL_Type.Value Types.VARCHAR "varchar" size

View File

@ -278,7 +278,9 @@ make_internal_generator_dialect =
bool = [bool_or]
my_mappings = text + counts + stats + arith_extensions + bool + other
Base_Generator.base_dialect . extend_with my_mappings
Base_Generator.base_dialect
. extend_with my_mappings
. override_make_table_literal make_sqlite_table_literal
## PRIVATE
unsupported name =
@ -496,3 +498,11 @@ make_runtime_error_op arguments =
variable_to_defer = arguments.at 1
# We have to ensure that the implementation of SQLite that we use does not have a MATCH function defined which would make the code below succeed.
SQL_Builder.code "match('[ENSO INVARIANT VIOLATED: '||" ++ error_message ++ "||'] ', " ++ variable_to_defer ++ ")"
## PRIVATE
make_sqlite_table_literal vecs column_names as_name =
values = SQL_Builder.join ", " (vecs.transpose.map (vec-> SQL_Builder.join ", " (vec.map SQL_Builder.interpolation) . paren))
wrap_identifier_as_builder n = SQL_Builder.code (Base_Generator.wrap_in_quotes n)
wrapped_name = wrap_identifier_as_builder as_name
structure = wrapped_name ++ (SQL_Builder.join ", " (column_names.map wrap_identifier_as_builder) . paren)
SQL_Builder.code "(WITH " ++ structure ++ " AS (VALUES " ++ values ++ ") SELECT * FROM " ++ wrapped_name ++ ") AS " ++ wrapped_name

View File

@ -33,7 +33,10 @@ type Snowflake_Connection
create : Text -> Vector -> (Text -> Text -> Snowflake_Connection) -> Snowflake_Connection
create url properties make_new =
jdbc_connection = JDBC_Connection.create url properties
entity_naming_properties = Entity_Naming_Properties.from_jdbc_connection jdbc_connection is_case_sensitive=False
## As long as names are quoted, the Snowflake SQL dialect is case sensitive.
Our generator always quotes identifiers, so we can rely on the case sensitivity.
This is the same as in Postgres.
entity_naming_properties = Entity_Naming_Properties.from_jdbc_connection jdbc_connection is_case_sensitive=True
Snowflake_Connection.Value (Connection.new jdbc_connection Snowflake_Dialect.snowflake entity_naming_properties) make_new
## PRIVATE

View File

@ -271,7 +271,7 @@ agg_count_is_null = Base_Generator.lift_unary_op "COUNT_IS_NULL" arg->
## PRIVATE
agg_count_empty = Base_Generator.lift_unary_op "COUNT_EMPTY" arg->
SQL_Builder.code "COUNT_IF("++ arg.paren ++ " IS NULL OR " ++ arg.paren ++ " == '')"
SQL_Builder.code "COUNT_IF(" ++ arg.paren ++ " IS NULL OR " ++ arg.paren ++ " = '')"
## PRIVATE
agg_count_not_empty = Base_Generator.lift_unary_op "COUNT_NOT_EMPTY" arg->
@ -326,7 +326,7 @@ agg_longest = Base_Generator.lift_unary_op "LONGEST" arg->
## PRIVATE
concat_ops =
make_raw_concat_expr expr separator =
SQL_Builder.code "string_agg(" ++ expr ++ ", " ++ separator ++ ")"
SQL_Builder.code "LISTAGG(" ++ expr ++ ", " ++ separator ++ ")"
concat = Base_Generator.make_concat make_raw_concat_expr make_contains_expr
[["CONCAT", concat (has_quote=False)], ["CONCAT_QUOTE_IF_NEEDED", concat (has_quote=True)]]
@ -363,7 +363,9 @@ agg_count_distinct args = if args.is_empty then (Error.throw (Illegal_Argument.E
## PRIVATE
agg_count_distinct_include_null args =
## If we always count as tuples, then even null fields are counted.
SQL_Builder.code "COUNT(DISTINCT (" ++ SQL_Builder.join ", " args ++ ", 0))"
## But Snowflake seems to not like tuples?
#SQL_Builder.code "COUNT(DISTINCT (" ++ SQL_Builder.join ", " args ++ ", 0))"
SQL_Builder.code "COUNT(DISTINCT (" ++ SQL_Builder.join ", " args ++ "))"
## PRIVATE
starts_with = Base_Generator.lift_binary_sql_function "STARTS_WITH" "STARTSWITH"
@ -375,11 +377,11 @@ ends_with = Base_Generator.lift_binary_sql_function "ENDS_WITH" "ENDSWITH"
contains = Base_Generator.lift_binary_sql_function "CONTAINS" "CONTAINS"
## PRIVATE
make_contains_expr expr substring = contains [expr, substring]
make_contains_expr expr substring = contains.second [expr, substring]
## PRIVATE
make_case_sensitive = Base_Generator.lift_unary_op "MAKE_CASE_SENSITIVE" arg->
SQL_Builder.code "((" ++ arg ++ ') COLLATE "ucs_basic")'
SQL_Builder.code "((" ++ arg ++ ") COLLATE 'ucs_basic')"
## PRIVATE
left = Base_Generator.lift_binary_op "LEFT" str-> n->
@ -415,11 +417,11 @@ make_order_descriptor internal_column sort_direction text_ordering =
## PRIVATE
is_nan = Base_Generator.lift_unary_op "IS_NAN" arg->
(arg ++ " in (double precision 'NaN')").paren
(arg ++ " in ('NaN'::float)").paren
## PRIVATE
is_inf = Base_Generator.lift_unary_op "IS_INF" arg->
(arg ++ " in (double precision 'Infinity', double precision '-Infinity')").paren
(arg ++ " in ('Infinity'::float, '-Infinity'::float)").paren
## PRIVATE
bool_or = Base_Generator.lift_unary_op "BOOL_OR" arg->
@ -427,11 +429,11 @@ bool_or = Base_Generator.lift_unary_op "BOOL_OR" arg->
## PRIVATE
floating_point_div = Base_Generator.lift_binary_op "/" x-> y->
SQL_Builder.code "CAST(" ++ x ++ " AS double precision) / CAST(" ++ y ++ " AS double precision)"
SQL_Builder.code "CAST(" ++ x ++ " AS float) / CAST(" ++ y ++ " AS float)"
## PRIVATE
mod_op = Base_Generator.lift_binary_op "MOD" x-> y->
x ++ " - FLOOR(CAST(" ++ x ++ " AS double precision) / CAST(" ++ y ++ " AS double precision)) * " ++ y
x ++ " - FLOOR(CAST(" ++ x ++ " AS float) / CAST(" ++ y ++ " AS float)) * " ++ y
## PRIVATE
decimal_div = Base_Generator.lift_binary_op "DECIMAL_DIV" x-> y->
@ -463,6 +465,12 @@ replace args metadata =
raw_pattern = metadata.at 0
replace_params = metadata.at 1
## The REGEXP_REPLACE function in Snowflake takes the following parameters:
<subject>, <pattern> [, <replacement>, <position>, <occurrence>, <parameters>
The `position` starts at 1 to search the whole string. Defaults to 1.
If `occurrence` is set to 0 all occurrences are replaced, otherwise the first N are replaced. Defaults to 0.
See: https://docs.snowflake.com/en/sql-reference/functions/regexp_replace
expression = case replace_params.input_type of
Text ->
## To use REGEXP_REPLACE on a non-regex, we have to escape it.
@ -470,27 +478,27 @@ replace args metadata =
case replace_params.only_first of
False -> case replace_params.case_sensitivity of
Case_Sensitivity.Insensitive _ ->
SQL_Builder.code "REGEXP_REPLACE(" ++ input ++ ", " ++ escaped_pattern ++ ", " ++ replacement ++ ", 'ig')"
SQL_Builder.code "REGEXP_REPLACE(" ++ input ++ ", " ++ escaped_pattern ++ ", " ++ replacement ++ ", 1, 0, 'i')"
_ ->
SQL_Builder.code "REPLACE(" ++ input ++ ", " ++ pattern ++ ", " ++ replacement ++ ")"
True -> case replace_params.case_sensitivity of
Case_Sensitivity.Insensitive _ ->
SQL_Builder.code "REGEXP_REPLACE(" ++ input ++ ", " ++ escaped_pattern ++ ", " ++ replacement ++ ", 'i')"
SQL_Builder.code "REGEXP_REPLACE(" ++ input ++ ", " ++ escaped_pattern ++ ", " ++ replacement ++ ", 1, 1, 'i')"
_ ->
SQL_Builder.code "REGEXP_REPLACE(" ++ input ++ ", " ++ escaped_pattern ++ ", " ++ replacement ++ ")"
SQL_Builder.code "REGEXP_REPLACE(" ++ input ++ ", " ++ escaped_pattern ++ ", " ++ replacement ++ ", 1, 1)"
Regex ->
pattern_string = SQL_Builder.interpolation raw_pattern.pattern_string
case replace_params.only_first of
False -> case replace_params.case_sensitivity of
Case_Sensitivity.Insensitive _ ->
SQL_Builder.code "REGEXP_REPLACE(" ++ input ++ ", " ++ pattern_string ++ ", " ++ replacement ++ ", 'ig')"
_ ->
SQL_Builder.code "REGEXP_REPLACE(" ++ input ++ ", " ++ pattern_string ++ ", " ++ replacement ++ ", 'g')"
True -> case replace_params.case_sensitivity of
Case_Sensitivity.Insensitive _ ->
SQL_Builder.code "REGEXP_REPLACE(" ++ input ++ ", " ++ pattern_string ++ ", " ++ replacement ++ ", 'i')"
SQL_Builder.code "REGEXP_REPLACE(" ++ input ++ ", " ++ pattern_string ++ ", " ++ replacement ++ ", 1, 0, 'i')"
_ ->
SQL_Builder.code "REGEXP_REPLACE(" ++ input ++ ", " ++ pattern_string ++ ", " ++ replacement ++ ")"
True -> case replace_params.case_sensitivity of
Case_Sensitivity.Insensitive _ ->
SQL_Builder.code "REGEXP_REPLACE(" ++ input ++ ", " ++ pattern_string ++ ", " ++ replacement ++ ", 1, 1, 'i')"
_ ->
SQL_Builder.code "REGEXP_REPLACE(" ++ input ++ ", " ++ pattern_string ++ ", " ++ replacement ++ ", 1, 1)"
DB_Column ->
case replace_params.only_first of
False -> case replace_params.case_sensitivity of

View File

@ -1,6 +1,7 @@
private
from Standard.Base import all
import Standard.Base.Data.Numbers.Positive_Integer
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Table.Internal.Java_Exports
@ -36,7 +37,9 @@ type Snowflake_Type_Mapping
_ -> SQL_Type.Value Types.DECIMAL "decimal" precision scale
Value_Type.Char size _ ->
# Snowflake does not support fixed length strings, so we use VARCHAR.
is_unbounded = size.is_nothing || (size >= max_length)
is_unbounded = case size of
Nothing -> True
Positive_Integer.Value integer -> integer >= max_length
case is_unbounded of
True -> SQL_Type.Value Types.VARCHAR "varchar"
False -> SQL_Type.Value Types.VARCHAR "varchar" size
@ -47,7 +50,9 @@ type Snowflake_Type_Mapping
SQL_Type.Value Types.TIMESTAMP type_name
Value_Type.Binary size _ ->
## Snowflake does not support fixed length binary types, so we use BINARY.
is_unbounded = size.is_nothing || (size >= max_length)
is_unbounded = case size of
Nothing -> True
Positive_Integer.Value integer -> integer >= max_length
case is_unbounded of
True -> SQL_Type.Value Types.BINARY "binary"
False -> SQL_Type.Value Types.BINARY "binary" size

View File

@ -56,13 +56,13 @@ type Bench_Builder
group self (name:Text) (configuration:Bench_Options) fn =
validate_name name
group = Vector.build b->
fn (Group_Builder.Impl b)
fn (Group_Builder.Impl name b)
self.builder.append <| Bench.Group name configuration group
## Builder to create a group of benchmarks.
type Group_Builder
## PRIVATE
Impl builder
Impl name builder
## Adds a benchmark specification to the group.

View File

@ -12,8 +12,10 @@ type Group_Builder
## PRIVATE
Arguments:
- name: The name of the group.
- builder: Vector builder used for storing specs.
- teardown_ref: A reference to a teardown method.
Impl (builder = Builder.new) (teardown_ref = Ref.new (_ -> Nothing))
Impl (name : Text) (builder = Builder.new) (teardown_ref = Ref.new (_ -> Nothing))
## Specifies a single test.
@ -67,4 +69,3 @@ type Group_Comparator
text_comp.hash g.name
Comparable.from (_:Group) = Group_Comparator

View File

@ -31,7 +31,7 @@ type Suite_Builder
is not ignored.
group : Text -> (Group_Builder -> Any) -> (Text | Nothing) -> Nothing
group self (name:Text) (fn : (Group_Builder -> Any)) (pending : (Text | Nothing) = Nothing) =
group_builder = Group_Builder.Impl
group_builder = Group_Builder.Impl name
case pending of
Nothing ->
fn group_builder

View File

@ -224,7 +224,9 @@ public final class AtomConstructor implements EnsoObject {
*/
@TruffleBoundary
public String getDisplayName() {
return name.equals("Value") || name.equals("Error") ? type.getName() + "." + name : name;
return name.equals("Value") || name.equals("Error") || name.equals("Warning")
? type.getName() + "." + name
: name;
}
/**

View File

@ -4,6 +4,7 @@ from Standard.Test import all
import project.Credentials_Spec
import project.Inter_Backend_File_Operations_Spec
import project.Redshift_Spec
import project.S3_Spec
main filter=Nothing =
@ -11,4 +12,6 @@ main filter=Nothing =
Credentials_Spec.add_specs suite_builder
S3_Spec.add_specs suite_builder
Inter_Backend_File_Operations_Spec.add_specs suite_builder
Redshift_Spec.add_specs suite_builder
suite.run_with_filter filter

View File

@ -9,9 +9,10 @@ from Standard.AWS import Redshift_Details, AWS_Credential
from Standard.Test import all
import project.Database.Common.Common_Spec
import project.Database.Helpers.Name_Generator
import project.Common_Table_Operations
import enso_dev.Table_Tests
import enso_dev.Table_Tests.Database.Common.Common_Spec
import enso_dev.Table_Tests.Database.Helpers.Name_Generator
import enso_dev.Table_Tests.Common_Table_Operations
type Data
Value ~data
@ -53,34 +54,37 @@ add_redshift_specific_specs suite_builder create_connection_fn =
data.t.at "bools" . value_type . is_boolean . should_be_true
data.t.at "reals" . value_type . is_floating_point . should_be_true
type Lazy_Ref
Value ~get
add_database_specs suite_builder create_connection_fn =
prefix = "[Redshift] "
name_counter = Ref.new 0
table_builder columns connection=(create_connection_fn Nothing) =
default_connection = Lazy_Ref.Value (create_connection_fn Nothing)
table_builder columns connection=Nothing =
ix = name_counter.get
name_counter . put ix+1
name = Name_Generator.random_name "table_"+ix.to_text
in_mem_table = Table.new columns
in_mem_table.select_into_database_table connection name primary_key=Nothing temporary=True
in_mem_table.select_into_database_table (connection.if_nothing default_connection.get) name primary_key=Nothing temporary=True
light_table_builder columns =
default_connection.get.base_connection.create_literal_table (Table.new columns) "literal_table"
materialize = .read
Common_Spec.add_specs suite_builder prefix create_connection_fn
add_redshift_specific_specs suite_builder create_connection_fn
common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by_unicode_normalization_by_default=True allows_mixed_type_comparisons=False supports_decimal_type=True
common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by_unicode_normalization_by_default=True allows_mixed_type_comparisons=False supports_decimal_type=True run_advanced_edge_case_tests_by_default=False
aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last_row_order=False aggregation_problems=False date_support=False
agg_in_memory_table = (enso_project.data / "data.csv") . read
agg_table_fn = _->
connection = create_connection_fn Nothing
agg_in_memory_table.select_into_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True
agg_in_memory_table.select_into_database_table default_connection.get (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True
empty_agg_table_fn = _->
connection = create_connection_fn Nothing
(agg_in_memory_table.take (First 0)).select_into_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True
(agg_in_memory_table.take (First 0)).select_into_database_table default_connection.get (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True
setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_fn
setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_fn light_table_builder=light_table_builder
Common_Table_Operations.Main.add_specs suite_builder setup
connect_via_json_config =

View File

@ -178,7 +178,7 @@ add_specs group_builder round_fun =
round_fun -1.22222222222235 13 use_bankers=True . should_equal -1.2222222222224
round_fun -1.222222222222235 14 use_bankers=True . should_equal -1.22222222222224
group_builder.specify "Floating point imperfect representation counter-examples" <|
group_builder.specify "Floating point imperfect representation counter-examples" pending=(if group_builder.name.contains "Snowflake" then "TODO: https://github.com/enso-org/enso/issues/10307") <|
round_fun 1.225 2 use_bankers=True . should_equal 1.22 # Actual result 1.23
round_fun 37.785 2 . should_equal 37.79
@ -279,4 +279,3 @@ add_specs group_builder round_fun =
Test.expect_panic_with (round_fun 123 "two") Type_Error
Test.expect_panic_with (round_fun 123 use_bankers="no") Type_Error
Test.expect_panic_with (round_fun 123 use_bankers=0) Type_Error

View File

@ -25,8 +25,6 @@ import enso_dev.Table_Tests.Database.Upload_Spec
import enso_dev.Table_Tests.Database.Helpers.Name_Generator
import enso_dev.Table_Tests.Common_Table_Operations
from enso_dev.Table_Tests.Common_Table_Operations.Util import all
from enso_dev.Table_Tests.Database.Types.Postgres_Type_Mapping_Spec import default_text
from enso_dev.Table_Tests.Database.Postgres_Spec import Basic_Test_Data, Postgres_Tables_Data
import enso_dev.Base_Tests.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup
@ -37,8 +35,8 @@ type Snowflake_Info_Data
tinfo self = self.data.at 1
t self = self.data.at 2
setup create_connection_fn = Snowflake_Info_Data.Value <|
connection = create_connection_fn Nothing
setup default_connection = Snowflake_Info_Data.Value <|
connection = default_connection.get
tinfo = Name_Generator.random_name "Tinfo"
connection.execute_update 'CREATE TEMPORARY TABLE "'+tinfo+'" ("strs" VARCHAR, "ints" NUMBER(38,0), "bools" BOOLEAN, "doubles" FLOAT8)'
t = connection.query (SQL_Query.Table_Name tinfo)
@ -51,49 +49,88 @@ type Snowflake_Info_Data
teardown self =
self.connection.execute_update 'DROP TABLE "'+self.tinfo+'"'
self.connection.close
type Tables_And_Views_Data
Value ~data
snowflake_specific_spec suite_builder create_connection_fn db_name setup =
connection self = self.data.at 0
tinfo self = self.data.at 1
vinfo self = self.data.at 2
temporary_table self = self.data.at 3
setup default_connection = Tables_And_Views_Data.Value <|
connection = default_connection.get
tinfo = Name_Generator.random_name "TestTable"
connection.execute_update 'CREATE TABLE "'+tinfo+'" ("A" VARCHAR)'
vinfo = Name_Generator.random_name "TestView"
connection.execute_update 'CREATE VIEW "'+vinfo+'" AS SELECT "A" FROM "'+tinfo+'";'
temporary_table = Name_Generator.random_name "TemporaryTable"
(Table.new [["X", [1, 2, 3]]]).select_into_database_table connection temporary_table temporary=True
[connection, tinfo, vinfo, temporary_table]
teardown self =
self.connection.execute_update 'DROP VIEW "'+self.vinfo+'";'
self.connection.execute_update 'DROP TABLE "'+self.tinfo+'";'
type Snowflake_Aggregate_Data
Value ~data
connection self = self.data.at 0
name self = self.data.at 1
t self = self.data.at 2
setup default_connection = Snowflake_Aggregate_Data.Value <|
connection = default_connection.get
name = Name_Generator.random_name "Ttypes"
connection.execute_update 'CREATE TEMPORARY TABLE "'+name+'" ("txt" VARCHAR, "i1" SMALLINT, "i2" INT, "i3" BIGINT, "i4" NUMERIC, "r1" REAL, "r2" DOUBLE PRECISION, "bools" BOOLEAN)' . if_not_error <|
t = connection.query (SQL_Query.Table_Name name)
[connection, name, t]
teardown self =
self.connection.execute_update 'DROP TABLE "'+self.name+'"'
snowflake_specific_spec suite_builder default_connection db_name setup =
table_builder = setup.table_builder
light_table_builder = setup.light_table_builder
materialize = setup.materialize
suite_builder.group "[Snowflake] Schemas and Databases" group_builder->
data = Basic_Test_Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
group_builder.specify "should be able to get current database and list databases" <|
data.connection.database.equals_ignore_case db_name . should_be_true
data.connection.databases.length . should_not_equal 0
data.connection.databases.find (name-> name.equals_ignore_case db_name) . should_succeed
Meta.is_same_object data.connection (data.connection.set_database db_name) . should_be_true
connection = default_connection.get
connection.database.equals_ignore_case db_name . should_be_true
connection.databases.length . should_not_equal 0
connection.databases.find (name-> name.equals_ignore_case db_name) . should_succeed
Meta.is_same_object connection (connection.set_database db_name) . should_be_true
group_builder.specify "should be able to get current schema and list schemas" <|
data.connection.schema.equals_ignore_case "public" . should_be_true
data.connection.schemas.length . should_not_equal 0
data.connection.schemas.find (name-> name.equals_ignore_case "public") . should_succeed
Meta.is_same_object data.connection (data.connection.set_schema "public") . should_be_true
connection = default_connection.get
connection.schema.equals_ignore_case "public" . should_be_true
connection.schemas.length . should_not_equal 0
connection.schemas.find (name-> name.equals_ignore_case "public") . should_succeed
Meta.is_same_object connection (connection.set_schema "public") . should_be_true
group_builder.specify "should allow changing schema" pending="TODO?" <|
new_connection = data.connection.set_schema "information_schema"
connection = default_connection.get
new_connection = connection.set_schema "information_schema"
new_schema = new_connection.read (SQL_Query.Raw_SQL "SELECT current_schema()") . at 0 . to_vector . first
new_schema . should_equal "information_schema"
group_builder.specify "should allow changing database" <|
databases = data.connection.databases.filter d->((d!=db_name) && (d!='rdsadmin'))
connection = default_connection.get
databases = connection.databases.filter d->((d!=db_name) && (d!='rdsadmin'))
pending_database = if databases.length != 0 then Nothing else "Cannot test changing database unless two databases defined."
case pending_database of
Nothing ->
new_connection = data.connection.set_database databases.first
new_connection = connection.set_database databases.first
new_database = new_connection.read (SQL_Query.Raw_SQL "SELECT current_database()") . at 0 . to_vector . first
new_database . should_equal databases.first
# Nop - skip the test
_ -> Nothing
suite_builder.group "[Snowflake] Tables and Table Types" group_builder->
data = Postgres_Tables_Data.setup create_connection_fn
data = Tables_And_Views_Data.setup default_connection
group_builder.teardown <|
data.teardown
@ -135,7 +172,7 @@ snowflake_specific_spec suite_builder create_connection_fn db_name setup =
suite_builder.group "[Snowflake] Info" group_builder->
data = Snowflake_Info_Data.setup create_connection_fn
data = Snowflake_Info_Data.setup default_connection
group_builder.teardown <|
data.teardown
@ -144,13 +181,13 @@ snowflake_specific_spec suite_builder create_connection_fn db_name setup =
i = data.t.column_info
i.at "Column" . to_vector . should_equal ["strs", "ints", "bools", "doubles"]
i.at "Items Count" . to_vector . should_equal [3, 1, 2, 3]
i.at "Value Type" . to_vector . should_equal [default_text, Value_Type.Integer, Value_Type.Boolean, Value_Type.Float]
i.at "Value Type" . to_vector . should_equal [Value_Type.Char, Value_Type.Integer, Value_Type.Boolean, Value_Type.Float]
group_builder.specify "should return Table information, also for aggregated results" <|
i = data.t.aggregate columns=[Aggregate_Column.Concatenate "strs", Aggregate_Column.Sum "ints", Aggregate_Column.Count_Distinct "bools"] . column_info
i.at "Column" . to_vector . should_equal ["Concatenate strs", "Sum ints", "Count Distinct bools"]
i.at "Items Count" . to_vector . should_equal [1, 1, 1]
i.at "Value Type" . to_vector . should_equal [default_text, Value_Type.Decimal, Value_Type.Integer]
i.at "Value Type" . to_vector . should_equal [Value_Type.Char, Value_Type.Integer, Value_Type.Integer]
group_builder.specify "should infer standard types correctly" <|
data.t.at "strs" . value_type . is_text . should_be_true
@ -175,7 +212,7 @@ snowflake_specific_spec suite_builder create_connection_fn db_name setup =
in_memory.at "txt-fixed" . value_type . should_equal (Value_Type.Char size=3 variable_length=False)
suite_builder.group "[Snowflake] Dialect-specific codegen" group_builder->
data = Snowflake_Info_Data.setup create_connection_fn
data = Snowflake_Info_Data.setup default_connection
group_builder.teardown <|
data.teardown
@ -187,16 +224,16 @@ snowflake_specific_spec suite_builder create_connection_fn db_name setup =
t.distinct ["strs"] . to_sql . prepare . should_equal [expected_code, []]
suite_builder.group "[Snowflake] Table.aggregate should correctly infer result types" group_builder->
data = Snowflake_Info_Data.setup create_connection_fn
data = Snowflake_Aggregate_Data.setup default_connection
group_builder.teardown <|
data.teardown
group_builder.specify "Concatenate, Shortest and Longest" <|
r = data.t.aggregate columns=[Aggregate_Column.Concatenate "txt", Aggregate_Column.Shortest "txt", Aggregate_Column.Longest "txt"]
r.columns.at 0 . value_type . should_equal default_text
r.columns.at 1 . value_type . should_equal default_text
r.columns.at 2 . value_type . should_equal default_text
r.columns.at 0 . value_type . should_equal Value_Type.Char
r.columns.at 1 . value_type . should_equal Value_Type.Char
r.columns.at 2 . value_type . should_equal Value_Type.Char
group_builder.specify "Counts" <|
r = data.t.aggregate columns=[Aggregate_Column.Count, Aggregate_Column.Count_Empty "txt", Aggregate_Column.Count_Not_Empty "txt", Aggregate_Column.Count_Distinct "i1", Aggregate_Column.Count_Not_Nothing "i2", Aggregate_Column.Count_Nothing "i3"]
@ -208,37 +245,33 @@ snowflake_specific_spec suite_builder create_connection_fn db_name setup =
r = data.t.aggregate columns=[Aggregate_Column.Sum "i1", Aggregate_Column.Sum "i2", Aggregate_Column.Sum "i3", Aggregate_Column.Sum "i4", Aggregate_Column.Sum "r1", Aggregate_Column.Sum "r2"]
r.columns.at 0 . value_type . should_equal Value_Type.Integer
r.columns.at 1 . value_type . should_equal Value_Type.Integer
r.columns.at 2 . value_type . should_equal Value_Type.Decimal
r.columns.at 3 . value_type . should_equal Value_Type.Decimal
r.columns.at 4 . value_type . should_equal (Value_Type.Float Bits.Bits_32)
# TODO are these types right??
r.columns.at 2 . value_type . should_equal Value_Type.Integer
r.columns.at 3 . value_type . should_equal Value_Type.Integer
r.columns.at 4 . value_type . should_equal (Value_Type.Float Bits.Bits_64)
r.columns.at 5 . value_type . should_equal (Value_Type.Float Bits.Bits_64)
group_builder.specify "Average" <|
r = data.t.aggregate columns=[Aggregate_Column.Average "i1", Aggregate_Column.Average "i2", Aggregate_Column.Average "i3", Aggregate_Column.Average "i4", Aggregate_Column.Average "r1", Aggregate_Column.Average "r2"]
r.columns.at 0 . value_type . should_equal Value_Type.Decimal
r.columns.at 1 . value_type . should_equal Value_Type.Decimal
r.columns.at 2 . value_type . should_equal Value_Type.Decimal
r.columns.at 3 . value_type . should_equal Value_Type.Decimal
r.columns.at 0 . value_type . should_equal (Value_Type.Decimal 38 6)
r.columns.at 1 . value_type . should_equal (Value_Type.Decimal 38 6)
r.columns.at 2 . value_type . should_equal (Value_Type.Decimal 38 6)
r.columns.at 3 . value_type . should_equal (Value_Type.Decimal 38 6)
r.columns.at 4 . value_type . should_equal Value_Type.Float
r.columns.at 5 . value_type . should_equal Value_Type.Float
suite_builder.group "[Snowflake] Warning/Error handling" group_builder->
data = Basic_Test_Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
group_builder.specify "query warnings should be propagated" <|
long_name = (Name_Generator.random_name "T") + ("a" * 100)
r = data.connection.execute_update 'CREATE TEMPORARY TABLE "'+long_name+'" ("A" VARCHAR)'
r = default_connection.get.execute_update 'CREATE TEMPORARY TABLE "'+long_name+'" ("A" VARCHAR)'
w1 = Problems.expect_only_warning SQL_Warning r
# The display text may itself be truncated, so we just check the first words.
w1.to_display_text . should_contain "identifier"
# And check the full message for words that could be truncated in short message.
w1.message . should_contain "truncated to"
table = data.connection.query (SQL_Query.Raw_SQL 'SELECT 1 AS "'+long_name+'"')
table = default_connection.get.query (SQL_Query.Raw_SQL 'SELECT 1 AS "'+long_name+'"')
w2 = Problems.expect_only_warning SQL_Warning table
w2.message . should_contain "truncated"
effective_name = table.column_names . at 0
@ -246,34 +279,29 @@ snowflake_specific_spec suite_builder create_connection_fn db_name setup =
long_name.should_contain effective_name
group_builder.specify "is capable of handling weird tables" <|
data.connection.execute_update 'CREATE TEMPORARY TABLE "empty-column-name" ("" VARCHAR)' . should_fail_with SQL_Error
default_connection.get.execute_update 'CREATE TEMPORARY TABLE "empty-column-name" ("" VARCHAR)' . should_fail_with SQL_Error
Problems.assume_no_problems <|
data.connection.execute_update 'CREATE TEMPORARY TABLE "clashing-unicode-names" ("ś" VARCHAR, "s\u0301" INTEGER)'
default_connection.get.execute_update 'CREATE TEMPORARY TABLE "clashing-unicode-names" ("ś" VARCHAR, "s\u0301" INTEGER)'
Problems.assume_no_problems <|
data.connection.execute_update 'INSERT INTO "clashing-unicode-names" VALUES (\'A\', 2)'
t2 = data.connection.query (SQL_Query.Table_Name "clashing-unicode-names")
default_connection.get.execute_update 'INSERT INTO "clashing-unicode-names" VALUES (\'A\', 2)'
t2 = default_connection.get.query (SQL_Query.Table_Name "clashing-unicode-names")
Problems.expect_only_warning Duplicate_Output_Column_Names t2
t2.column_names . should_equal ["ś", "ś 1"]
m2 = t2.read
m2.at "ś" . to_vector . should_equal ["A"]
m2.at "ś 1" . to_vector . should_equal [2]
r3 = data.connection.query 'SELECT 1 AS "A", 2 AS "A"'
r3 = default_connection.get.query 'SELECT 1 AS "A", 2 AS "A"'
r3.should_fail_with Illegal_Argument
r3.catch.cause . should_be_a Duplicate_Output_Column_Names
r4 = data.connection.query 'SELECT 1 AS ""'
r4 = default_connection.get.query 'SELECT 1 AS ""'
r4.should_fail_with SQL_Error
suite_builder.group "[Snowflake] Edge Cases" group_builder->
data = Basic_Test_Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
group_builder.specify "materialize should respect the overridden type" pending="TODO" <|
t0 = table_builder [["x", [False, True, False]], ["A", ["a", "b", "c"]], ["B", ["xyz", "abc", "def"]]] connection=data.connection
t0 = table_builder [["x", [False, True, False]], ["A", ["a", "b", "c"]], ["B", ["xyz", "abc", "def"]]]
t1 = t0 . cast "A" (Value_Type.Char size=1 variable_length=False) . cast "B" (Value_Type.Char size=3 variable_length=False)
x = t1.at "x"
@ -297,10 +325,10 @@ snowflake_specific_spec suite_builder create_connection_fn db_name setup =
m1 = Table.new [["X", [10, x]]]
m1.at "X" . value_type . should_be_a (Value_Type.Decimal ...)
t1 = m1.select_into_database_table data.connection (Name_Generator.random_name "BigInteger") primary_key=[] temporary=True
t1 = m1.select_into_database_table default_connection.get (Name_Generator.random_name "BigInteger") primary_key=[] temporary=True
t1.at "X" . value_type . should_be_a (Value_Type.Decimal ...)
t1.at "X" . value_type . scale . should_equal 0
# If we want to enforce the scale, Postgres requires us to enforce a precision too, so we use the biggest one we can:
# TODO revise
t1.at "X" . value_type . precision . should_equal 1000
w1 = Problems.expect_only_warning Inexact_Type_Coercion t1
w1.requested_type . should_equal (Value_Type.Decimal precision=Nothing scale=0)
@ -313,7 +341,7 @@ snowflake_specific_spec suite_builder create_connection_fn db_name setup =
t2 = t1.set (expr "[X] + 10") "Y"
t2.at "X" . value_type . should_be_a (Value_Type.Decimal ...)
t2.at "Y" . value_type . should_be_a (Value_Type.Decimal ...)
# Unfortunately, performing operations on a Decimal column in postgres can lose information about it being an integer column.
# TODO revise
t2.at "Y" . value_type . scale . should_equal Nothing
t2.at "X" . to_vector . should_equal [10, x]
# Only works by approximation:
@ -334,7 +362,7 @@ snowflake_specific_spec suite_builder create_connection_fn db_name setup =
super_large = 11^2000
m3 = Table.new [["X", [super_large]]]
m3.at "X" . value_type . should_be_a (Value_Type.Decimal ...)
t3 = m3.select_into_database_table data.connection (Name_Generator.random_name "BigInteger2") primary_key=[] temporary=True
t3 = m3.select_into_database_table default_connection.get (Name_Generator.random_name "BigInteger2") primary_key=[] temporary=True
t3 . at "X" . value_type . should_be_a (Value_Type.Decimal ...)
# If we exceed the 1000 digits precision, we cannot enforce neither scale nor precision anymore.
t3 . at "X" . value_type . precision . should_equal Nothing
@ -355,7 +383,7 @@ snowflake_specific_spec suite_builder create_connection_fn db_name setup =
group_builder.specify "should round-trip timestamptz column, preserving instant but converting to UTC" pending="TODO" <|
table_name = Name_Generator.random_name "TimestampTZ"
table = data.connection.create_table table_name [Column_Description.Value "A" (Value_Type.Date_Time with_timezone=True)] primary_key=[]
table = default_connection.get.create_table table_name [Column_Description.Value "A" (Value_Type.Date_Time with_timezone=True)] primary_key=[]
dt1 = Date_Time.new 2022 05 04 15 30
dt2 = Date_Time.new 2022 05 04 15 30 zone=(Time_Zone.utc)
@ -388,7 +416,7 @@ snowflake_specific_spec suite_builder create_connection_fn db_name setup =
group_builder.specify "will round-trip timestamp column without timezone by converting it to UTC" pending="TODO" <|
table_name = Name_Generator.random_name "Timestamp"
table = data.connection.create_table table_name [Column_Description.Value "A" (Value_Type.Date_Time with_timezone=False)] primary_key=[]
table = default_connection.get.create_table table_name [Column_Description.Value "A" (Value_Type.Date_Time with_timezone=False)] primary_key=[]
Problems.assume_no_problems table
dt1 = Date_Time.new 2022 05 04 15 30
@ -449,13 +477,8 @@ snowflake_specific_spec suite_builder create_connection_fn db_name setup =
t2.at "A" . to_vector . should_equal []
suite_builder.group "[Snowflake] math functions" group_builder->
data = Basic_Test_Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
group_builder.specify "round, trunc, ceil, floor" <|
col = (table_builder [["x", [0.1, 0.9, 3.1, 3.9, -0.1, -0.9, -3.1, -3.9]]] connection=data.connection) . at "x"
col = table_builder [["x", [0.1, 0.9, 3.1, 3.9, -0.1, -0.9, -3.1, -3.9]]] . at "x"
col . cast Value_Type.Integer . ceil . value_type . should_equal Value_Type.Float
col . cast Value_Type.Float . round . value_type . should_equal Value_Type.Float
@ -482,76 +505,81 @@ snowflake_specific_spec suite_builder create_connection_fn db_name setup =
col . cast Value_Type.Integer . truncate . value_type . should_equal Value_Type.Float
col . cast Value_Type.Decimal . truncate . value_type . should_equal Value_Type.Decimal
do_op data n op =
table = table_builder [["x", [n]]] connection=data.connection
do_op n op =
table = light_table_builder [["x", [n]]]
result = table.at "x" |> op
result.to_vector.at 0
do_round data n dp=0 use_bankers=False = do_op data n (_.round dp use_bankers)
do_round n dp=0 use_bankers=False = do_op n (_.round dp use_bankers)
group_builder.specify "Can round correctly near the precision limit" <|
do_round data 1.2222222222222225 15 . should_equal 1.222222222222223
do_round data -1.2222222222222225 15 . should_equal -1.222222222222223
do_round data 1.2222222222222235 15 . should_equal 1.222222222222224
do_round data -1.2222222222222235 15 . should_equal -1.222222222222224
do_round 1.2222222222222225 15 . should_equal 1.222222222222223
do_round -1.2222222222222225 15 . should_equal -1.222222222222223
do_round 1.2222222222222235 15 . should_equal 1.222222222222224
do_round -1.2222222222222235 15 . should_equal -1.222222222222224
group_builder.specify "Can round correctly near the precision limit, using banker's rounding" <|
do_round data 1.2222222222222225 15 use_bankers=True . should_equal 1.222222222222222
do_round data -1.2222222222222225 15 use_bankers=True . should_equal -1.222222222222222
do_round data 1.2222222222222235 15 use_bankers=True . should_equal 1.222222222222224
do_round data -1.2222222222222235 15 use_bankers=True . should_equal -1.222222222222224
do_round 1.2222222222222225 15 use_bankers=True . should_equal 1.222222222222222
do_round -1.2222222222222225 15 use_bankers=True . should_equal -1.222222222222222
do_round 1.2222222222222235 15 use_bankers=True . should_equal 1.222222222222224
do_round -1.2222222222222235 15 use_bankers=True . should_equal -1.222222222222224
group_builder.specify "Can handle NaN/Infinity" <|
nan_result = if setup.test_selection.is_nan_and_nothing_distinct then Number.nan else Nothing
ops = [.round, .truncate, .ceil, .floor]
ops.each op->
do_op data Number.nan op . should_equal nan_result
do_op data Number.positive_infinity op . should_equal Number.positive_infinity
do_op data Number.negative_infinity op . should_equal Number.negative_infinity
do_op Number.nan op . should_equal nan_result
do_op Number.positive_infinity op . should_equal Number.positive_infinity
do_op Number.negative_infinity op . should_equal Number.negative_infinity
group_builder.specify "round returns the correct type" <|
do_round data 231.2 1 . should_be_a Float
do_round data 231.2 0 . should_be_a Float
do_round data 231.2 . should_be_a Float
do_round data 231.2 -1 . should_be_a Float
do_round 231.2 1 . should_be_a Float
do_round 231.2 0 . should_be_a Float
do_round 231.2 . should_be_a Float
do_round 231.2 -1 . should_be_a Float
group_builder.specify "round returns the correct type" <|
do_round data 231 1 . should_be_a Float
do_round data 231 0 . should_be_a Float
do_round data 231 . should_be_a Float
do_round data 231 -1 . should_be_a Float
do_round 231 1 . should_be_a Float
do_round 231 0 . should_be_a Float
do_round 231 . should_be_a Float
do_round 231 -1 . should_be_a Float
type Lazy_Ref
Value ~get
add_snowflake_specs suite_builder create_connection_fn db_name =
prefix = "[Snowflake] "
name_counter = Ref.new 0
table_builder columns connection=(create_connection_fn Nothing) =
## We prefer to keep a single connection for most tests, to avoid the overhead of initializing a new connection multiple times.
It is initialized lazily, so that it is actually established only if actually used. Merely listing the tests to run should not establish the connection.
default_connection = Lazy_Ref.Value (create_connection_fn Nothing)
table_builder columns connection=Nothing =
ix = name_counter.get
name_counter . put ix+1
name = Name_Generator.random_name "table_"+ix.to_text
in_mem_table = Table.new columns
in_mem_table.select_into_database_table connection name primary_key=Nothing temporary=True
in_mem_table.select_into_database_table (connection.if_nothing default_connection.get) name primary_key=Nothing temporary=True
light_table_builder columns =
default_connection.get.base_connection.create_literal_table (Table.new columns) "literal_table"
materialize = .read
Common_Spec.add_specs suite_builder prefix create_connection_fn
common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by_unicode_normalization_by_default=True allows_mixed_type_comparisons=False fixed_length_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=True supports_decimal_type=True supported_replace_params=supported_replace_params
common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by_unicode_normalization_by_default=True allows_mixed_type_comparisons=False fixed_length_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=True supports_decimal_type=True supported_replace_params=supported_replace_params run_advanced_edge_case_tests_by_default=False
aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last_row_order=False aggregation_problems=False
agg_in_memory_table = ((Project_Description.new enso_dev.Table_Tests).data / "data.csv") . read
agg_table_fn = _->
connection = create_connection_fn Nothing
agg_in_memory_table.select_into_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True
agg_in_memory_table.select_into_database_table default_connection.get (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True
empty_agg_table_fn = _->
connection = create_connection_fn Nothing
(agg_in_memory_table.take (First 0)).select_into_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True
(agg_in_memory_table.take (First 0)).select_into_database_table default_connection.get (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True
setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_fn
setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_fn light_table_builder=light_table_builder
snowflake_specific_spec suite_builder create_connection_fn db_name setup
snowflake_specific_spec suite_builder default_connection db_name setup
Common_Table_Operations.Main.add_specs suite_builder setup
## PRIVATE
@ -580,7 +608,7 @@ add_table_specs suite_builder =
base_details = get_configured_connection_details
group_builder.specify "should allow to set up a connection with the password passed as a secret" pending=cloud_setup.pending <|
cloud_setup.with_prepared_environment <|
with_secret "my_postgres_username" base_details.credentials.username username_secret-> with_secret "my_postgres_password" base_details.credentials.password password_secret->
with_secret "my_snowflake_username" base_details.credentials.username username_secret-> with_secret "my_snowflake_password" base_details.credentials.password password_secret->
secret_credentials = Credentials.Username_And_Password username_secret password_secret
details = Snowflake_Details.Snowflake base_details.account_name secret_credentials base_details.database base_details.schema base_details.warehouse
connection = Database.connect details

View File

@ -12,6 +12,7 @@ from Standard.Test import all
import project.Database.Helpers.Name_Generator
from project.Common_Table_Operations.Util import run_default_backend
import project.Util
polyglot java import java.lang.Long as Java_Long
@ -33,14 +34,7 @@ add_specs suite_builder setup =
create_connection_fn = setup.create_connection_func
suite_builder.group prefix+"Table.add_row_number (common)" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
table_builder = setup.table_builder
group_builder.specify "should rename existing column upon a name clash" <|
t1 = table_builder [["X", ['a', 'b']], ["Y", ['c', 'd']], ["Z", [40, 20]]]
t2 = t1.add_row_number name="Y" order_by=["X"] |> materialize |> _.sort "X"
@ -84,7 +78,7 @@ add_specs suite_builder setup =
t3 = t.add_row_number order_by=["X"] |> materialize |> _.sort "row_id"
Problems.assume_no_problems t3
t3.at "Row" . to_vector . should_equal [1, 3, 2, 4, 5]
t3.at "Row" . to_vector . should_have_relative_ordering [[1, 3], [2], [4, 5]]
if setup.is_database.not then
t4 = table_builder [["X", [1, "A", 1, 24.0, 24.0, 24.0, 24]], ["row_id", [1, 2, 3, 4, 5, 6, 7]]]

View File

@ -10,7 +10,7 @@ from Standard.Database.Errors import Unsupported_Database_Operation
from Standard.Test import all
from project.Common_Table_Operations.Util import run_default_backend
from project.Common_Table_Operations.Util import run_default_backend, within_table
polyglot java import java.lang.Double
@ -360,21 +360,23 @@ add_specs suite_builder setup =
grouped = data.empty_table.aggregate columns=[Shortest "TextWithNothing", Longest "TextWithNothing"]
materialized = materialize grouped
Problems.assume_no_problems materialized
grouped.row_count . should_equal 1
materialized.column_count . should_equal 2
materialized.columns.at 0 . name . should_equal "Shortest TextWithNothing"
materialized.columns.at 0 . at 0 . should_equal Nothing
materialized.columns.at 1 . name . should_equal "Longest TextWithNothing"
materialized.columns.at 1 . at 0 . should_equal Nothing
within_table grouped <|
grouped.row_count . should_equal 1
materialized.column_count . should_equal 2
materialized.columns.at 0 . name . should_equal "Shortest TextWithNothing"
materialized.columns.at 0 . at 0 . should_equal Nothing
materialized.columns.at 1 . name . should_equal "Longest TextWithNothing"
materialized.columns.at 1 . at 0 . should_equal Nothing
group_builder.specify "should be able to get concatenated text values" (pending = resolve_pending test_selection.text_concat) <|
grouped = data.empty_table.aggregate columns=[Concatenate "Code"]
materialized = materialize grouped
Problems.assume_no_problems materialized
grouped.row_count . should_equal 1
materialized.column_count . should_equal 1
materialized.columns.at 0 . name . should_equal "Concatenate Code"
materialized.columns.at 0 . at 0 . should_equal Nothing
within_table grouped <|
grouped.row_count . should_equal 1
materialized.column_count . should_equal 1
materialized.columns.at 0 . name . should_equal "Concatenate Code"
materialized.columns.at 0 . at 0 . should_equal Nothing
suite_builder.group prefix+"Table.aggregate should not summarize empty table when grouped" group_builder->
data = Data.setup create_connection_fn table_fn empty_table_fn
@ -922,14 +924,15 @@ add_specs suite_builder setup =
group_builder.specify "should correctly handle empty strings versus missing (null) strings" <|
table = table_builder [["A", ["abcd", "f", ""]], ["B", [Nothing, "f", "abc"]]]
result = table.aggregate [] [Shortest "A", Shortest "B"]
result.row_count . should_equal 1
materialized = materialize result
Problems.assume_no_problems materialized
materialized.column_count . should_equal 2
materialized.columns.at 0 . name . should_equal "Shortest A"
materialized.columns.at 0 . to_vector . should_equal [""]
materialized.columns.at 1 . name . should_equal "Shortest B"
materialized.columns.at 1 . to_vector . should_equal ["f"]
within_table result <|
result.row_count . should_equal 1
materialized = materialize result
Problems.assume_no_problems materialized
materialized.column_count . should_equal 2
materialized.columns.at 0 . name . should_equal "Shortest A"
materialized.columns.at 0 . to_vector . should_equal [""]
materialized.columns.at 1 . name . should_equal "Shortest B"
materialized.columns.at 1 . to_vector . should_equal ["f"]
suite_builder.group prefix+"Table.aggregate Concatenate" (pending = resolve_pending test_selection.text_concat) group_builder->
data = Data.setup create_connection_fn table_fn empty_table_fn
@ -940,8 +943,15 @@ add_specs suite_builder setup =
table_builder cols =
setup.table_builder cols connection=data.connection
build_sorted_table table_structure =
# Workaround for https://github.com/enso-org/enso/issues/10321
if setup.prefix.contains "Snowflake" . not then table_builder table_structure else
row_count = table_structure.first.second.length
new_structure = table_structure+[["row_id", (0.up_to row_count) . to_vector]]
table_builder new_structure . order_by "row_id" . remove_columns ["row_id"]
group_builder.specify "should insert the separator, add prefix and suffix" <|
table = table_builder [["A", ["foo", "bar", "foo", "foo"]], ["B", ["a", "b", "c", "d"]]]
table = build_sorted_table [["A", ["foo", "bar", "foo", "foo"]], ["B", ["a", "b", "c", "d"]]]
result = table.aggregate ["A"] [Concatenate "B" prefix="[[" suffix="]]" separator="; "]
result.row_count . should_equal 2
materialized = materialize result . sort ([..Name "A"])
@ -953,7 +963,7 @@ add_specs suite_builder setup =
materialized.columns.at 1 . to_vector . should_equal ["[[b]]", "[[a; c; d]]"]
group_builder.specify "should correctly escape separator and quote characters but only if necessary" <|
table = table_builder [["A", ["1,0", "b", "'c", "''", ","]]]
table = build_sorted_table [["A", ["1,0", "b", "'c", "''", ","]]]
result = table.aggregate columns=[(Concatenate "A" prefix="[[" suffix="]]" separator="," quote_char="'")]
result.row_count . should_equal 1
materialized = materialize result
@ -963,7 +973,7 @@ add_specs suite_builder setup =
materialized.columns.at 0 . to_vector . should_equal ["[['1,0',b,'''c','''''',',']]"]
group_builder.specify "should correctly handle missing values and empty values with quote character" <|
table = table_builder [["A", ["1,0", "A", "", "", "B", Nothing, Nothing, "C"]]]
table = build_sorted_table [["A", ["1,0", "A", "", "", "B", Nothing, Nothing, "C"]]]
result = table.aggregate columns=[(Concatenate "A" prefix="[[" suffix="]]" separator="," quote_char="'")]
result.row_count . should_equal 1
materialized = materialize result
@ -973,7 +983,7 @@ add_specs suite_builder setup =
materialized.columns.at 0 . to_vector . should_equal ["[['1,0',A,'','',B,,,C]]"]
group_builder.specify "will not be able to distinguish missing values from empty values without quote character" <|
table = table_builder [["A", ["1,0", "A", "", "", "B", Nothing, Nothing, "C"]]]
table = build_sorted_table [["A", ["1,0", "A", "", "", "B", Nothing, Nothing, "C"]]]
result = table.aggregate columns=[(Concatenate "A" prefix="[[" suffix="]]" separator=",")]
result.row_count . should_equal 1
materialized = materialize result
@ -988,7 +998,7 @@ add_specs suite_builder setup =
materialized.columns.at 0 . to_vector . should_equal ["[[1,0,A,,,B,,,C]]"]
group_builder.specify "should work with empty separator" <|
table = table_builder [["A", ["1,0", "A", "", "", "B", Nothing, Nothing, "C"]]]
table = build_sorted_table [["A", ["1,0", "A", "", "", "B", Nothing, Nothing, "C"]]]
result = table.aggregate columns=[(Concatenate "A")]
result.row_count . should_equal 1
materialized = materialize result
@ -998,7 +1008,7 @@ add_specs suite_builder setup =
materialized.columns.at 0 . to_vector . should_equal ["1,0ABC"]
group_builder.specify "should work with empty separator but non-empty quote" <|
table = table_builder [["A", ["1'0", "A", "", "", "B", Nothing, Nothing, "C"]]]
table = build_sorted_table [["A", ["1'0", "A", "", "", "B", Nothing, Nothing, "C"]]]
result = table.aggregate columns=[(Concatenate "A" quote_char="'")]
result.row_count . should_equal 1
materialized = materialize result

View File

@ -43,7 +43,7 @@ add_specs suite_builder setup =
False ->
t2.should_fail_with Clashing_Column_Name
True ->
Test.with_clue "The columns should be correctly distinguished." <|
Test.with_clue "The columns should be correctly distinguished. " <|
t2.at "X" . to_vector . should_equal [1]
t2.at "x" . to_vector . should_equal [101]

View File

@ -18,162 +18,111 @@ from Standard.Test import all
import enso_dev.Base_Tests.Data.Round_Spec
from project.Common_Table_Operations.Util import run_default_backend
from project.Common_Table_Operations.Util import run_default_backend, within_table
main filter=Nothing = run_default_backend add_specs filter
type Data
Value ~connection
setup create_connection_fn = Data.Value <|
connection = create_connection_fn Nothing
connection
teardown self =
self.connection.close
type Arithmetic_Data
Value ~data
connection self = self.data.at 0
x self = self.data.at 1
y self = self.data.at 2
x self = self.data.at 0
y self = self.data.at 1
setup create_connection_fn table_builder = Arithmetic_Data.Value <|
connection = create_connection_fn Nothing
t2 = table_builder [["x", [1, 4, 5, Nothing]], ["y", [2.0, 3.25, 5.0, Nothing]]] connection=connection
setup table_builder = Arithmetic_Data.Value <|
t2 = table_builder [["x", [1, 4, 5, Nothing]], ["y", [2.0, 3.25, 5.0, Nothing]], ["row_id", [1, 2, 3, 4]]] . order_by "row_id"
x = t2.at "x"
y = t2.at "y"
[connection, x, y]
teardown self =
self.connection.close
[x, y]
type Min_Max_Data
Value ~data
connection self = self.data.at 0
a self = self.data.at 1
b self = self.data.at 2
c self = self.data.at 3
t self = self.data.at 4
a self = self.data.at 0
b self = self.data.at 1
c self = self.data.at 2
t self = self.data.at 3
setup create_connection_fn table_builder = Min_Max_Data.Value <|
connection = create_connection_fn Nothing
t = table_builder [["a", [1, 2, 3]], ["b", [4.5, 5.5, 6.5]], ["c", ['a', 'b', 'c']], ["d", [True, False, True]]] connection=connection
setup table_builder = Min_Max_Data.Value <|
t = table_builder [["a", [1, 2, 3]], ["b", [4.5, 5.5, 6.5]], ["c", ['a', 'b', 'c']], ["d", [True, False, True]]] . order_by "a"
a = t.at "a"
b = t.at "b"
c = t.at "c"
[connection, a, b, c, t]
teardown self =
drop_table self.connection self.t
self.connection.close
[a, b, c, t]
type Literal_Data
Value ~data
connection self = self.data.at 0
col0 self = self.data.at 1
col1 self = self.data.at 2
col0 self = self.data.at 0
col1 self = self.data.at 1
setup create_connection_fn table_builder = Literal_Data.Value <|
connection = create_connection_fn Nothing
col0 = table_builder [["x", ['hello Hello', 'hello hello', 'HELLO HELLO']]] . at "x"
col1 = table_builder [["x", ['a[bcd] A[bCd] a[bcd]', 'abac ad Ab aCAd']]] . at "x"
[connection, col0, col1]
teardown self =
self.connection.close
setup table_builder = Literal_Data.Value <|
col0 = table_builder [["x", ['hello Hello', 'hello hello', 'HELLO HELLO']], ["row_id", [1, 2, 3]]] . order_by "row_id" . at "x"
col1 = table_builder [["x", ['a[bcd] A[bCd] a[bcd]', 'abac ad Ab aCAd']], ["row_id", [1, 2]]] . order_by "row_id" . at "x"
[col0, col1]
type Replace_Data
Value ~data
connection self = self.data.at 0
col self = self.data.at 1
patterns self = self.data.at 2
replacements self = self.data.at 3
col self = self.data.at 0
patterns self = self.data.at 1
replacements self = self.data.at 2
setup create_connection_fn table_builder = Replace_Data.Value <|
connection = create_connection_fn Nothing
table = table_builder [["x", ['hello Hello', 'hello hello', 'HELLO HELLO', 'a[bcd] A[bCd] a[bcd]', 'abac ad Ab aCAd']], ["patterns", ['hello', 'hello', 'hello', 'a[bcd]', 'a[bcd]']], ["replacements", ['bye', 'bye', 'bye', 'hey', 'hey']]]
setup table_builder = Replace_Data.Value <|
table = table_builder [["x", ['hello Hello', 'hello hello', 'HELLO HELLO', 'a[bcd] A[bCd] a[bcd]', 'abac ad Ab aCAd']], ["patterns", ['hello', 'hello', 'hello', 'a[bcd]', 'a[bcd]']], ["replacements", ['bye', 'bye', 'bye', 'hey', 'hey']], ["row_id", [1, 2, 3, 4, 5]]] . order_by "row_id"
col = table.at "x"
patterns = table.at "patterns"
replacements = table.at "replacements"
[connection, col, patterns, replacements]
teardown self =
self.connection.close
[col, patterns, replacements]
type Text_Replace_Data
Value ~data
connection self = self.data.at 0
a self = self.data.at 1
b self = self.data.at 2
c self = self.data.at 3
d self = self.data.at 4
a self = self.data.at 0
b self = self.data.at 1
c self = self.data.at 2
d self = self.data.at 3
setup create_connection_fn table_builder = Text_Replace_Data.Value <|
connection = create_connection_fn Nothing
t4 = table_builder [["A", ["Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot"]], ["B", ["A","O","a","E","o","O"]], ["C", [1,2,3,4,5,6]], ["D", ['',Nothing,'',Nothing,'','']]] connection=connection
setup table_builder = Text_Replace_Data.Value <|
t4 = table_builder [["A", ["Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot"]], ["B", ["A","O","a","E","o","O"]], ["C", [1,2,3,4,5,6]], ["D", ['',Nothing,'',Nothing,'','']]] . order_by "C"
a = t4.at "A"
b = t4.at "B"
c = t4.at "C"
d = t4.at "D"
[connection, a, b, c, d]
teardown self =
self.connection.close
[a, b, c, d]
type Trim_Data
Value ~data
connection self = self.data.at 0
a self = self.data.at 1
b self = self.data.at 2
c self = self.data.at 3
a self = self.data.at 0
b self = self.data.at 1
c self = self.data.at 2
setup create_connection_fn table_builder = Trim_Data.Value <|
connection = create_connection_fn Nothing
table = table_builder [["A", [" A ", ' \t\n\rA\r\n\t ', "xxxAxx"]], ["B", [" ",' \t',"x"]], ["C", [1,2,3]]]
setup table_builder = Trim_Data.Value <|
table = table_builder [["A", [" A ", ' \t\n\rA\r\n\t ', "xxxAxx"]], ["B", [" ",' \t',"x"]], ["C", [1,2,3]]] . order_by "C"
a = table.at "A"
b = table.at "B"
c = table.at "C"
[connection, a, b, c]
teardown self =
self.connection.close
[a, b, c]
type Names_Data
Value ~data
connection self = self.data.at 0
t self = self.data.at 1
t self = self.data.at 0
setup create_connection_fn table_builder = Names_Data.Value <|
connection = create_connection_fn Nothing
t = table_builder [["a", [1, 2, 3]], ["b", ['x', 'y', 'z']], ["c", [1.0, 2.0, 3.0]], ["d", [True, False, True]]]
[connection, t]
teardown self =
self.connection.close
setup table_builder = Names_Data.Value <|
t = table_builder [["a", [1, 2, 3]], ["b", ['x', 'y', 'z']], ["c", [1.0, 2.0, 3.0]], ["d", [True, False, True]]] . order_by "a"
[t]
add_specs suite_builder setup =
prefix = setup.prefix
create_connection_fn = setup.create_connection_func
table_builder = setup.table_builder
light_table_builder = setup.light_table_builder
pending_datetime = if setup.test_selection.date_time.not then "Date/Time operations are not supported by this backend."
suite_builder.group prefix+"Rounding numeric tests" group_builder->
data = Data.setup create_connection_fn
suite_builder.group prefix+"(Column_Operations_Spec) Rounding numeric tests" group_builder->
do_op n op =
table = setup.table_builder [["x", [n]]] connection=data.connection
table = light_table_builder [["x", [n]]]
result = table.at "x" |> op
result.to_vector.at 0
@ -185,6 +134,13 @@ add_specs suite_builder setup =
3.1 . round 16 . should_fail_with Illegal_Argument
3.1 . round -16 . should_fail_with Illegal_Argument
build_sorted_table table_structure table_builder=setup.table_builder =
# Workaround for https://github.com/enso-org/enso/issues/10321
if setup.prefix.contains "Snowflake" . not then table_builder table_structure else
row_count = table_structure.first.second.length
new_structure = table_structure+[["row_id", (0.up_to row_count) . to_vector]]
table_builder new_structure . order_by "row_id" . remove_columns ["row_id"]
## Runs the provided callback with a few combinations of columns, where some
of them are made Mixed (but still contain only the original values).
If the backend does not support mixed columns, the callback is run only
@ -201,28 +157,23 @@ add_specs suite_builder setup =
Test.with_clue clue <|
callback table
case setup.test_selection.supports_mixed_columns of
False -> callback_with_clue (setup.table_builder table_structure)
False -> callback_with_clue (build_sorted_table table_structure)
True ->
all_combinations (Vector.fill table_structure.length [Nothing, Mixed_Type_Object]) . each combination->
amended_table_structure = table_structure.zip combination column_definition-> prefix->
name = column_definition.first
values = column_definition.second
[name, [prefix]+values]
mixed_table = setup.table_builder amended_table_structure
mixed_table = build_sorted_table amended_table_structure
aligned_table = mixed_table.drop 1
callback_with_clue aligned_table
suite_builder.group prefix+"Boolean Column Operations" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
suite_builder.group prefix+"(Column_Operations_Spec) Boolean Column Operations" group_builder->
table_builder = light_table_builder
group_builder.specify "iif" <|
t = table_builder [["X", [True, False, Nothing, True]]]
# TODO
IO.println (t.at "X" . value_type)
c1 = t.at "X" . iif 22 33
c1.to_vector . should_equal [22, 33, Nothing, 22]
c1.value_type . is_integer . should_be_true
@ -372,14 +323,8 @@ add_specs suite_builder setup =
((t.at "X") . not) . should_fail_with Invalid_Value_Type
((t.at "Y") . iif 10 20) . should_fail_with Invalid_Value_Type
suite_builder.group prefix+"Column Operations - Equality & Null Handling" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
suite_builder.group prefix+"(Column_Operations_Spec) Column Operations - Equality & Null Handling" group_builder->
table_builder = setup.light_table_builder
group_builder.specify "should provide basic == and != comparisons" pending="TODO figure out proper null handling" <|
with_mixed_columns_if_supported [["x", [1, 4, 5, Nothing]], ["y", [2.0, 3.25, 5.0, Nothing]]] t2->
@ -458,15 +403,8 @@ add_specs suite_builder setup =
r2.to_vector . should_equal [False, True, False]
Problems.expect_warning Floating_Point_Equality r2
suite_builder.group prefix+"Column.fill_nothing/empty" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
suite_builder.group prefix+"(Column_Operations_Spec) Column.fill_nothing/empty" group_builder->
table_builder = build_sorted_table table_builder=setup.light_table_builder
group_builder.specify "should allow to fill_nothing from a value" <|
col = table_builder [["col", [0, Nothing, 4, 5, Nothing, Nothing]]] . at "col"
default = 300
@ -493,15 +431,8 @@ add_specs suite_builder setup =
r.to_vector . should_equal ["0", "2", "4", "5", Nothing, "30"]
r.name . should_equal "col"
suite_builder.group prefix+"Table.fill_nothing/empty" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
suite_builder.group prefix+"(Column_Operations_Spec) Table.fill_nothing/empty" group_builder->
table_builder = build_sorted_table table_builder=setup.light_table_builder
group_builder.specify "should allow to fill_nothing from a value" <|
t = table_builder [["col0", [0, Nothing, 4, 5, Nothing, Nothing]], ["col1", [Nothing, 200, Nothing, 400, 500, Nothing]]]
default = 1000
@ -629,15 +560,8 @@ add_specs suite_builder setup =
s_true_2 = s.fill_nothing ["col1"] (s.at "colTrue")
s_true_2.at "col1" . to_vector . should_equal [False, True, True, False]
suite_builder.group prefix+"Table.text_replace" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
suite_builder.group prefix+"(Column_Operations_Spec) Table.text_replace" group_builder->
table_builder = build_sorted_table table_builder=setup.light_table_builder
group_builder.specify "should allow to replace values in a table" <|
with_mixed_columns_if_supported [["col0", ["abc", "def", "ghi"]], ["col1", ["nabc", "ndef", "asdf"]]] t->
actual = t.text_replace ["col0", "col1"] "ab" "xy"
@ -698,15 +622,8 @@ add_specs suite_builder setup =
False ->
t1.at "txt" . to_vector . should_equal ["aXc", "Yef", "ghZ"]
suite_builder.group prefix+"Column Comparisons" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
suite_builder.group prefix+"(Column_Operations_Spec) Column Comparisons" group_builder->
table_builder = build_sorted_table table_builder=setup.light_table_builder
group_builder.specify "should allow to compare numbers" <|
with_mixed_columns_if_supported [["x", [1, 4, 5, Nothing]], ["y", [2.0, 3.25, 5.0, Nothing]]] t2->
x = t2.at "x"
@ -816,15 +733,8 @@ add_specs suite_builder setup =
t = table_builder [["A", a], ["B", b], ["C", c]]
((t.at "A").between (t.at "B") (t.at "C")) . to_vector . should_equal r
suite_builder.group prefix+"Arithmetic Column Operations" group_builder->
data = Arithmetic_Data.setup create_connection_fn setup.table_builder
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
suite_builder.group prefix+"(Column_Operations_Spec) Arithmetic Column Operations" group_builder->
data = Arithmetic_Data.setup setup.table_builder
group_builder.specify "should allow basic operations" <|
(data.x + data.y).to_vector . should_equal [3, 7.25, 10, Nothing]
(data.x - data.y).to_vector . should_equal [-1.0, 0.75, 0.0, Nothing]
@ -899,7 +809,7 @@ add_specs suite_builder setup =
case setup.test_selection.is_nan_and_nothing_distinct of
True ->
t = table_builder [["X", [1.5, 3.0, Number.positive_infinity, Number.negative_infinity, Number.nan, Nothing]], ["Y", [1, 2, 3, 4, 5, Nothing]], ["Z", ["1", "2", "3", "4", "5", Nothing]]]
t = build_sorted_table [["X", [1.5, 3.0, Number.positive_infinity, Number.negative_infinity, Number.nan, Nothing]], ["Y", [1, 2, 3, 4, 5, Nothing]], ["Z", ["1", "2", "3", "4", "5", Nothing]]]
group_builder.specify "should support is_nan" <|
t.at "X" . is_nan . to_vector . should_equal [False, False, False, False, True, Nothing]
@ -916,13 +826,13 @@ add_specs suite_builder setup =
t.at "X" . is_nan . should_fail_with Unsupported_Database_Operation
group_builder.specify "should support is_infinite" <|
t = table_builder [["X", [1.5, 3.0, Number.positive_infinity, Number.negative_infinity, Nothing]], ["Y", [1, 2, 3, 4, Nothing]], ["Z", ["1", "2", "3", "4", Nothing]]]
t = build_sorted_table [["X", [1.5, 3.0, Number.positive_infinity, Number.negative_infinity, Nothing]], ["Y", [1, 2, 3, 4, Nothing]], ["Z", ["1", "2", "3", "4", Nothing]]]
t.at "X" . is_infinite . to_vector . should_equal [False, False, True, True, Nothing]
t.at "Y" . is_infinite . to_vector . should_equal [False, False, False, False, Nothing]
t.at "Z" . is_infinite . to_vector . should_fail_with Invalid_Value_Type
group_builder.specify "should support is_blank" <|
t = table_builder [["X", [1.5, 2, Number.nan, Nothing]], ["Y", [1, Nothing, 3, 4]]]
t = build_sorted_table [["X", [1.5, 2, Number.nan, Nothing]], ["Y", [1, Nothing, 3, 4]]]
t.at "X" . is_blank treat_nans_as_blank=True . to_vector . should_equal [False, False, True, True]
t.at "Y" . is_blank treat_nans_as_blank=True . to_vector . should_equal [False, True, False, False]
@ -932,7 +842,7 @@ add_specs suite_builder setup =
r = [0.5, 2.5, 2.5, 20.0]
a.zip b (/) . should_equal r
t = table_builder [["A", a], ["B", b]]
t = build_sorted_table [["A", a], ["B", b]]
t.at "A" . value_type . is_integer . should_be_true
t.at "B" . value_type . is_integer . should_be_true
@ -949,7 +859,7 @@ add_specs suite_builder setup =
r4 = [1.0, 2.5, 5.1, 20.0]
a2.zip b2 (/) . should_equal r4
t2 = table_builder [["A", a2], ["B", b2]]
t2 = build_sorted_table [["A", a2], ["B", b2]]
t2.at "A" . value_type . is_floating_point . should_be_true
t2.at "B" . value_type . is_floating_point . should_be_true
@ -963,7 +873,7 @@ add_specs suite_builder setup =
db_pending = if setup.is_database then "Arithmetic error handling is currently not implemented for the Database backend."
group_builder.specify "should allow division by 0 and report warnings" pending=db_pending <|
t = table_builder [["a", [3, 1, 0]], ["b", [2, 0, 0]], ["c", [1.5, 1.5, 0.0]], ["d", [1.5, 0.0, 0.0]]]
t = build_sorted_table [["a", [3, 1, 0]], ["b", [2, 0, 0]], ["c", [1.5, 1.5, 0.0]], ["d", [1.5, 0.0, 0.0]]]
a = t.at "a"
b = t.at "b"
c = t.at "c"
@ -1024,15 +934,8 @@ add_specs suite_builder setup =
(data.x * Nothing).to_vector . should_equal nulls
(data.x / Nothing).to_vector . should_equal nulls
suite_builder.group prefix+"Rounding-like operations" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
suite_builder.group prefix+"(Column_Operations_Spec) Rounding-like operations" group_builder->
table_builder = build_sorted_table table_builder=setup.light_table_builder
group_builder.specify "should name a rounding column correctly" <|
table = table_builder [["x", [0.1, 0.9, 3.1, 3.9, -0.1, -0.9, -3.1, -3.9]]]
table.at "x" . round . name . should_equal "round([x])"
@ -1047,6 +950,9 @@ add_specs suite_builder setup =
group_builder.specify "should allow round on a float column (to >0 decimal places)" <|
table = table_builder [["x", [0.51, 0.59, 3.51, 3.59, -0.51, -0.59, -3.51, -3.59]]]
result = table.at "x" . cast type . round 1
# TODO why it's becoming an Int?
IO.println (result.value_type)
IO.println (result.cast Value_Type.Char . to_vector)
result.to_vector.should_equal [0.5, 0.6, 3.5, 3.6, -0.5, -0.6, -3.5, -3.6]
result.name . should_equal "round([x])"
@ -1141,15 +1047,8 @@ add_specs suite_builder setup =
x.round . to_vector . should_equal [1, -2, 4]
x.truncate . to_vector . should_equal [1, -2, 3]
suite_builder.group prefix+"Date truncation" pending=pending_datetime group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
suite_builder.group prefix+"(Column_Operations_Spec) Date truncation" pending=pending_datetime group_builder->
table_builder = build_sorted_table table_builder=setup.light_table_builder
group_builder.specify "should be able to truncate a column of Date_Times" <|
dates = [Date_Time.new 2020 10 24 1 2 3, Date_Time.new 2020 10 24 1 2 3]
table = table_builder [["foo", dates]]
@ -1159,15 +1058,8 @@ add_specs suite_builder setup =
truncated . value_type . should_equal Value_Type.Date
truncated.name . should_equal "truncate([foo])"
suite_builder.group prefix+"Text Column Operations" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
suite_builder.group prefix+"(Column_Operations_Spec) Text Column Operations" group_builder->
table_builder = build_sorted_table table_builder=setup.light_table_builder
group_builder.specify "should handle operations like starts_with, ends_with, contains" <|
with_mixed_columns_if_supported [["s1", ["foobar", "bar", "baz", "BAB", Nothing]], ["s2", ["foo", "ar", "a", "b", Nothing]]] t3->
s1 = t3.at "s1"
@ -1345,12 +1237,8 @@ add_specs suite_builder setup =
c3.to_vector . should_equal [Nothing, Nothing, Nothing, Nothing, Nothing]
c3.value_type.is_text . should_be_true
suite_builder.group prefix+"Min/Max Operations" group_builder->
data = Min_Max_Data.setup create_connection_fn setup.table_builder
group_builder.teardown <|
data.teardown
suite_builder.group prefix+"(Column_Operations_Spec) Min/Max Operations" group_builder->
data = Min_Max_Data.setup setup.table_builder
group_builder.specify "should allow one or more args and return the correct type" <|
c1 = data.a.min 2
c1.to_vector . should_equal [1, 2, 2]
@ -1418,12 +1306,8 @@ add_specs suite_builder setup =
result.value_type . should_equal Value_Type.Char
result . to_vector . should_equal expected
suite_builder.group prefix+"replace: literal text pattern and replacement" group_builder->
data = Literal_Data.setup create_connection_fn setup.table_builder
group_builder.teardown <|
data.teardown
suite_builder.group prefix+"(Column_Operations_Spec) replace: literal text pattern and replacement" group_builder->
data = Literal_Data.setup setup.table_builder
group_builder.specify "case_sensitivity=sensitive/default use_regex=false only_first=false"
do_replace data.col0 'hello' 'bye' expected=['bye Hello', 'bye bye', 'HELLO HELLO']
do_replace data.col0 'hello' 'bye' case_sensitivity=Case_Sensitivity.Default expected=['bye Hello', 'bye bye', 'HELLO HELLO']
@ -1442,15 +1326,8 @@ add_specs suite_builder setup =
do_replace data.col0 'hello' 'bye' case_sensitivity=Case_Sensitivity.Insensitive only_first=True expected=['bye Hello', 'bye hello', 'bye HELLO']
do_replace data.col1 'a[bcd]' 'hey' case_sensitivity=Case_Sensitivity.Insensitive only_first=True expected=['hey A[bCd] a[bcd]', 'abac ad Ab aCAd']
suite_builder.group prefix+"replace: literal regex pattern and replacement" group_builder->
data = Literal_Data.setup create_connection_fn setup.table_builder
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
suite_builder.group prefix+"(Column_Operations_Spec) replace: literal regex pattern and replacement" group_builder->
data = Literal_Data.setup setup.table_builder
group_builder.specify "case_sensitivity=sensitive/default use_regex=True only_first=false"
do_replace data.col1 'a[bcd]'.to_regex 'hey' expected=['a[bcd] A[bCd] a[bcd]', 'heyhey hey Ab aCAd']
@ -1468,22 +1345,14 @@ add_specs suite_builder setup =
col = table_builder [["x", [regex]]] . at "x"
do_replace col regex "asdf" ["asdf"]
suite_builder.group prefix+"replace: Named_Pattern and replacement" group_builder->
data = Literal_Data.setup create_connection_fn setup.table_builder
group_builder.teardown <|
data.teardown
suite_builder.group prefix+"(Column_Operations_Spec) replace: Named_Pattern and replacement" group_builder->
data = Literal_Data.setup setup.table_builder
group_builder.specify "can use Named_Patterns" <|
do_replace data.col1 Named_Pattern.Symbols 'hey' expected=['aheybcdhey AheybCdhey aheybcdhey', 'abac ad Ab aCAd']
suite_builder.group prefix+"replace: pattern and replacement columns" group_builder->
data = Replace_Data.setup create_connection_fn setup.table_builder
group_builder.teardown <|
data.teardown
suite_builder.group prefix+"(Column_Operations_Spec) replace: pattern and replacement columns" group_builder->
data = Replace_Data.setup setup.table_builder
group_builder.specify "case_sensitivity=sensitive/default use_regex=false only_first=false"
do_replace data.col data.patterns data.replacements expected=['bye Hello', 'bye bye', 'HELLO HELLO', 'hey A[bCd] hey', 'abac ad Ab aCAd']
do_replace data.col data.patterns data.replacements case_sensitivity=Case_Sensitivity.Default expected=['bye Hello', 'bye bye', 'HELLO HELLO', 'hey A[bCd] hey', 'abac ad Ab aCAd']
@ -1498,15 +1367,8 @@ add_specs suite_builder setup =
group_builder.specify "case_sensitivity=insensitive use_regex=false only_first=true"
do_replace data.col data.patterns data.replacements case_sensitivity=Case_Sensitivity.Insensitive only_first=True expected=['bye Hello', 'bye hello', 'bye HELLO', 'hey A[bCd] a[bcd]', 'abac ad Ab aCAd']
suite_builder.group prefix+"replace: empty table and nothings" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
suite_builder.group prefix+"(Column_Operations_Spec) replace: empty table and nothings" group_builder->
table_builder = build_sorted_table table_builder=setup.light_table_builder
group_builder.specify "should work on empty tables" <|
col = table_builder [["x", ['hello Hello']]] . filter "x" (Filter_Condition.Is_Nothing) . at "x"
do_replace col 'hello' 'bye' expected=[]
@ -1515,15 +1377,8 @@ add_specs suite_builder setup =
col = table_builder [["x", ['hello Hello', Nothing]]] . filter "x" (Filter_Condition.Is_Nothing) . at "x"
do_replace col 'hello' 'bye' expected=[Nothing]
if setup.is_database then suite_builder.group prefix+"replace: DB specific edge-cases" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
if setup.is_database then suite_builder.group prefix+"(Column_Operations_Spec) replace: DB specific edge-cases" group_builder->
table_builder = build_sorted_table table_builder=setup.light_table_builder
group_builder.specify "should not allow Case_Sensitivity.Insensitive with a non-default locale" <|
col = table_builder [["A", ["Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot"]]] . at 'A'
locale = Locale.new "en" "GB" "UTF-8"
@ -1543,17 +1398,10 @@ add_specs suite_builder setup =
if supported_replace_params.contains (Replace_Params.Value DB_Column Case_Sensitivity.Default False) then
col.text_replace patterns replacements . name . should_equal 'replace([x], [patterns], [replacements])'
suite_builder.group prefix+"Column Operations - Text Replace (in-memory only)" group_builder->
suite_builder.group prefix+"(Column_Operations_Spec) Column Operations - Text Replace (in-memory only)" group_builder->
if setup.is_database.not then
data = Text_Replace_Data.setup create_connection_fn setup.table_builder
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
data = Text_Replace_Data.setup setup.table_builder
group_builder.specify "should allow simple replacing" <|
data.a.text_replace "a" "#" . to_vector . should_equal ["Alph#", "Br#vo", "Ch#rlie", "Delt#", "Echo", "Foxtrot"]
data.a.text_replace "o" "#" . to_vector . should_equal ["Alpha", "Brav#", "Charlie", "Delta", "Ech#", "F#xtr#t"]
@ -1616,12 +1464,8 @@ add_specs suite_builder setup =
vt3.should_be_a (Value_Type.Char ...)
vt3.variable_length.should_be_true
suite_builder.group prefix+"Column Operations - Text Trim" group_builder->
data = Trim_Data.setup create_connection_fn setup.table_builder
group_builder.teardown <|
data.teardown
suite_builder.group prefix+"(Column_Operations_Spec) Column Operations - Text Trim" group_builder->
data = Trim_Data.setup setup.table_builder
group_builder.specify "should trim whitespace by default" <|
with_mixed_columns_if_supported [["A", [" A ", ' \t\n\rA\r\n\t ', "xxxAxx"]]] t->
a = t.at "A"
@ -1643,15 +1487,8 @@ add_specs suite_builder setup =
data.a.trim what=1 . should_fail_with Invalid_Value_Type
data.a.trim what=data.c . should_fail_with Invalid_Value_Type
suite_builder.group prefix+"Other Column Operations" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
suite_builder.group prefix+"(Column_Operations_Spec) Other Column Operations" group_builder->
table_builder = build_sorted_table table_builder=setup.light_table_builder
group_builder.specify "is_in" <|
t = table_builder [["X", [1, 2, 3, 4]], ["Y", [4, 3, 100, 200]]]
x = t.at "X"
@ -1665,15 +1502,8 @@ add_specs suite_builder setup =
c2.to_vector . should_equal [False, False, True, True]
c2.value_type.should_equal Value_Type.Boolean
suite_builder.group prefix+"Colum Operations - Names" group_builder->
data = Names_Data.setup create_connection_fn setup.table_builder
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
suite_builder.group prefix+"(Column_Operations_Spec) Colum Operations - Names" group_builder->
data = Names_Data.setup setup.table_builder
group_builder.specify "arithmetic" <|
((data.t.at "a") + 42) . name . should_equal "[a] + 42"
((data.t.at "a") - (data.t.at "c")) . name . should_equal "[a] - [c]"
@ -1737,15 +1567,7 @@ add_specs suite_builder setup =
data.t.at "a" . map (x -> x + 1) . name . should_equal "a"
data.t.at "a" . zip (data.t.at "b") [_, _] . name . should_equal "[a] x [b]"
suite_builder.group prefix+"Column.rename" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
suite_builder.group prefix+"(Column_Operations_Spec) Column.rename" group_builder->
group_builder.specify "should not allow illegal names" <|
t = table_builder [["a", [1, 2, 3]]]
c = t.at "a"
@ -1755,15 +1577,7 @@ add_specs suite_builder setup =
c.rename 'a\0b' . should_fail_with Invalid_Column_Names
c.rename '\0' . should_fail_with Invalid_Column_Names
suite_builder.group prefix+"Column.const" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
suite_builder.group prefix+"(Column_Operations_Spec) Column.const" group_builder->
group_builder.specify "Should allow the creation of constant columns" <|
t = table_builder [["x", ["1", "2", "3"]]]
t.at "x" . const True . to_vector . should_equal [True, True, True]
@ -1779,15 +1593,7 @@ add_specs suite_builder setup =
t = table_builder [["x", ["1", "2", "3"]]]
t.at "x" . const (t.at "x") . should_fail_with Illegal_Argument
suite_builder.group prefix+"Table.make_constant_column" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
suite_builder.group prefix+"(Column_Operations_Spec) Table.make_constant_column" group_builder->
group_builder.specify "Should allow the creation of constant columns" <|
t = table_builder [["x", ["1", "2", "3"]]]
t.make_constant_column True . to_vector . should_equal [True, True, True]
@ -1821,15 +1627,8 @@ add_specs suite_builder setup =
(empty.set c).at c.name . value_type . should_equal Value_Type.Mixed
decimal_db_pending = if setup.is_database then "Decimals are currently not implemented for the Database backend."
suite_builder.group prefix+"Decimal" pending=decimal_db_pending group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder cols =
setup.table_builder cols connection=data.connection
suite_builder.group prefix+"(Column_Operations_Spec) Decimal" pending=decimal_db_pending group_builder->
table_builder = setup.light_table_builder
group_builder.specify "can store and retrieve values" <|
t = table_builder [["x", [Decimal.new "23257245345.345345345"]]]
t.at "x" . at 0 . should_be_a Decimal

View File

@ -61,7 +61,10 @@ type Test_Setup
support particular features.
- create_connection_func: A function that takes Nothing and creates a related
database connection or Nothing for in-memory tests.
Config prefix table_fn empty_table_fn (table_builder : (Vector Any -> (Any|Nothing)) -> Any) materialize is_database test_selection aggregate_test_selection create_connection_func
- light_table_builder: Like `table_builder`, but for Database backends it
will not create a new table, but instead just create a query with the values inlined.
Note that it has less guarantees about column types, but is faster.
Config prefix table_fn empty_table_fn (table_builder : (Vector Any -> (Any|Nothing)) -> Any) materialize is_database test_selection aggregate_test_selection create_connection_func light_table_builder
## Specifies if the given Table backend supports custom Enso types.
@ -122,7 +125,26 @@ type Test_Selection
columns.
- supported_replace_params: Specifies the possible values of
Replace_Params that a backend supports.
Config supports_case_sensitive_columns=True order_by=True natural_ordering=False case_insensitive_ordering=True order_by_unicode_normalization_by_default=False case_insensitive_ascii_only=False allows_mixed_type_comparisons=True supports_unicode_normalization=False is_nan_and_nothing_distinct=True distinct_returns_first_row_from_group_if_ordered=True date_time=True fixed_length_text_columns=False length_restricted_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=False different_size_integer_types=True supports_8bit_integer=False supports_decimal_type=False supports_time_duration=False supports_nanoseconds_in_time=False supports_mixed_columns=False supported_replace_params=Nothing
- run_advanced_edge_case_tests_by_default: A flag that tells whether to
always run some more advanced tests. All backends will run basic tests
that ensure that error checking is performed. However, when this flag
is set to `False`, some more advanced edge-case tests may be disabled
to save time.
This flag must be set to `True` on the in-memory backend to ensure our own
implementation is thoroughly tested. It should also be enabled in fast,
local backends like SQLite or Postgres. Backends that speak to remote
databases may have this flag disabled to save time.
If `ENSO_ADVANCED_EDGE_CASE_TESTS` environment variable is set to `True`,
the advanced tests are ran regardless of this setting.
Config supports_case_sensitive_columns=True order_by=True natural_ordering=False case_insensitive_ordering=True order_by_unicode_normalization_by_default=False case_insensitive_ascii_only=False allows_mixed_type_comparisons=True supports_unicode_normalization=False is_nan_and_nothing_distinct=True distinct_returns_first_row_from_group_if_ordered=True date_time=True fixed_length_text_columns=False length_restricted_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=False different_size_integer_types=True supports_8bit_integer=False supports_decimal_type=False supports_time_duration=False supports_nanoseconds_in_time=False supports_mixed_columns=False supported_replace_params=Nothing run_advanced_edge_case_tests_by_default=True
## Specifies if the advanced edge case tests shall be run.
These tests are always run if `run_advanced_edge_case_tests_by_default`
flag is set to `True`. If that flag is set to `False`, they are only run
if the `ENSO_ADVANCED_EDGE_CASE_TESTS` environment variable is set.
run_advanced_edge_case_tests self -> Boolean =
self.run_advanced_edge_case_tests_by_default || (Environment.get "ENSO_ADVANCED_EDGE_CASE_TESTS" . is_nothing . not)
add_specs suite_builder setup =
Core_Spec.add_specs suite_builder setup

View File

@ -25,7 +25,6 @@ type Data
add_specs suite_builder setup =
prefix = setup.prefix
create_connection_fn = setup.create_connection_func
# We cannot create a column of Nothing/NULL in the database without casting it to a non-mixed type.
type_for_nothing_column = if setup.is_database then Value_Type.Char else Value_Type.Mixed
@ -39,27 +38,29 @@ add_specs suite_builder setup =
[[Date.new 2024 1 3, Date.new 2024 1 4, Value_Type.Date]]
+ [[Date_Time.new 2024 1 3 2 30 10 zone=zone, Date_Time.new 2024 1 3 2 30 11 zone=zone, Value_Type.Date_Time]]
+ [[Time_Of_Day.new 2 30 10, Time_Of_Day.new 2 30 11, Value_Type.Time]]
values_without_nothing = [[False, True, Value_Type.Boolean]]
+ [["abcd", "efgh", Value_Type.Char]]
+ [[12, 13, Value_Type.Integer]]
+ [[9223372036854775806, 9223372036854775807, Value_Type.Integer]]
+ [[12.3, 23.4, Value_Type.Float]]
+ date_time_values
+ mixed_values
basic_values_without_nothing = [[False, True, Value_Type.Boolean]]
+ [["abcd", "efgh", Value_Type.Char]]
advanced_values_without_nothing = [[12, 13, Value_Type.Integer]]
+ [[9223372036854775806, 9223372036854775807, Value_Type.Integer]]
+ [[12.3, 23.4, Value_Type.Float]]
+ date_time_values
+ mixed_values
values_without_nothing = if setup.test_selection.run_advanced_edge_case_tests then basic_values_without_nothing + advanced_values_without_nothing else basic_values_without_nothing
values_with_nothing = values_without_nothing + [[Nothing, Nothing, type_for_nothing_column]]
suite_builder.group prefix+"Comparisons" group_builder->
data = Data.setup create_connection_fn
table_builder = setup.light_table_builder
table_builder_typed columns value_type =
## TODO light_table_builder fails here with
Cannot cast Unsupported_Data_Type (NULL) type into ...
#raw_table = setup.light_table_builder columns
#raw_table.cast raw_table.column_names value_type
# TODO maybe use different builder depending on DB??
cast_columns = columns.map c->
Column.from_vector (c.at 0) (c.at 1) . cast value_type
setup.table_builder cast_columns
group_builder.teardown <|
data.teardown
table_builder_typed columns value_type =
cast_columns = columns.map c->
Column.from_vector (c.at 0) (c.at 1) . cast value_type
setup.table_builder cast_columns connection=data.connection
comparisons = [["==", ==], ["!=", !=], ["<", <], ["<=", <=], [">", >], [">=", >=]]
suite_builder.group prefix+"(Nothing_Spec) Comparisons" group_builder->
comparisons = [["==", ==], ["!=", !=], ["<", <]] + (if setup.test_selection.run_advanced_edge_case_tests then [["<=", <=], [">", >], [">=", >=]] else [])
comparisons.map pair->
comparison_name = pair.at 0
@ -69,32 +70,24 @@ add_specs suite_builder setup =
value = triple.at 0
value_type = triple.at 2
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+" "+comparison_name+" Nothing value" <|
# TODO reuse tables between tests, here and later
group_builder.specify "Correctly handle Nothing in: "+value.to_text+" ("+value_type.to_text+") "+comparison_name+" Nothing value" <|
table = table_builder_typed [["x", [value]], ["n", [Nothing]]] value_type
co = comparison (table.at "x") Nothing
co.to_vector . should_equal [Nothing]
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+" "+comparison_name+" Nothing column" <|
table = table_builder_typed [["x", [value]], ["n", [Nothing]]] value_type
co = comparison (table.at "x") (table.at "n")
co.to_vector . should_equal [Nothing]
if setup.test_selection.run_advanced_edge_case_tests then
group_builder.specify "Correctly handle Nothing in: "+value.to_text+" ("+value_type.to_text+") "+comparison_name+" Nothing column" <|
table = table_builder_typed [["x", [value]], ["n", [Nothing]]] value_type
co = comparison (table.at "x") (table.at "n")
co.to_vector . should_equal [Nothing]
group_builder.specify "Correctly handle Nothing in: Nothing column "+comparison_name+" "+value_type.to_text <|
table = table_builder_typed [["x", [value]], ["n", [Nothing]]] value_type
co = comparison (table.at "n") (table.at "x")
co.to_vector . should_equal [Nothing]
suite_builder.group prefix+"between" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder_typed columns value_type =
cast_columns = columns.map c->
Column.from_vector (c.at 0) (c.at 1) . cast value_type
setup.table_builder cast_columns connection=data.connection
group_builder.specify "Correctly handle Nothing in: Nothing column "+comparison_name+" "+value.to_text+" ("+value_type.to_text+")" <|
table = table_builder_typed [["x", [value]], ["n", [Nothing]]] value_type
co = comparison (table.at "n") (table.at "x")
co.to_vector . should_equal [Nothing]
suite_builder.group prefix+"(Nothing_Spec) between" group_builder->
values_with_nothing.map triple->
value = triple.at 0
value_type = triple.at 2
@ -104,37 +97,28 @@ add_specs suite_builder setup =
co = table.at "n" . between (table.at "x") (table.at "y")
co.to_vector . should_equal [Nothing]
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+" between Nothing column and "+value_type.to_text <|
table = table_builder_typed [["x", [value]], ["y", [value]], ["n", [Nothing]]] value_type
co = table.at "x" . between (table.at "n") (table.at "y")
co.to_vector . should_equal [Nothing]
if setup.test_selection.run_advanced_edge_case_tests then
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+" between Nothing column and "+value_type.to_text <|
table = table_builder_typed [["x", [value]], ["y", [value]], ["n", [Nothing]]] value_type
co = table.at "x" . between (table.at "n") (table.at "y")
co.to_vector . should_equal [Nothing]
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+" between "+value_type.to_text+" and Nothing column" <|
table = table_builder_typed [["x", [value]], ["y", [value]], ["n", [Nothing]]] value_type
co = table.at "x" . between (table.at "y") (table.at "n")
co.to_vector . should_equal [Nothing]
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+" between "+value_type.to_text+" and Nothing column" <|
table = table_builder_typed [["x", [value]], ["y", [value]], ["n", [Nothing]]] value_type
co = table.at "x" . between (table.at "y") (table.at "n")
co.to_vector . should_equal [Nothing]
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+" between Nothing value and "+value_type.to_text <|
table = table_builder_typed [["x", [value]], ["y", [value]], ["n", [Nothing]]] value_type
co = table.at "x" . between Nothing (table.at "y")
co.to_vector . should_equal [Nothing]
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+" between Nothing value and "+value_type.to_text <|
table = table_builder_typed [["x", [value]], ["y", [value]], ["n", [Nothing]]] value_type
co = table.at "x" . between Nothing (table.at "y")
co.to_vector . should_equal [Nothing]
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+" between "+value_type.to_text+" and Nothing value" <|
table = table_builder_typed [["x", [value]], ["y", [value]], ["n", [Nothing]]] value_type
co = table.at "x" . between (table.at "y") Nothing
co.to_vector . should_equal [Nothing]
suite_builder.group prefix+"is_nothing" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder_typed columns value_type =
cast_columns = columns.map c->
Column.from_vector (c.at 0) (c.at 1) . cast value_type
setup.table_builder cast_columns connection=data.connection
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+" between "+value_type.to_text+" and Nothing value" <|
table = table_builder_typed [["x", [value]], ["y", [value]], ["n", [Nothing]]] value_type
co = table.at "x" . between (table.at "y") Nothing
co.to_vector . should_equal [Nothing]
suite_builder.group prefix+"(Nothing_Spec) is_nothing" group_builder->
values_with_nothing.map triple->
value = triple.at 0
value_type = triple.at 2
@ -144,30 +128,12 @@ add_specs suite_builder setup =
co = table.at "x" . is_nothing
co.to_vector . should_equal [value == Nothing]
suite_builder.group prefix+"not" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder columns =
setup.table_builder columns connection=data.connection
suite_builder.group prefix+"(Nothing_Spec) not" group_builder->
group_builder.specify "Correctly handle Nothing in .not" <|
table = table_builder [["x", [True, False, Nothing]]]
table = setup.light_table_builder [["x", [True, False, Nothing]]]
table.at "x" . not . to_vector . should_equal [False, True, Nothing]
suite_builder.group prefix+"is_in" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder_typed columns value_type =
cast_columns = columns.map c->
Column.from_vector (c.at 0) (c.at 1) . cast value_type
setup.table_builder cast_columns connection=data.connection
suite_builder.group prefix+"(Nothing_Spec) is_in" group_builder->
values_with_nothing.map triple->
value = triple.at 0
other_value = triple.at 1
@ -187,41 +153,32 @@ add_specs suite_builder setup =
table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type
table.at "x" . is_in (table.at "y") . to_vector . should_equal [Nothing, Nothing]
group_builder.specify "Correctly handle Nothing in: Nothing.is_in Column with Nothings (returning Nothing)" <|
table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type
table.at "n" . is_in (table.at "x") . to_vector . should_equal [Nothing, Nothing]
if setup.test_selection.run_advanced_edge_case_tests then
group_builder.specify "Correctly handle Nothing in: Nothing.is_in Column with Nothings (returning Nothing)" <|
table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type
table.at "n" . is_in (table.at "x") . to_vector . should_equal [Nothing, Nothing]
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Vector (returning True)" <|
table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type
table.at "x" . is_in (table.at "z" . to_vector) . to_vector . should_equal [true_if_not_nothing, Nothing]
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Vector (returning True)" <|
table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type
table.at "x" . is_in (table.at "z" . to_vector) . to_vector . should_equal [true_if_not_nothing, Nothing]
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Vector with Nothings (returning True)" <|
table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type
table.at "x" . is_in (table.at "x" . to_vector) . to_vector . should_equal [true_if_not_nothing, Nothing]
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Vector with Nothings (returning True)" <|
table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type
table.at "x" . is_in (table.at "x" . to_vector) . to_vector . should_equal [true_if_not_nothing, Nothing]
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Vector with Nothings (returning Nothing)" <|
table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type
table.at "x" . is_in (table.at "y" . to_vector) . to_vector . should_equal [Nothing, Nothing]
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in Vector with Nothings (returning Nothing)" <|
table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type
table.at "x" . is_in (table.at "y" . to_vector) . to_vector . should_equal [Nothing, Nothing]
group_builder.specify "Correctly handle Nothing in: Nothing.is_in Vector with Nothings (returning Nothing)" <|
table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type
table.at "n" . is_in (table.at "x" . to_vector) . to_vector . should_equal [Nothing, Nothing]
group_builder.specify "Correctly handle Nothing in: Nothing.is_in Vector with Nothings (returning Nothing)" <|
table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type
table.at "n" . is_in (table.at "x" . to_vector) . to_vector . should_equal [Nothing, Nothing]
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in empty Vector (returning False)" <|
table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type
table.at "x" . is_in [] . to_vector . should_equal [False, False]
suite_builder.group prefix+"Boolean is_in" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder_typed columns value_type =
cast_columns = columns.map c->
Column.from_vector (c.at 0) (c.at 1) . cast value_type
setup.table_builder cast_columns connection=data.connection
group_builder.specify "Correctly handle Nothing in: "+value_type.to_text+".is_in empty Vector (returning False)" <|
table = table_builder_typed [["x", [value, Nothing]], ["y", [other_value, Nothing]], ["z", [value, other_value]], ["n", [Nothing, Nothing]]] value_type
table.at "x" . is_in [] . to_vector . should_equal [False, False]
if setup.test_selection.run_advanced_edge_case_tests then suite_builder.group prefix+"(Nothing_Spec) is_in: Boolean+Nothing edge cases" group_builder->
make_containing_values had_null had_true had_false =
null_maybe = if had_null then [Nothing] else []
true_maybe = if had_true then [True] else []
@ -259,7 +216,7 @@ add_specs suite_builder setup =
output = cs.at 4
containing_values = make_containing_values (cs.at 1) (cs.at 2) (cs.at 3)
group_builder.specify "Boolean is_in: (Vector), "+negation_desc+" "+cs.to_text <|
group_builder.specify "Boolean is_in: edge cases (Vector), "+negation_desc+" "+cs.to_text <|
input_column = transform_input [input]
t = table_builder_typed [["input", input_column]] Value_Type.Boolean
@ -267,7 +224,7 @@ add_specs suite_builder setup =
c.to_vector . should_equal [output]
group_builder.specify "Boolean is_in: (Column), "+negation_desc+" "+cs.to_text <|
group_builder.specify "Boolean is_in: edge cases (Column), "+negation_desc+" "+cs.to_text <|
input_column = transform_input (Vector.fill containing_values.length input)
t = table_builder_typed [["input", input_column], ["containing", transform_argument containing_values]] Value_Type.Boolean
expected_output = if input_column.is_empty then [] else [output]
@ -277,20 +234,7 @@ add_specs suite_builder setup =
c.to_vector . length . should_equal input_column.length
c.to_vector.distinct . should_equal expected_output
suite_builder.group prefix+"distinct" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder columns =
setup.table_builder columns connection=data.connection
table_builder_typed columns value_type =
cast_columns = columns.map c->
Column.from_vector (c.at 0) (c.at 1) . cast value_type
setup.table_builder cast_columns connection=data.connection
suite_builder.group prefix+"(Nothing_Spec) distinct" group_builder->
values_without_nothing.map triple->
value = triple.at 0
other_value = triple.at 1
@ -309,17 +253,7 @@ add_specs suite_builder setup =
v = t1.at "x" . to_vector
v . should_equal [Nothing]
suite_builder.group prefix+"sort" group_builder->
data = Data.setup create_connection_fn
group_builder.teardown <|
data.teardown
table_builder_typed columns value_type =
cast_columns = columns.map c->
Column.from_vector (c.at 0) (c.at 1) . cast value_type
setup.table_builder cast_columns connection=data.connection
suite_builder.group prefix+"(Nothing_Spec) sort" group_builder->
values_with_nothing.map triple->
value = triple.at 0
other_value = triple.at 1

View File

@ -5,13 +5,11 @@ from Standard.Test import Test
import project.Database.Codegen_Spec
import project.Database.SQLite_Spec
import project.Database.Postgres_Spec
import project.Database.Redshift_Spec
add_specs suite_builder =
Codegen_Spec.add_specs suite_builder
SQLite_Spec.add_specs suite_builder
Postgres_Spec.add_specs suite_builder
Redshift_Spec.add_specs suite_builder
main filter=Nothing =
suite = Test.build suite_builder->

View File

@ -609,35 +609,39 @@ postgres_specific_spec suite_builder create_connection_fn db_name setup =
do_round data 231 . should_be_a Float
do_round data 231 -1 . should_be_a Float
type Lazy_Ref
Value ~get
add_postgres_specs suite_builder create_connection_fn db_name =
prefix = "[PostgreSQL] "
name_counter = Ref.new 0
table_builder columns connection=(create_connection_fn Nothing) =
default_connection = Lazy_Ref.Value (create_connection_fn Nothing)
table_builder columns connection=Nothing =
ix = name_counter.get
name_counter . put ix+1
name = Name_Generator.random_name "table_"+ix.to_text
in_mem_table = Table.new columns
in_mem_table.select_into_database_table connection name primary_key=Nothing temporary=True
in_mem_table.select_into_database_table (connection.if_nothing default_connection.get) name primary_key=Nothing temporary=True
light_table_builder columns =
default_connection.get.base_connection.create_literal_table (Table.new columns) "literal_table"
materialize = .read
Common_Spec.add_specs suite_builder prefix create_connection_fn
common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by_unicode_normalization_by_default=True allows_mixed_type_comparisons=False fixed_length_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=True supports_decimal_type=True supported_replace_params=supported_replace_params
common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by_unicode_normalization_by_default=True allows_mixed_type_comparisons=False fixed_length_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=True supports_decimal_type=True supported_replace_params=supported_replace_params run_advanced_edge_case_tests_by_default=True
aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last_row_order=False aggregation_problems=False
agg_in_memory_table = (enso_project.data / "data.csv") . read
agg_table_fn = _->
connection = create_connection_fn Nothing
agg_in_memory_table.select_into_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True
agg_in_memory_table.select_into_database_table default_connection.get (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True
empty_agg_table_fn = _->
connection = create_connection_fn Nothing
(agg_in_memory_table.take (First 0)).select_into_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True
(agg_in_memory_table.take (First 0)).select_into_database_table default_connection.get (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True
setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_fn
setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_fn light_table_builder=light_table_builder
postgres_specific_spec suite_builder create_connection_fn db_name setup
Common_Table_Operations.Main.add_specs suite_builder setup

View File

@ -4,7 +4,6 @@ from Standard.Base.Runtime import assert
import Standard.Base.Errors.File_Error.File_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
from Standard.Table import Table, Value_Type, Bits
from Standard.Table.Errors import Invalid_Column_Names, Duplicate_Output_Column_Names
@ -308,24 +307,31 @@ sqlite_specific_spec suite_builder prefix create_connection_func setup =
t = table_builder [["x", ["1", "2", "3"]]] connection=data.connection
t.at "x" . const (Date.new 12 4 12) . should_fail_with Unsupported_Database_Operation
type Lazy_Ref
Value ~get
sqlite_spec suite_builder prefix create_connection_func =
name_counter = Ref.new 0
# We keep a default connection to avoid creating connections each time.
default_connection = Lazy_Ref.Value (create_connection_func Nothing)
# The default `connection` parameter always create a new connection.
# In some tests, for example, where we are joining tables, we have to specify
# exactly the same connection.
table_builder columns connection=(create_connection_func Nothing) =
table_builder columns connection=Nothing =
ix = name_counter.get
name_counter . put ix+1
name = Name_Generator.random_name "table_"+ix.to_text
in_mem_table = Table.new columns
in_mem_table.select_into_database_table connection name primary_key=Nothing
in_mem_table.select_into_database_table (connection.if_nothing default_connection.get) name primary_key=Nothing
light_table_builder columns =
default_connection.get.base_connection.create_literal_table (Table.new columns) "literal_table"
materialize = .read
Common_Spec.add_specs suite_builder prefix create_connection_func
common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=False natural_ordering=False case_insensitive_ordering=True case_insensitive_ascii_only=True is_nan_and_nothing_distinct=False date_time=False supported_replace_params=supported_replace_params different_size_integer_types=False length_restricted_text_columns=False
common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=False natural_ordering=False case_insensitive_ordering=True case_insensitive_ascii_only=True is_nan_and_nothing_distinct=False date_time=False supported_replace_params=supported_replace_params different_size_integer_types=False length_restricted_text_columns=False run_advanced_edge_case_tests_by_default=True
## For now `advanced_stats`, `first_last`, `text_shortest_longest` and
`multi_distinct` remain disabled, because SQLite does not provide the
@ -339,14 +345,12 @@ sqlite_spec suite_builder prefix create_connection_func =
agg_in_memory_table = (enso_project.data / "data.csv") . read
agg_table_fn = _ ->
connection = create_connection_func Nothing
agg_in_memory_table.select_into_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True
agg_in_memory_table.select_into_database_table default_connection.get (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True
empty_agg_table_fn = _ ->
connection = create_connection_func Nothing
(agg_in_memory_table.take (First 0)).select_into_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True
(agg_in_memory_table.take (First 0)).select_into_database_table default_connection.get (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True
setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_func
setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_func light_table_builder=light_table_builder
sqlite_specific_spec suite_builder prefix create_connection_func setup
Common_Table_Operations.Main.add_specs suite_builder setup

File diff suppressed because one or more lines are too long

View File

@ -10,7 +10,7 @@ type Dummy_Connection
close = Nothing
add_specs suite_builder =
selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True natural_ordering=True case_insensitive_ordering=True order_by_unicode_normalization_by_default=True supports_unicode_normalization=True supports_time_duration=True supports_nanoseconds_in_time=True supports_mixed_columns=True fixed_length_text_columns=True supports_8bit_integer=True
selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True natural_ordering=True case_insensitive_ordering=True order_by_unicode_normalization_by_default=True supports_unicode_normalization=True supports_time_duration=True supports_nanoseconds_in_time=True supports_mixed_columns=True fixed_length_text_columns=True supports_8bit_integer=True run_advanced_edge_case_tests_by_default=True
aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config
agg_table_fn = _ ->
@ -25,10 +25,12 @@ add_specs suite_builder =
table_builder columns connection=Nothing =
_ = connection
Table.new columns
light_table_builder columns =
Table.new columns
create_connection_func = _-> Dummy_Connection
setup = Common_Table_Operations.Main.Test_Setup.Config "[In-Memory] " agg_table_fn empty_table_fn table_builder materialize is_database=False test_selection=selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_func
setup = Common_Table_Operations.Main.Test_Setup.Config "[In-Memory] " agg_table_fn empty_table_fn table_builder materialize is_database=False test_selection=selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_func light_table_builder=light_table_builder
Common_Table_Operations.Main.add_specs suite_builder setup

View File

@ -102,6 +102,9 @@ set_writable file writable =
be ordered arbitrarily (we treat them as belonging to the same equivalence
class), but the ordering between elements from different inner vectors must
be preserved.
For example, if `example` is `[[1, 2], [3]]`, then both `[1, 2, 3]` and
`[2, 1, 3]` will be accepted, but `[2, 3, 1]` will be rejected.
Any.should_have_relative_ordering self (example : Vector) =
loc = Meta.get_source_location 1
case self of