Mirror of https://github.com/enso-org/enso.git, synced 2024-11-22 22:10:15 +03:00
Fixing Database tests and Snowflake Dialect - part 3 out of ... (#10458)
- Related to #9486
- Fixes types in literal tables that are used throughout the tests
- Tries to make testing faster by disabling some edge cases, batching some queries, re-using the main connection, and trying to re-use tables more
- Implements date/time type mapping and operations for Snowflake
- Updates type mapping to correctly reflect what Snowflake does
- Disables warnings for Integer->Decimal coercion, as that conversion is implicitly understood in Snowflake and warning about it would be too noisy
- Allows selecting a Decimal column with `..By_Type ..Integer` (only in the Snowflake backend), because the Decimal column there is the de-facto replacement for the Integer column
This commit is contained in:
parent
ce6995c83f
commit
48c17845a7
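
The last bullet can be illustrated with a short, hypothetical sketch (the table variable and the exact selector call are assumptions, not code from this commit):

    # Under the Snowflake backend an integer column is stored as NUMBER(38, 0)
    # and reported as Decimal, yet an Integer By_Type selector still matches it
    # thanks to the dialect's `is_same_type` override introduced below.
    select_integer_like table =
        table.select_columns [..By_Type ..Integer]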
@@ -122,8 +122,10 @@ type Redshift_Dialect
         Internal_Column.Value column.name new_sql_type_reference new_expression

     ## PRIVATE
-    needs_execute_query_for_type_inference : Boolean
-    needs_execute_query_for_type_inference self = False
+    needs_execute_query_for_type_inference : Text | SQL_Statement -> Boolean
+    needs_execute_query_for_type_inference self statement =
+        _ = statement
+        False

     ## PRIVATE
     supports_separate_nan : Boolean
@@ -198,3 +200,9 @@ type Redshift_Dialect
         ## TODO special behaviour for big integer columns should be added here, once we start testing this dialect again
            See: https://docs.aws.amazon.com/redshift/latest/dg/r_Numeric_types201.html#r_Numeric_types201-decimal-or-numeric-type
         column.value_type
+
+    ## PRIVATE
+    needs_literal_table_cast : Value_Type -> Boolean
+    needs_literal_table_cast self value_type =
+        _ = value_type
+        False
@@ -318,18 +318,26 @@ type Connection

        Arguments:
        - statement: SQL_Statement to execute.
-       - column_type_suggestions: A vector of SQL type references that can act
-         as suggested column types. By default, the overrides are respected and
-         types that should be computed by the database are passed as `Nothing`
-         to ensure that default `ResultSet` metadata is used for these columns.
+       - column_types: A vector of SQL type references that can act as suggested
+         column types. Only `Override` references override the type. Other kinds
+         of references do not influence the result. `Computed_By_Database`
+         references may get updated to cache the types fetched from the Database.
        - last_row_only: If set true, only the last row of the query is fetched.
          Defaults to false.
     read_statement : SQL_Statement -> (Nothing | Vector SQL_Type_Reference) -> Boolean -> Table
-    read_statement self statement column_type_suggestions=Nothing last_row_only=False =
-        type_overrides = self.dialect.get_type_mapping.prepare_type_overrides column_type_suggestions
+    read_statement self statement column_types=Nothing last_row_only=False =
+        type_overrides = self.dialect.get_type_mapping.prepare_type_overrides column_types
         statement_setter = self.dialect.get_statement_setter
         self.jdbc_connection.with_prepared_statement statement statement_setter stmt->
             rs = stmt.executeQuery
+
+            # If column types were provided, we will cache the types that were not yet cached.
+            column_types.if_not_nothing <|
+                metadata = rs.getMetaData
+                column_types.each_with_index ix-> sql_type_reference->
+                    sql_type_reference.cache_computed_type <| SQL_Type.from_metadata metadata ix+1
+
+            # And finally, materialize the results.
             SQL_Warning_Helper.process_warnings stmt <|
                 result_set_to_table rs self.dialect.get_type_mapping.make_column_fetcher type_overrides last_row_only

@@ -338,7 +346,7 @@ type Connection
        result set.
     fetch_columns : Text | SQL_Statement -> Statement_Setter -> Any
     fetch_columns self statement statement_setter =
-        needs_execute_query = self.dialect.needs_execute_query_for_type_inference
+        needs_execute_query = self.dialect.needs_execute_query_for_type_inference statement
         self.jdbc_connection.raw_fetch_columns statement needs_execute_query statement_setter

     ## PRIVATE
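
A minimal sketch of the intended calling pattern for the renamed parameter, using only names that appear in this diff (the helper itself is hypothetical):

    read_with_cached_types connection table =
        sql = table.to_sql
        # Passing the type references lets `read_statement` cache the types read
        # from the ResultSet metadata into any `Computed_By_Database` references,
        # so they no longer need a separate type-inference query.
        column_types = table.internal_columns.map .sql_type_reference
        connection.read_statement sql column_types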
@@ -181,6 +181,13 @@ type DB_Column
     inferred_precise_value_type self =
         self.value_type

+    ## PRIVATE
+       Internal hook that says if a given column should be selected by a
+       specific type in a `By_Type` selection.
+    should_be_selected_by_type self (value_type : Value_Type) -> Boolean =
+        type_mapping = self.connection.dialect.get_type_mapping
+        type_mapping.is_same_type self.value_type value_type
+
     ## ICON convert
        Returns an SQL statement that will be used for materializing this column.
     to_sql : SQL_Statement
@@ -1299,7 +1306,7 @@ type DB_Column
              Examples.text_column_1.text_left 5
     text_left : DB_Column|Integer -> DB_Column
     text_left self n =
-        Value_Type.expect_text self <| Value_Type.expect_integer n <|
+        Value_Type.expect_text self <| Helpers.expect_dialect_specific_integer_type self n <|
             n2 = n.max 0
             new_name = self.naming_helper.function_name "text_left" [self, n]
             self.make_binary_op "LEFT" n2 new_name
@@ -1320,7 +1327,7 @@ type DB_Column
              Examples.text_column_1.text_right 5
     text_right : DB_Column|Integer -> DB_Column
     text_right self n =
-        Value_Type.expect_text self <| Value_Type.expect_integer n <|
+        Value_Type.expect_text self <| Helpers.expect_dialect_specific_integer_type self n <|
             n2 = n.max 0
             new_name = self.naming_helper.function_name "text_right" [self, n]
             self.make_binary_op "RIGHT" n2 new_name
@@ -1618,9 +1625,10 @@ type DB_Column
         Value_Type.expect_type self .is_date_or_time "date/time" <|
             my_type = self.inferred_precise_value_type
             Value_Type.expect_type end (== my_type) my_type.to_display_text <|
-                Date_Time_Helpers.check_period_aligned_with_value_type my_type period <|
+                aligned_period = Date_Time_Helpers.align_period_with_value_type my_type period
+                aligned_period.if_not_error <|
                     new_name = self.naming_helper.function_name "date_diff" [self, end, period.to_display_text]
-                    metadata = self.connection.dialect.prepare_metadata_for_period period my_type
+                    metadata = self.connection.dialect.prepare_metadata_for_period aligned_period my_type
                     self.make_op "date_diff" [end] new_name metadata

     ## GROUP Standard.Base.DateTime
@@ -1647,10 +1655,11 @@ type DB_Column
     date_add self amount (period : Date_Period | Time_Period = default_date_period self) =
         Value_Type.expect_type self .is_date_or_time "date/time" <|
             my_type = self.inferred_precise_value_type
-            Value_Type.expect_integer amount <|
-                Date_Time_Helpers.check_period_aligned_with_value_type my_type period <|
+            Helpers.expect_dialect_specific_integer_type self amount <|
+                aligned_period = Date_Time_Helpers.align_period_with_value_type my_type period
+                aligned_period.if_not_error <|
                     new_name = self.naming_helper.function_name "date_add" [self, amount, period.to_display_text]
-                    metadata = self.connection.dialect.prepare_metadata_for_period period my_type
+                    metadata = self.connection.dialect.prepare_metadata_for_period aligned_period my_type
                     self.make_op "date_add" [amount] new_name metadata

     ## GROUP Standard.Base.Logical
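
With the alignment step in place, a `Time_Period.Day` passed to a Date column is swapped for `Date_Period.Day` instead of being rejected. A hedged sketch (the column variable is hypothetical):

    # Both calls should now be equivalent on a Date column, because
    # `align_period_with_value_type` swaps Time_Period.Day for Date_Period.Day
    # before the metadata is prepared.
    add_one_day date_column =
        a = date_column.date_add 1 Date_Period.Day
        b = date_column.date_add 1 Time_Period.Day
        [a, b]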
@@ -1085,7 +1085,13 @@ type DB_Table
             _ -> type_mapping.value_type_to_sql argument_value_type Problem_Behavior.Ignore
         expr = SQL_Expression.Constant value
         new_type_ref = SQL_Type_Reference.from_constant sql_type
-        DB_Column.Value value.pretty self.connection new_type_ref expr self.context
+        base_column = Internal_Column.Value value.pretty new_type_ref expr
+        needs_cast = argument_value_type.is_nothing.not && self.connection.dialect.needs_literal_table_cast argument_value_type
+        result_internal_column = if needs_cast.not then base_column else
+            infer_type_from_database new_expression =
+                SQL_Type_Reference.new self.connection self.context new_expression
+            self.connection.dialect.make_cast base_column sql_type infer_type_from_database
+        self.make_column result_internal_column

     ## PRIVATE
        Create a unique temporary column name.
@@ -1153,8 +1159,8 @@ type DB_Table
     last_row self =
         if self.internal_columns.is_empty then Error.throw (Illegal_Argument.Error "Cannot create a table with no columns.") else
             sql = self.to_sql
-            column_type_suggestions = self.internal_columns.map .sql_type_reference
-            table = self.connection.read_statement sql column_type_suggestions last_row_only=True
+            column_types = self.internal_columns.map .sql_type_reference
+            table = self.connection.read_statement sql column_types last_row_only=True
             table.rows.first

     ## ALIAS sort
@@ -2596,8 +2602,8 @@ type DB_Table
             Rows_To_Read.First_With_Warning n -> self.limit n+1

         sql = preprocessed.to_sql
-        column_type_suggestions = preprocessed.internal_columns.map .sql_type_reference
-        materialized_table = self.connection.read_statement sql column_type_suggestions . catch SQL_Error sql_error->
+        column_types = preprocessed.internal_columns.map .sql_type_reference
+        materialized_table = self.connection.read_statement sql column_types . catch SQL_Error sql_error->
            Error.throw (self.connection.dialect.get_error_mapper.transform_custom_errors sql_error)

        warnings_builder = Builder.new
@@ -3055,23 +3061,26 @@ make_literal_table connection column_vectors column_names alias =
    if total_size == 0 then Error.throw (Illegal_Argument.Error "Vectors cannot be empty") else
        if total_size > MAX_LITERAL_ELEMENT_COUNT then Error.throw (Illegal_Argument.Error "Too many elements for table literal ("+total_size.to_text+"): materialize a table into the database instead") else
            type_mapping = connection.dialect.get_type_mapping

-           values_to_type_ref column_vector =
-               value_type = Value_Type_Helpers.find_common_type_for_arguments column_vector
-               sql_type = case value_type of
-                   Nothing -> SQL_Type.null
-                   _ -> type_mapping.value_type_to_sql value_type Problem_Behavior.Ignore
-               SQL_Type_Reference.from_constant sql_type
-
            from_spec = From_Spec.Literal_Values column_vectors column_names alias
            context = Context.for_subquery from_spec

+           infer_type_from_database new_expression =
+               SQL_Type_Reference.new connection context new_expression
+
            internal_columns = 0.up_to column_vectors.length . map i->
                column_vector = column_vectors.at i
                column_name = column_names.at i
+
+               value_type = Value_Type_Helpers.find_common_type_for_arguments column_vector.to_vector
+               sql_type = case value_type of
+                   Nothing -> SQL_Type.null
+                   _ -> type_mapping.value_type_to_sql value_type Problem_Behavior.Ignore
-               type_ref = values_to_type_ref column_vector.to_vector
+               type_ref = SQL_Type_Reference.from_constant sql_type
                sql_expression = SQL_Expression.Column alias column_name
-               Internal_Column.Value column_name type_ref sql_expression
+               base_column = Internal_Column.Value column_name type_ref sql_expression
+
+               needs_cast = value_type.is_nothing.not && connection.dialect.needs_literal_table_cast value_type
+               if needs_cast.not then base_column else
+                   connection.dialect.make_cast base_column sql_type infer_type_from_database

            DB_Table.Value alias connection internal_columns context
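
The decision above hinges on the new `needs_literal_table_cast` hook: dialects that lose type fidelity in literal values (Snowflake with `Time` and `Date_Time`, per the dialect change further below) request an explicit cast around each literal column. A distilled sketch of that decision, using only names from this diff:

    wrap_if_needed connection base_column sql_type value_type infer_type_from_database =
        # Cast only when the dialect asks for it; otherwise keep the raw literal column.
        needs_cast = value_type.is_nothing.not && connection.dialect.needs_literal_table_cast value_type
        if needs_cast.not then base_column else
            connection.dialect.make_cast base_column sql_type infer_type_from_database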
@@ -127,8 +127,12 @@ type Dialect
        executing the query. In some however, like SQLite, this is insufficient
        and will yield incorrect results, so the query needs to be executed (even
        though the full results may not need to be streamed).
-    needs_execute_query_for_type_inference : Boolean
-    needs_execute_query_for_type_inference self =
+
+       The function takes the statement as an argument which can be used in
+       heuristics telling whether the execute is needed.
+    needs_execute_query_for_type_inference : Text | SQL_Statement -> Boolean
+    needs_execute_query_for_type_inference self statement =
+        _ = statement
         Unimplemented.throw "This is an interface only."

     ## PRIVATE
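
An illustration of the kind of heuristic the widened signature enables, loosely mirroring the Snowflake implementation later in this commit (the helper name is hypothetical):

    statement_needs_execute statement =
        # Extract the raw query text from either form of the argument.
        query_text = case statement of
            text : Text -> text
            _ : SQL_Statement -> statement.prepare.first
        # Require execution only for constructs whose types the driver
        # mis-reports without executing, e.g. VALUES clauses.
        query_text.contains "VALUES"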
@@ -2,6 +2,8 @@ from Standard.Base import all
 import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
 from Standard.Base.Runtime import assert

+from Standard.Table import Value_Type
+
 import project.DB_Column.DB_Column
 import project.DB_Table.DB_Table
 import project.Internal.IR.Internal_Column.Internal_Column
@@ -81,3 +83,10 @@ rename_internal_columns : Vector Internal_Column -> Vector Text -> Vector Internal_Column
 rename_internal_columns columns new_names =
     columns.zip new_names col-> name->
         col.rename name
+
+## PRIVATE
+   Checks if the `argument` has an integer type (as defined by the dialect associated with `related_column`).
+   See `SQL_Type_Mapping.is_integer_type` for details.
+expect_dialect_specific_integer_type related_column argument ~action =
+    type_mapping = related_column.connection.dialect.get_type_mapping
+    Value_Type.expect_type argument type_mapping.is_integer_type "Integer" action
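
A hedged usage sketch (the column variable is hypothetical): the helper replaces a plain `Value_Type.expect_integer`, so that on Snowflake a `NUMBER(38, 0)` argument also passes the check:

    checked_text_left my_text_column n =
        Helpers.expect_dialect_specific_integer_type my_text_column n <|
            my_text_column.text_left n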
@@ -158,8 +158,10 @@ type Postgres_Dialect
         Internal_Column.Value column.name new_sql_type_reference new_expression

     ## PRIVATE
-    needs_execute_query_for_type_inference : Boolean
-    needs_execute_query_for_type_inference self = False
+    needs_execute_query_for_type_inference : Text | SQL_Statement -> Boolean
+    needs_execute_query_for_type_inference self statement =
+        _ = statement
+        False

     ## PRIVATE
     supports_separate_nan : Boolean
@@ -302,6 +304,12 @@ type Postgres_Dialect
             False -> base_type
         _ -> base_type

+    ## PRIVATE
+    needs_literal_table_cast : Value_Type -> Boolean
+    needs_literal_table_cast self value_type =
+        _ = value_type
+        False
+
     ## PRIVATE
     make_dialect_operations =
         cases = [["LOWER", Base_Generator.make_function "LOWER"], ["UPPER", Base_Generator.make_function "UPPER"]]
@@ -664,6 +672,8 @@ make_date_add arguments (metadata : Date_Period_Metadata) =
                 "secs=>0.001"
             Time_Period.Microsecond ->
                 "secs=>0.000001"
+            Time_Period.Nanosecond ->
+                Panic.throw (Illegal_State.Error "Nanosecond precision is not supported by Postgres.")
         interval_expression = SQL_Builder.code "make_interval(" ++ interval_arg ++ ")"
         shifted = SQL_Builder.code "(" ++ expr ++ " + (" ++ amount ++ " * " ++ interval_expression ++ "))"
         case metadata.input_value_type of
@@ -125,6 +125,19 @@ type Postgres_Type_Mapping
         value_type = self.sql_type_to_value_type sql_type
         Column_Fetcher_Module.default_fetcher_for_value_type value_type

+    ## PRIVATE
+    is_implicit_conversion (source_type : Value_Type) (target_type : Value_Type) -> Boolean =
+        # Currently, we do not have any implicit conversions.
+        _ = [source_type, target_type]
+        False
+
+    ## PRIVATE
+    is_integer_type (value_type : Value_Type) -> Boolean = value_type.is_integer
+
+    ## PRIVATE
+    is_same_type (value_type1 : Value_Type) (value_type2 : Value_Type) -> Boolean =
+        value_type1.is_same_type value_type2
+
     ## PRIVATE
     simple_types_map = Dictionary.from_vector <|
         ints = [[Types.SMALLINT, Value_Type.Integer Bits.Bits_16], [Types.BIGINT, Value_Type.Integer Bits.Bits_64], [Types.INTEGER, Value_Type.Integer Bits.Bits_32]]
@@ -89,6 +89,40 @@ type SQL_Type_Mapping
         _ = column_type_suggestions
         Unimplemented.throw "This is an interface only."

+    ## PRIVATE
+       Checks if the conversion between the two types is one to be done implicitly in the given backend.
+       Conversions marked as implicit will not raise Inexact_Type_Coercion warnings.
+
+       For example, the Snowflake database converts all integer types to NUMERIC(38, 0).
+       This conversion is a property of the database, so warning about it would only be annoying.
+    is_implicit_conversion (source_type : Value_Type) (target_type : Value_Type) -> Boolean =
+        _ = [source_type, target_type]
+        Unimplemented.throw "This is an interface only."
+
+    ## PRIVATE
+       Specifies if this backend recognizes the given type as an integer type.
+
+       For most backends, this should just be `.is_integer`.
+       However, in some backends (e.g. Snowflake), the Decimal type is treated
+       as the main Integer type, so this method can be used to specify that.
+       We don't make Decimal type an integer type by default, as in other
+       backends we do want to keep the distinction (for example in Postgres,
+       `date_add` function will work with Integer but not with Decimal types).
+    is_integer_type (value_type : Value_Type) -> Boolean =
+        _ = value_type
+        Unimplemented.throw "This is an interface only."
+
+    ## PRIVATE
+       Checks if the two types are to be considered the same by the `By_Type`
+       selector.
+
+       In most backends this can just delegate to `Value_Type.is_same_type`. But
+       e.g. in Snowflake this can be used to make Decimal and Integer types
+       interchangeable.
+    is_same_type (value_type1 : Value_Type) (value_type2 : Value_Type) -> Boolean =
+        _ = [value_type1, value_type2]
+        Unimplemented.throw "This is an interface only."
+
     ## PRIVATE
     default_sql_type_to_text sql_type =
         suffix = case sql_type.precision of
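
For the Snowflake mapping defined later in this commit, the three hooks are expected to behave roughly as follows (a sketch, not a test from the repository):

    expected_snowflake_semantics =
        int64 = Value_Type.Integer Bits.Bits_64
        decimal_int = Value_Type.Decimal 38 0
        # Expected: [True, True, True] under the Snowflake mapping.
        [Snowflake_Type_Mapping.is_integer_type decimal_int, Snowflake_Type_Mapping.is_same_type int64 decimal_int, Snowflake_Type_Mapping.is_implicit_conversion int64 decimal_int]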
@@ -1,5 +1,6 @@
 from Standard.Base import all
 import Standard.Base.Errors.Illegal_State.Illegal_State
+import Standard.Base.Runtime.Ref.Ref

 import project.Connection.Connection.Connection
 import project.Internal.IR.Context.Context
@@ -7,25 +8,36 @@ import project.Internal.IR.Query.Query
 import project.Internal.IR.SQL_Expression.SQL_Expression
 import project.SQL_Type.SQL_Type

 ## PRIVATE
 type SQL_Type_Reference
     ## Refers to the SQL type of a given column, as computed by the Database
        itself.

-       Since fetching this type requires querying the database, it is computed
-       lazily and cached.
-    Computed_By_Database (~lazy_ref : SQL_Type)
+       We are not using a lazy field, because we also want to be able to
+       manually set cached type - see `cache_computed_type`.
+    Computed_By_Database (ref : Ref (SQL_Type_Recipe | SQL_Type))

     ## Refers to an SQL type that is overridden by the dialect's type system.
     Overridden (value : SQL_Type)

+    ## A type reference that should never be queried.
+       Used in some internal expressions that are not user facing and are known
+       to not rely on the type.
+    Null
+
     ## PRIVATE
        Returns the stored SQL type.

        This may perform a database query on first access.
     get : SQL_Type
     get self = case self of
-        SQL_Type_Reference.Computed_By_Database lazy_ref -> lazy_ref
+        SQL_Type_Reference.Computed_By_Database ref -> get_or_compute ref
         SQL_Type_Reference.Overridden value -> value
+        SQL_Type_Reference.Null ->
+            Panic.throw (Illegal_State.Error "Getting the SQL_Type from SQL_Type_Reference.Null is not allowed. This indicates a bug in the Database library.")

     ## PRIVATE
        Creates an `SQL_Type_Reference` from a known constant.
@@ -43,23 +55,14 @@ type SQL_Type_Reference
        expected type will be.
     new : Connection -> Context -> SQL_Expression -> SQL_Type_Reference
     new connection context expression =
-        do_fetch =
-            statement = connection.dialect.prepare_fetch_types_query expression context
-            statement_setter = connection.dialect.get_statement_setter
-            columns = connection.fetch_columns statement statement_setter
-            only_column = columns.first
-            only_column.second
-        SQL_Type_Reference.Computed_By_Database do_fetch
+        SQL_Type_Reference.Computed_By_Database (Ref.new (SQL_Type_Recipe.Value connection context expression))

     ## PRIVATE
        Creates a new `SQL_Type_Reference` that should never be used.
        This is used by some internal methods which need to construct an internal
        column, but we can guarantee that its SQL Type will never be checked.
     null : SQL_Type_Reference
-    null =
-        getter =
-            Error.throw (Illegal_State.Error "Getting the SQL_Type from SQL_Type_Reference.null is not allowed. This indicates a bug in the Database library.")
-        SQL_Type_Reference.Computed_By_Database getter
+    null = SQL_Type_Reference.Null

     ## PRIVATE
        Turns this reference into a type override.
@@ -71,4 +74,45 @@ type SQL_Type_Reference
     to_type_override : SQL_Type | Nothing
     to_type_override self = case self of
         SQL_Type_Reference.Overridden sql_type -> sql_type
-        SQL_Type_Reference.Computed_By_Database _ -> Nothing
+        _ -> Nothing
+
+    ## PRIVATE
+       If this ref was `Computed_By_Database` that was not yet computed, it will
+       save the computed type in its cache. Otherwise, it will do nothing.
+
+       This can be used by queries to fill-in an already computed type, so that
+       it no longer has to be fetched.
+
+       The `sql_type` argument is suspended, so that it will be processed only
+       if it is used.
+    cache_computed_type self (~sql_type : SQL_Type) = case self of
+        SQL_Type_Reference.Computed_By_Database ref -> case ref.get of
+            _ : SQL_Type_Recipe ->
+                ref.put sql_type
+                Nothing
+            _ -> Nothing
+        _ -> Nothing
+
+## PRIVATE
+type SQL_Type_Recipe
+    ## PRIVATE
+       A recipe for computing the SQL type.
+    Value connection:Connection context:Context expression:SQL_Expression
+
+## PRIVATE
+   Returns the SQL type represented by the given reference, computing it if
+   necessary.
+
+   The computed type is saved back into the Ref, so that future accesses can
+   rely on the cached value.
+get_or_compute (ref : Ref (SQL_Type_Recipe | SQL_Type)) -> SQL_Type =
+    case ref.get of
+        computed : SQL_Type -> computed
+        SQL_Type_Recipe.Value connection context expression ->
+            statement = connection.dialect.prepare_fetch_types_query expression context
+            statement_setter = connection.dialect.get_statement_setter
+            columns = connection.fetch_columns statement statement_setter
+            only_column = columns.first
+            computed_type = only_column.second
+            ref.put computed_type
+            computed_type
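
A minimal sketch of the new caching lifecycle (the concrete `SQL_Type` value passed to the cache is illustrative):

    lifecycle connection context expression =
        type_ref = SQL_Type_Reference.new connection context expression
        # The reference now holds a SQL_Type_Recipe; no query has been run yet.
        type_ref.cache_computed_type (SQL_Type.Value Types.INTEGER "int4")
        # `get` now returns the cached type without touching the database.
        type_ref.get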
@@ -169,8 +169,10 @@ type SQLite_Dialect
         SQL_Expression.Operation "CAST" [column.expression, SQL_Expression.Literal sql_type_text]

     ## PRIVATE
-    needs_execute_query_for_type_inference : Boolean
-    needs_execute_query_for_type_inference self = True
+    needs_execute_query_for_type_inference : Text | SQL_Statement -> Boolean
+    needs_execute_query_for_type_inference self statement =
+        _ = statement
+        True

     ## PRIVATE
     supports_separate_nan : Boolean
@@ -297,6 +299,12 @@ type SQLite_Dialect
     value_type_for_upload_of_existing_column : DB_Column -> Value_Type
     value_type_for_upload_of_existing_column self column = column.value_type

+    ## PRIVATE
+    needs_literal_table_cast : Value_Type -> Boolean
+    needs_literal_table_cast self value_type =
+        _ = value_type
+        False
+
     ## PRIVATE
     make_dialect_operations =
         text = [starts_with, contains, ends_with, make_case_sensitive, ["REPLACE", replace], left, right]+concat_ops+trim_ops
@@ -117,6 +117,19 @@ type SQLite_Type_Mapping
         value_type = self.sql_type_to_value_type sql_type
         Column_Fetcher_Module.default_fetcher_for_value_type value_type

+    ## PRIVATE
+    is_implicit_conversion (source_type : Value_Type) (target_type : Value_Type) -> Boolean =
+        # Currently, we do not have any implicit conversions.
+        _ = [source_type, target_type]
+        False
+
+    ## PRIVATE
+    is_integer_type (value_type : Value_Type) -> Boolean = value_type.is_integer
+
+    ## PRIVATE
+    is_same_type (value_type1 : Value_Type) (value_type2 : Value_Type) -> Boolean =
+        value_type1.is_same_type value_type2
+
     ## PRIVATE
        The types that SQLite JDBC driver will report are: BOOLEAN, TINYINT,
        SMALLINT, BIGINT, INTEGER, DECIMAL, DOUBLE, REAL, FLOAT, NUMERIC, DATE,
@@ -336,7 +336,8 @@ common_update_table (source_table : DB_Table | Table) (target_table : DB_Table)
     check_for_null_keys_if_any_keys_set source_table effective_key_columns <| Context.Output.with_enabled <|
         structure_hint = target_table.select_columns source_table.column_names reorder=True . columns . map c->
             Column_Description.Value c.name c.value_type
-        tmp_table = internal_upload_table source_table connection tmp_table_name primary_key=effective_key_columns structure_hint=structure_hint temporary=True on_problems=Problem_Behavior.Report_Error row_limit=row_limit
+        # We ignore non-critical problems in `internal_upload_table` because we already checked the structure.
+        tmp_table = internal_upload_table source_table connection tmp_table_name primary_key=effective_key_columns structure_hint=structure_hint temporary=True on_problems=Problem_Behavior.Ignore row_limit=row_limit
         tmp_table.if_not_error <|
             resulting_table = append_to_existing_table tmp_table target_table update_action effective_key_columns dry_run=dry_run
             ## We don't need to drop the table if append panics, because
@@ -467,6 +468,7 @@ check_update_arguments_structure_match source_table target_table key_columns upd
         source_type = source_column.value_type
         target_type = target_column.value_type
         if source_type == target_type then [] else
+            if target_table.connection.dialect.get_type_mapping.is_implicit_conversion source_type target_type then [] else
             if source_type.can_be_widened_to target_type then [Inexact_Type_Coercion.Warning source_type target_type unavailable=False] else
                 Error.throw (Column_Type_Mismatch.Error source_column.name target_type source_type)
@@ -41,13 +41,11 @@ type SQL_Statement
     unsafe_to_raw_sql self =
         strings = self.internal_fragments . map <| case _ of
             SQL_Fragment.Code_Part code -> code
-            # TODO at some point we may try more sophisticated serialization based on data type
+            # TODO #183734954: date and time formatting is limited and will lose sub-second precision and timezone offset.
             SQL_Fragment.Interpolation obj -> case obj of
                 Number -> obj.to_text
-                Date_Time -> "'" + (obj.format "yyyy-MM-dd HH:mm:ss") + "'"
+                Date_Time -> "'" + (obj.format "yyyy-MM-dd HH:mm:ss.f") + "'"
                 Date -> "'" + (obj.format "yyyy-MM-dd") + "'"
-                Time_Of_Day -> "'" + (obj.format "HH:mm:ss") + "'"
+                Time_Of_Day -> "'" + (obj.format "HH:mm:ss.f") + "'"
                 _ -> "'" + obj.to_text.replace "'" "''" + "'"
         strings.join ""
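
A hedged illustration of the format change (the exact printed value depends on the standard library's `format` semantics):

    # "yyyy-MM-dd HH:mm:ss.f" keeps the fractional seconds that the previous
    # "yyyy-MM-dd HH:mm:ss" pattern silently dropped.
    example = Date_Time.new 2024 7 4 12 30 45 millisecond=123 . format "yyyy-MM-dd HH:mm:ss.f"
    # e.g. "2024-07-04 12:30:45.123"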
@@ -45,6 +45,9 @@ from Standard.Database.Internal.Statement_Setter import fill_hole_default

 import project.Internal.Snowflake_Type_Mapping.Snowflake_Type_Mapping

+polyglot java import org.enso.database.JDBCUtils
+polyglot java import org.enso.snowflake.SnowflakeJDBCUtils
+
 ## PRIVATE
    The dialect of Snowflake databases.
 snowflake : Snowflake_Dialect
@@ -144,10 +147,13 @@ type Snowflake_Dialect
     get_statement_setter : Statement_Setter
     get_statement_setter self =
         custom_fill_hole stmt i type_hint value = case value of
-            # TODO some Special handling for Date/Time
-            _ : Date -> stmt.setObject i value
-            _ : Date_Time -> stmt.setObject i value
-            _ : Time_Of_Day -> stmt.setObject i value
+            _ : Date_Time ->
+                keep_offset = case type_hint of
+                    Value_Type.Date_Time with_timezone -> with_timezone
+                    _ -> True
+                SnowflakeJDBCUtils.setDateTime stmt i value keep_offset
+            _ : Time_Of_Day -> SnowflakeJDBCUtils.setTimeOfDay stmt i value
+            _ : Date -> SnowflakeJDBCUtils.setDate stmt i value
             # Fallback to default logic for everything else
             _ -> fill_hole_default stmt i type_hint value
         Statement_Setter.Value custom_fill_hole
@@ -169,8 +175,15 @@ type Snowflake_Dialect
         Internal_Column.Value column.name new_sql_type_reference new_expression

     ## PRIVATE
-    needs_execute_query_for_type_inference : Boolean
-    needs_execute_query_for_type_inference self = False
+    needs_execute_query_for_type_inference : Text | SQL_Statement -> Boolean
+    needs_execute_query_for_type_inference self statement =
+        query_text = case statement of
+            text : Text -> text
+            _ : SQL_Statement -> statement.prepare.first
+        keywords_that_need_execute = ["VALUES", "DECODE"]
+        regex = keywords_that_need_execute.join "|"
+        needs_execute = query_text.find regex . is_nothing . not
+        needs_execute

     ## PRIVATE
     supports_separate_nan : Boolean
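
A sketch of the heuristic's effect on two example queries (the queries are illustrative):

    heuristic_examples =
        regex = ["VALUES", "DECODE"].join "|"
        q1 = "SELECT * FROM (VALUES (1, 2))"
        q2 = "SELECT a FROM my_table"
        # Expected: [True, False] - only q1 contains a keyword that forces execution.
        [q1.find regex . is_nothing . not, q2.find regex . is_nothing . not]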
@@ -212,8 +225,13 @@ type Snowflake_Dialect
     ## PRIVATE
     check_aggregate_support : Aggregate_Column -> Boolean ! Unsupported_Database_Operation
     check_aggregate_support self aggregate =
-        _ = aggregate
-        True
+        unsupported name =
+            Error.throw (Unsupported_Database_Operation.Error name+" is currently not supported by Snowflake backend. You may need to materialize the table and perform the operation in-memory.")
+        case aggregate of
+            First _ _ _ _ -> unsupported "First"
+            Last _ _ _ _ -> unsupported "Last"
+            # All other aggregates are supported.
+            _ -> True

     ## PRIVATE
        Checks if an operation is supported by the dialect.
@@ -262,12 +280,19 @@ type Snowflake_Dialect
         case base_type of
             Value_Type.Decimal precision scale ->
                 used_scale = scale.if_nothing 12
-                used_precision = Math.min 38 precision.if_nothing 38
+                used_precision = Math.min 38 (precision.if_nothing 38)
                 new_type = Value_Type.Decimal used_precision used_scale
                 if used_scale==scale && used_precision==precision then new_type else
                     Warning.attach (Inexact_Type_Coercion.Warning base_type new_type unavailable=False) new_type
             _ -> base_type

+    ## PRIVATE
+    needs_literal_table_cast : Value_Type -> Boolean
+    needs_literal_table_cast self value_type = case value_type of
+        Value_Type.Time -> True
+        Value_Type.Date_Time _ -> True
+        _ -> False
+
     ## PRIVATE
     make_dialect_operations =
         cases = [["LOWER", Base_Generator.make_function "LOWER"], ["UPPER", Base_Generator.make_function "UPPER"]]
@@ -279,24 +304,38 @@ make_dialect_operations =
     stddev_pop = ["STDDEV_POP", Base_Generator.make_function "stddev_pop"]
     stddev_samp = ["STDDEV_SAMP", Base_Generator.make_function "stddev_samp"]
     stats = [agg_median, agg_mode, agg_percentile, stddev_pop, stddev_samp]
-    date_ops = [make_extract_as_int "year", make_extract_as_int "quarter", make_extract_as_int "month", make_extract_as_int "week", make_extract_as_int "day", make_extract_as_int "hour", make_extract_as_int "minute", make_extract_fractional_as_int "second", make_extract_fractional_as_int "millisecond" modulus=1000, make_extract_fractional_as_int "microsecond" modulus=1000, ["date_add", make_date_add], ["date_diff", make_date_diff], ["date_trunc_to_day", make_date_trunc_to_day]]
-    special_overrides = []
-    other = [["RUNTIME_ERROR", make_runtime_error_op]]
-    my_mappings = text + counts + stats + first_last_aggregators + arith_extensions + bool + date_ops + special_overrides + other
+    date_ops =
+        trivial = ["year", "quarter", "month", "week", "day", "hour", "minute", "second"]
+            . map name-> [name, Base_Generator.make_function name]
+        fractional = [extract_just_milliseconds, extract_just_microseconds, extract_just_nanoseconds]
+        other = [["day_of_year", Base_Generator.make_function "DAYOFYEAR"], ["day_of_week", Base_Generator.make_function "DAYOFWEEKISO"]]
+        operations = [["date_add", make_date_add], ["date_diff", make_date_diff], ["date_trunc_to_day", make_date_trunc_to_day]]
+        trivial + fractional + other + operations
+    other = [["IIF", make_iif], ["RUNTIME_ERROR", make_runtime_error_op]]
+    my_mappings = text + counts + stats + arith_extensions + bool + date_ops + other
     Base_Generator.base_dialect_operations . extend_with my_mappings

 ## PRIVATE
 agg_count_is_null = Base_Generator.lift_unary_op "COUNT_IS_NULL" arg->
+    replace_with_zero_if_null <|
         SQL_Builder.code "COUNT_IF(" ++ arg.paren ++ " IS NULL)"

 ## PRIVATE
 agg_count_empty = Base_Generator.lift_unary_op "COUNT_EMPTY" arg->
+    replace_with_zero_if_null <|
         SQL_Builder.code "COUNT_IF(" ++ arg.paren ++ " IS NULL OR " ++ arg.paren ++ " = '')"

 ## PRIVATE
 agg_count_not_empty = Base_Generator.lift_unary_op "COUNT_NOT_EMPTY" arg->
+    replace_with_zero_if_null <|
         SQL_Builder.code "COUNT_IF(" ++ arg.paren ++ " IS NOT NULL AND " ++ arg.paren ++ " != '')"

+## PRIVATE
+   A helper needed because Snowflake's aggregators return NULL if there were no
+   rows. But for aggregators like COUNT we prefer to return 0 in such cases.
+replace_with_zero_if_null expr =
+    SQL_Builder.code "COALESCE(" ++ expr ++ ", 0)"
+
 ## PRIVATE
 agg_median = Base_Generator.lift_unary_op "MEDIAN" arg->
     median = SQL_Builder.code "MEDIAN(" ++ arg ++ ")"
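
The effect of the new wrapper on the generated SQL, as a hedged sketch (the inner expression is illustrative):

    # COUNT_IF over zero rows yields NULL in Snowflake; the wrapper turns the
    # aggregate into COALESCE(COUNT_IF(...), 0) so empty groups count as 0.
    wrapped = replace_with_zero_if_null (SQL_Builder.code "COUNT_IF(x IS NULL)")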
@@ -314,9 +353,6 @@ agg_percentile = Base_Generator.lift_binary_op "PERCENTILE" p-> expr->
     SQL_Builder.code "CASE WHEN " ++ has_nan ++ " THEN 'NaN' ELSE " ++ percentile ++ " END"

 ## PRIVATE
-   These are written in a not most-efficient way, but a way that makes them
-   compatible with other group-by aggregations out-of-the-box. In the future, we
-   may want to consider some alternative solutions.
 first_last_aggregators =
     first = make_first_aggregator reverse=False ignore_null=False
     first_not_null = make_first_aggregator reverse=False ignore_null=True
@@ -331,17 +367,17 @@ make_first_aggregator reverse ignore_null args =
     order_bys = args.drop 1

     method_name = if reverse then "LAST_VALUE" else "FIRST_VALUE"
-    filter_clause = if ignore_null then ") IGNORE NULLS OVER" else ") OVER"
-    order_clause = SQL_Builder.code " ORDER BY " ++ SQL_Builder.join "," order_bys
+    filter_clause = if ignore_null then ") IGNORE NULLS" else ")"
+    order_clause = SQL_Builder.code " OVER (ORDER BY " ++ SQL_Builder.join "," order_bys ++ ")"
     SQL_Builder.code (method_name + "(") ++ result_expr ++ filter_clause ++ order_clause

 ## PRIVATE
 agg_shortest = Base_Generator.lift_unary_op "SHORTEST" arg->
-    SQL_Builder.code "FIRST_VALUE(" ++ arg ++ ") IGNORE NULLS OVER (ORDER BY LENGTH(" ++ arg ++ "))"
+    SQL_Builder.code "MIN_BY(" ++ arg ++ ", LENGTH(" ++ arg ++ "))"

 ## PRIVATE
 agg_longest = Base_Generator.lift_unary_op "LONGEST" arg->
-    SQL_Builder.code "FIRST_VALUE(" ++ arg ++ ") IGNORE NULLS OVER (ORDER BY LENGTH(" ++ arg ++ ") DESC)"
+    SQL_Builder.code "MAX_BY(" ++ arg ++ ", LENGTH(" ++ arg ++ "))"

 ## PRIVATE
 concat_ops =
@@ -372,20 +408,26 @@ agg_count_distinct args = if args.is_empty then (Error.throw (Illegal_Argument.E
         ## A single null value will be skipped.
         SQL_Builder.code "COUNT(DISTINCT " ++ args.first ++ ")"
     False ->
-        ## A tuple of nulls is not a null, so it will not be skipped - but
-           we want to ignore all-null columns. So we manually filter them
-           out.
-        count = SQL_Builder.code "COUNT(DISTINCT (" ++ SQL_Builder.join ", " args ++ "))"
-        are_nulls = args.map arg-> arg.paren ++ " IS NULL"
-        all_nulls_filter = SQL_Builder.code " FILTER (WHERE NOT (" ++ SQL_Builder.join " AND " are_nulls ++ "))"
-        (count ++ all_nulls_filter).paren
+        ## We do not want to ignore a row where only some values are NULL - so we coalesce them.
+        coalesced_args = args.map replace_null_with_marker
+        # But we want to ignore all-null tuples. So we include an additional indicator column which is NULL if all cells in the given row were NULL - excluding such rows.
+        are_all_nulls = SQL_Builder.join " AND " (args.map arg-> arg.paren ++ " IS NULL")
+        all_null_indicator = SQL_Builder.code "CASE WHEN " ++ are_all_nulls ++ " THEN NULL ELSE 1 END"
+        SQL_Builder.code "COUNT(DISTINCT " ++ SQL_Builder.join ", " (coalesced_args + [all_null_indicator]) ++ ")"

 ## PRIVATE
 agg_count_distinct_include_null args =
-    ## If we always count as tuples, then even null fields are counted.
-    ## But Snowflake seems to not like tuples?
-    #SQL_Builder.code "COUNT(DISTINCT (" ++ SQL_Builder.join ", " args ++ ", 0))"
-    SQL_Builder.code "COUNT(DISTINCT (" ++ SQL_Builder.join ", " args ++ "))"
+    # As with `agg_count_distinct`, we do want to handle columns that contain NULLs, so we need to apply the ugly coalesce.
+    coalesced_args = args.map replace_null_with_marker
+    SQL_Builder.code "COUNT(DISTINCT " ++ SQL_Builder.join ", " coalesced_args ++ ")"

+## PRIVATE
+   A helper function that coalesces a NULL column replacing it with a marker value that is expected to not be present in real world data.
+   It is sometimes needed when we want to count distinct values in a column that contains NULLs and still include the rows containing NULLs.
+   The columns are converted to VARIANT type because of that, which may incur some overhead.
+   But there seems to be no other reliable way to handle this for columns like numeric where no non-NULL value exists that can be guaranteed to be unused.
+replace_null_with_marker expr =
+    SQL_Builder.code "COALESCE(" ++ expr ++ ", {'enso-null-replacement-marker':'"+Random.uuid+"'}::variant)"
+
 ## PRIVATE
 starts_with = Base_Generator.lift_binary_sql_function "STARTS_WITH" "STARTSWITH"
@@ -529,121 +571,60 @@ replace args metadata =
         True -> Nothing
     expression.if_nothing (replace_params.throw_unsupported snowflake_dialect_name)

 ## PRIVATE
-make_extract_as_int enso_name sql_name=enso_name =
-    Base_Generator.lift_unary_op enso_name arg->
-        as_int32 <| SQL_Builder.code "EXTRACT(" ++ sql_name ++ " FROM " ++ arg ++ ")"
+extract_just_nanoseconds = Base_Generator.lift_unary_op "nanosecond" arg->
+    SQL_Builder.code "(EXTRACT(NANOSECOND FROM " ++ arg ++ ") % 1000)"
+
+extract_just_microseconds = Base_Generator.lift_unary_op "microsecond" arg->
+    SQL_Builder.code "(TRUNC(EXTRACT(NANOSECOND FROM " ++ arg ++ ") / 1000) % 1000)"
+
+extract_just_milliseconds = Base_Generator.lift_unary_op "millisecond" arg->
+    # No modulo is needed, as the milliseconds are the last part of the nanoseconds.
+    SQL_Builder.code "TRUNC(EXTRACT(NANOSECOND FROM " ++ arg ++ ") / 1000000)"

 ## PRIVATE
-make_extract_fractional_as_int enso_name sql_name=enso_name modulus=Nothing =
-    Base_Generator.lift_unary_op enso_name arg->
-        result = as_int32 <| SQL_Builder.code "TRUNC(EXTRACT(" ++ sql_name ++ " FROM " ++ arg ++ "))"
-        case modulus of
-            Nothing -> result
-            _ : Integer ->
-                (result ++ (" % "+modulus.to_text)).paren
+date_period_to_part_with_multiplier period =
+    case period of
+        Date_Period.Year -> ["year", 1]
+        Date_Period.Quarter -> ["quarter", 1]
+        Date_Period.Month -> ["month", 1]
+        Date_Period.Week _ -> ["week", 1]
+        Date_Period.Day -> ["day", 1]
+        Time_Period.Day -> ["hour", 24]
+        Time_Period.Hour -> ["hour", 1]
+        Time_Period.Minute -> ["minute", 1]
+        Time_Period.Second -> ["second", 1]
+        Time_Period.Millisecond -> ["millisecond", 1]
+        Time_Period.Microsecond -> ["microsecond", 1]
+        Time_Period.Nanosecond -> ["nanosecond", 1]

 ## PRIVATE
 make_date_add arguments (metadata : Date_Period_Metadata) =
     if arguments.length != 2 then Error.throw (Illegal_State.Error "date_add expects exactly 2 sub expressions. This is a bug in Database library.") else
         expr = arguments.at 0
         amount = arguments.at 1
-        interval_arg = case metadata.period of
-            Date_Period.Year ->
-                "years=>1"
-            Date_Period.Quarter ->
-                "months=>3"
-            Date_Period.Month ->
-                "months=>1"
-            Date_Period.Week _ ->
-                "weeks=>1"
-            Date_Period.Day ->
-                "days=>1"
-            Time_Period.Day ->
-                "hours=>24"
-            Time_Period.Hour ->
-                "hours=>1"
-            Time_Period.Minute ->
-                "mins=>1"
-            Time_Period.Second ->
-                "secs=>1"
-            Time_Period.Millisecond ->
-                "secs=>0.001"
-            Time_Period.Microsecond ->
-                "secs=>0.000001"
-        interval_expression = SQL_Builder.code "make_interval(" ++ interval_arg ++ ")"
-        shifted = SQL_Builder.code "(" ++ expr ++ " + (" ++ amount ++ " * " ++ interval_expression ++ "))"
-        case metadata.input_value_type of
-            Value_Type.Date ->
-                SQL_Builder.code "(" ++ shifted ++ "::date)"
-            _ -> shifted
+        part_with_multiplier = date_period_to_part_with_multiplier metadata.period
+        date_part = part_with_multiplier.first
+        multiplier = part_with_multiplier.second
+        scaled_amount = if multiplier == 1 then amount else
+            amount ++ " * " ++ multiplier.to_text
+        sql_typ = sql_type_string_for_date_time metadata.input_value_type
+        SQL_Builder.code "DATEADD('"+date_part+"', (" ++ scaled_amount ++ ")::NUMBER, (" ++ expr ++ ")::" ++ sql_typ ++ ")"
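
A worked example of the multiplier logic above (the rendered SQL is paraphrased):

    # Time_Period.Day maps to ["hour", 24], so adding 2 "days" to a
    # TIMESTAMP_NTZ value becomes, roughly:
    #   DATEADD('hour', (2 * 24)::NUMBER, (ts)::TIMESTAMP_NTZ)
    day_part = date_period_to_part_with_multiplier Time_Period.Day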
 ## PRIVATE
 make_date_diff arguments (metadata : Date_Period_Metadata) =
     if arguments.length != 2 then Error.throw (Illegal_State.Error "date_diff expects exactly 2 sub expressions. This is a bug in Database library.") else
         start = arguments.at 0
         end = arguments.at 1

-        truncate expr =
-            SQL_Builder.code "TRUNC(" ++ expr ++ ")"
-
-        # `age` computes a 'symbolic' difference expressed in years, months and days.
-        extract_years =
-            as_int32 <| SQL_Builder.code "EXTRACT(YEARS FROM age(" ++ end ++ ", " ++ start ++ "))"
-        # To get total months, we need to sum up with whole years.
-        extract_months =
-            months = as_int32 <|
-                SQL_Builder.code "EXTRACT(MONTHS FROM age(" ++ end ++ ", " ++ start ++ "))"
-            SQL_Builder.code "(" ++ extract_years ++ " * 12 + " ++ months ++ ")"
-        ## To get total days, we cannot use `age`, because we cannot convert an
-           amount of months to days (month lengths vary). Instead we rely on `-`
-           returning an interval based in 'raw' days.
-        extract_days =
-            as_int32 <| case metadata.input_value_type of
-                ## For pure 'date' datatype, the difference is a simple integer
-                   count of days.
-                Value_Type.Date -> (end ++ " - " ++ start).paren
-                # For others, it is an interval, so we need to extract.
-                _ -> SQL_Builder.code "EXTRACT(DAYS FROM (" ++ end ++ " - " ++ start ++ "))"
-        ## We round the amount of seconds towards zero, as we only count full
-           elapsed seconds in the interval.
-           Note that it is important the interval is computed using `-`. The
-           symbolic `age` has no clear mapping to the count of days, skewing the
-           result.
-        extract_seconds =
-            seconds_numeric = SQL_Builder.code "EXTRACT(EPOCH FROM (" ++ end ++ " - " ++ start ++ "))"
-            as_int64 (truncate seconds_numeric)
-        case metadata.period of
-            Date_Period.Year -> extract_years
-            Date_Period.Month -> extract_months
-            Date_Period.Quarter -> (extract_months ++ " / 3").paren
-            Date_Period.Week _ -> (extract_days ++ " / 7").paren
-            Date_Period.Day -> extract_days
-            ## EXTRACT HOURS/MINUTES would yield only a date part, but we need
-               the total which is easiest achieved by EPOCH
-            Time_Period.Hour -> (extract_seconds ++ " / 3600").paren
-            Time_Period.Minute -> (extract_seconds ++ " / 60").paren
-            Time_Period.Second -> extract_seconds
-            Time_Period.Day -> case metadata.input_value_type of
-                Value_Type.Date -> extract_days
-                # Time_Period.Day is treated as 24 hours, so for types that support time we use the same algorithm like for hours, but divide by 24.
-                _ -> (extract_seconds ++ " / (3600 * 24)").paren
-            ## The EPOCH gives back just the integer amount of seconds, without
-               the fractional part. So we get the fractional part using
-               MILLISECONDS - but that does not give the _total_ just the
-               'seconds of minute' part, expressed in milliseconds. So we need
-               to merge both - but then seconds of minute appear twice, so we %
-               the milliseconds to get just the fractional part from it and sum
-               both.
-            Time_Period.Millisecond ->
-                millis = truncate <|
-                    SQL_Builder.code "EXTRACT(MILLISECONDS FROM (" ++ end ++ " - " ++ start ++ "))"
-                as_int64 <|
-                    ((extract_seconds ++ " * 1000").paren ++ " + " ++ (millis ++ " % 1000").paren).paren
-            Time_Period.Microsecond ->
-                micros = SQL_Builder.code "EXTRACT(MICROSECONDS FROM (" ++ end ++ " - " ++ start ++ "))"
-                as_int64 <|
-                    ((extract_seconds ++ " * 1000000").paren ++ " + " ++ (micros ++ " % 1000000").paren).paren
+        part_with_multiplier = date_period_to_part_with_multiplier metadata.period
+        date_part = part_with_multiplier.first
+        multiplier = part_with_multiplier.second
+        ## The SQL type to add as a cast. This is needed, because otherwise this operation is losing type information,
+           especially if given NULL (Nothing). It would tell that it returns a VARCHAR which is not true.
+        sql_typ = sql_type_string_for_date_time metadata.input_value_type
+        diff = SQL_Builder.code "DATEDIFF('" ++ date_part ++ "', (" ++ start ++ ")::" ++ sql_typ ++ ", (" ++ end ++ ")::" ++ sql_typ ++ ")"
+        if multiplier == 1 then diff else
+            # We want to return integer, so we truncate any fractional part that did not constitute a full unit.
+            SQL_Builder.code "TRUNC(" ++ diff ++ " / " ++ multiplier.to_text ++ ")"

 ## PRIVATE
 make_date_trunc_to_day arguments =
@@ -652,16 +633,11 @@ make_date_trunc_to_day arguments =
         SQL_Builder.code "(DATE_TRUNC('day'," ++ expr ++ ") :: DATE)"

 ## PRIVATE
-   Alters the expression casting the value to a 64-bit integer.
-   TODO probably remove
-as_int64 expr =
-    SQL_Builder.code "(" ++ expr ++ "::int8)"
-
-## PRIVATE
-   Alters the expression casting the value to a 32-bit integer.
-   TODO probably remove
-as_int32 expr =
-    SQL_Builder.code "(" ++ expr ++ "::int4)"
+sql_type_string_for_date_time value_type = case value_type of
+    Value_Type.Date -> "DATE"
+    Value_Type.Date_Time with_tz -> if with_tz then "TIMESTAMP_TZ" else "TIMESTAMP_NTZ"
+    Value_Type.Time -> "TIME"
+    _ -> Panic.throw (Illegal_State.Error "Expects a date or time type. This is a bug in Database library.")

 ## PRIVATE
    The RUNTIME_ERROR operation should allow the query to compile fine and it
@@ -686,5 +662,18 @@ make_runtime_error_op arguments =

     SQL_Builder.code "CAST('[ENSO INVARIANT VIOLATED: '||" ++ error_message ++ "||'] '||COALESCE(" ++ variable_to_defer ++ "::TEXT,'NULL') AS BOOLEAN)"

+## PRIVATE
+make_iif : Vector SQL_Builder -> SQL_Builder
+make_iif arguments = case arguments.length of
+    3 ->
+        expr = arguments.at 0
+        when_true = arguments.at 1
+        when_false = arguments.at 2
+        # We can rely on Snowflake's decode to avoid duplicating `expr` in the SQL code:
+        # (if no default fallback is provided, NULL will be returned - meaning that NULL is mapped to NULL as expected)
+        SQL_Builder.code "DECODE(" ++ expr ++ ", TRUE, " ++ when_true ++ ", FALSE, " ++ when_false ++ ")"
+    _ ->
+        Error.throw <| Illegal_State.Error ("Invalid amount of arguments for operation IIF")
+
 ## PRIVATE
 snowflake_dialect_name = "Snowflake"
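
A sketch of what the new operation emits (the builder inputs are illustrative):

    example_iif = make_iif [SQL_Builder.code "x", SQL_Builder.code "'yes'", SQL_Builder.code "'no'"]
    # Produces: DECODE(x, TRUE, 'yes', FALSE, 'no') - with NULL mapping to NULL
    # because no default branch is supplied.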
@@ -26,15 +26,15 @@ type Snowflake_Type_Mapping
     result = case value_type of
         Value_Type.Boolean -> SQL_Type.Value Types.BOOLEAN "boolean"
         # All integer types in Snowflake become NUMERIC(38,0).
-        Value_Type.Byte -> SQL_Type.Value Types.BIGINT "bigint"
-        Value_Type.Integer _ -> SQL_Type.Value Types.BIGINT "bigint"
+        Value_Type.Byte -> integer_type
+        Value_Type.Integer _ -> integer_type
         # All float types in Snowflake become double
-        Value_Type.Float _ -> SQL_Type.Value Types.DOUBLE "float8"
+        Value_Type.Float _ -> SQL_Type.Value Types.DOUBLE "FLOAT"
         Value_Type.Decimal precision scale -> case precision of
             # If precision is not set, scale is also lost because SQL is unable to express a scale without a precision.
-            Nothing -> SQL_Type.Value Types.DECIMAL "decimal" Nothing Nothing
+            Nothing -> SQL_Type.Value Types.DECIMAL "NUMBER" Nothing Nothing
             # Scale can be set or not, if precision is given, so no check needed.
-            _ -> SQL_Type.Value Types.DECIMAL "decimal" precision scale
+            _ -> SQL_Type.Value Types.DECIMAL "NUMBER" precision scale
         Value_Type.Char size _ ->
             # Snowflake does not support fixed length strings, so we use VARCHAR.
             is_unbounded = case size of
@@ -62,7 +62,10 @@ type Snowflake_Type_Mapping
         "An unsupported SQL type ["+type_name.to_text+"] cannot be converted into an SQL type because it did not contain the SQL metadata needed to reconstruct it."

     approximated_value_type = Snowflake_Type_Mapping.sql_type_to_value_type result
-    problems = if approximated_value_type == value_type then [] else [Inexact_Type_Coercion.Warning value_type approximated_value_type]
+    problems = if approximated_value_type == value_type then [] else
+        # We skip the inexact coercion warning if the conversion is an implicit one.
+        if Snowflake_Type_Mapping.is_implicit_conversion value_type approximated_value_type then [] else
+            [Inexact_Type_Coercion.Warning value_type approximated_value_type]
     on_problems.attach_problems_before problems result

     ## PRIVATE
@@ -117,12 +120,30 @@ type Snowflake_Type_Mapping
         Value_Type.Date_Time _ -> date_time_fetcher
         _ -> Column_Fetcher_Module.default_fetcher_for_value_type value_type

+    ## PRIVATE
+    is_implicit_conversion (source_type : Value_Type) (target_type : Value_Type) -> Boolean =
+        # Currently only implicit conversion is Integer -> Decimal
+        case source_type of
+            Value_Type.Integer _ ->
+                target_type == integer_value_type
+            _ -> False
+
+    ## PRIVATE
+    is_integer_type (value_type : Value_Type) -> Boolean = case value_type of
+        Value_Type.Integer _ -> True
+        Value_Type.Decimal _ scale -> scale == 0
+        _ -> False
+
+    ## PRIVATE
+    is_same_type (value_type1 : Value_Type) (value_type2 : Value_Type) -> Boolean =
+        # Types are considered the same by original semantics, or additionally if both of them are an integer-like type as denoted by `is_integer_type`.
+        (value_type1.is_same_type value_type2) || (Snowflake_Type_Mapping.is_integer_type value_type1 && Snowflake_Type_Mapping.is_integer_type value_type2)
+
     ## PRIVATE
     simple_types_map = Dictionary.from_vector <|
-        ints = [[Types.TINYINT, Value_Type.Byte], [Types.SMALLINT, Value_Type.Integer Bits.Bits_16], [Types.BIGINT, Value_Type.Integer Bits.Bits_64], [Types.INTEGER, Value_Type.Integer Bits.Bits_32]]
-        floats = [[Types.DOUBLE, Value_Type.Float Bits.Bits_64], [Types.REAL, Value_Type.Float Bits.Bits_32]]
+        floats = [[Types.DOUBLE, Value_Type.Float Bits.Bits_64], [Types.REAL, Value_Type.Float Bits.Bits_64]]
         other = [[Types.DATE, Value_Type.Date], [Types.TIME, Value_Type.Time], [Types.BOOLEAN, Value_Type.Boolean]]
-        ints + floats + other
+        floats + other

     ## PRIVATE
     complex_types_map = Dictionary.from_vector <|
@@ -139,7 +160,7 @@ complex_types_map = Dictionary.from_vector <|
         if sql_type.name == "BOOLEAN" then Value_Type.Boolean else
             # We currently do not support bit types.
             on_unknown_type sql_type
-    handle_timestamp sql_type = case sql_type.name of
+    handle_timestamp sql_type = case sql_type.name.to_case Case.Upper of
         "TIMESTAMPTZ" -> Value_Type.Date_Time with_timezone=True
         "TIMESTAMP_TZ" -> Value_Type.Date_Time with_timezone=True
         "TIMESTAMPLTZ" -> Value_Type.Date_Time with_timezone=False
@@ -148,10 +169,11 @@ complex_types_map = Dictionary.from_vector <|
         "TIMESTAMP_NTZ" -> Value_Type.Date_Time with_timezone=False
         _ -> on_unknown_type sql_type

-    numerics = [[Types.DECIMAL, make_decimal], [Types.NUMERIC, make_decimal]]
+    # All integer types in Snowflake are Decimal
+    numerics = [[Types.DECIMAL, make_decimal], [Types.NUMERIC, make_decimal], [Types.BIGINT, make_decimal]]
     strings = [[Types.VARCHAR, make_varchar], [Types.CHAR, make_char], [Types.CLOB, make_varchar]]
     binaries = [[Types.BINARY, make_binary True], [Types.BIT, handle_bit]]
-    others = [[Types.TIMESTAMP, handle_timestamp]]
+    others = [[Types.TIMESTAMP, handle_timestamp], [Types.TIMESTAMP_WITH_TIMEZONE, handle_timestamp]]
     numerics + strings + binaries + others

 ## PRIVATE
@@ -179,8 +201,23 @@ date_time_fetcher =
     fetch_value rs i =
         ## Read the time as a string to get the nanosecond precision.
         sf_string = rs.getString i
-        if sf_string == Nothing then Nothing else Date_Time.parse sf_string
+        if sf_string == Nothing then Nothing else
+            normalized = if sf_string.at 10 != 'T' then sf_string else
+                (sf_string.take 10) + ' ' + (sf_string.drop 11)
+            # The offset is optional - if we were fetching TIMESTAMP_NTZ it could be missing.
+            # The two variants of offset are needed - to handle both `+0200` and `+02:00` formats.
+            Date_Time.parse normalized "yyyy-MM-dd HH:mm:ss.f[ ZZ][ ZZZZZ]"
     make_builder initial_size _ =
         java_builder = Java_Exports.make_date_time_builder initial_size
         Column_Fetcher_Module.make_builder_from_java_object_builder java_builder
     Column_Fetcher.Value fetch_value make_builder
+
+## PRIVATE
+   The actual SQL type that Snowflake uses for all integer types.
+integer_type = SQL_Type.Value Types.DECIMAL "NUMERIC" 38 0
+
+## PRIVATE
+integer_value_type = Value_Type.Decimal 38 0
+
+## PRIVATE
+float_value_type = Value_Type.Float Bits.Bits_64
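
A hedged example of the normalization and the optional-offset pattern:

    # Snowflake may return "2024-07-04T12:30:45.123 +02:00"; the 'T' is replaced
    # by a space, and the optional sections accept "+0200", "+02:00", or no
    # offset at all (as fetched for TIMESTAMP_NTZ).
    parsed = Date_Time.parse "2024-07-04 12:30:45.123 +02:00" "yyyy-MM-dd HH:mm:ss.f[ ZZ][ ZZZZZ]"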
@ -1639,9 +1639,10 @@ type Column
        Value_Type.expect_type self .is_date_or_time "date/time" <|
            my_type = self.inferred_precise_value_type
            Value_Type.expect_type end (== my_type) my_type.to_display_text <|
                Date_Time_Helpers.check_period_aligned_with_value_type my_type period <|
                aligned_period = Date_Time_Helpers.align_period_with_value_type my_type period
                aligned_period.if_not_error <|
                    new_name = naming_helper.function_name "date_diff" [self, end, period.to_display_text]
                    java_unit = period.to_java_unit
                    java_unit = aligned_period.to_java_unit
                    fn = case my_type of
                        Value_Type.Date_Time _ ->
                            start-> end-> Time_Utils.unit_datetime_difference java_unit start end
@ -1677,9 +1678,10 @@ type Column
        Value_Type.expect_type self .is_date_or_time "date/time" <|
            my_type = self.inferred_precise_value_type
            Value_Type.expect_integer amount <|
                Date_Time_Helpers.check_period_aligned_with_value_type my_type period <|
                aligned_period = Date_Time_Helpers.align_period_with_value_type my_type period
                aligned_period.if_not_error <|
                    new_name = naming_helper.function_name "date_add" [self, amount, period.to_display_text]
                    java_unit = period.to_java_unit
                    java_unit = aligned_period.to_java_unit
                    ## Here we do not need a Time_Utils helper like in scalar
                       implementations of `date_add`, because the date coming
                       from the column will always be already converted into a
@ -2286,6 +2288,12 @@ type Column
        storage_type = self.java_column.getStorage.inferPreciseType
        Storage.to_value_type storage_type

    ## PRIVATE
       Internal hook that says if a given column should be selected by a
       specific type in a `By_Type` selection.
    should_be_selected_by_type self (value_type : Value_Type) -> Boolean =
        self.value_type.is_same_type value_type

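Note: per this PR's description, the Snowflake backend overrides this hook so that selecting `..By_Type ..Integer` also matches its de-facto integer columns of type `Decimal 38 0`. A hypothetical shape of such an override (not part of this diff; names assumed):

    # Hypothetical Snowflake-side override; the actual implementation may differ.
    should_be_selected_by_type self (value_type : Value_Type) -> Boolean =
        integer_like = value_type.is_integer && (self.value_type == Value_Type.Decimal 38 0)
        self.value_type.is_same_type value_type || integer_like
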
    ## PRIVATE
       Converts this column to JS_Object representation.

@ -468,6 +468,13 @@ type Invalid_Value_Type
        Invalid_Value_Type.Incomparable type_1 type_2 ->
            "Types "+type_1.to_display_text+" and "+type_2.to_display_text+" cannot be compared to each other."

    ## PRIVATE
    to_text self -> Text = case self of
        Invalid_Value_Type.Column expected actual related_column -> "Invalid_Value_Type.Column "+expected.to_text+" "+actual.to_text+" "+related_column.to_text
        Invalid_Value_Type.Value expected actual value -> "Invalid_Value_Type.Value "+expected.to_text+" "+actual.to_text+" "+value.to_text
        Invalid_Value_Type.Not_Ordered actual -> "Invalid_Value_Type.Not_Ordered "+actual.to_text
        Invalid_Value_Type.Incomparable type_1 type_2 -> "Invalid_Value_Type.Incomparable "+type_1.to_text+" "+type_2.to_text

## An error representing an invalid JSON format for conversion.
type Invalid_JSON_Format
    ## PRIVATE

@ -9,12 +9,13 @@ import project.Column.Column
import project.Value_Type.Value_Type

## PRIVATE
check_period_aligned_with_value_type value_type period ~action = case value_type of
align_period_with_value_type value_type (period : Date_Period | Time_Period) = case value_type of
    Value_Type.Date ->
        if period.is_a Date_Period then period else
            ## We don't 'officially' allow `Time_Period` for Date, but since
               `Time_Period.Day` and `Date_Period.Day` in this context can be
               interchangeable, we allow it as an exception.
            if (period.is_a Date_Period) || (period == Time_Period.Day) then action else
               `Time_Period.Day` and `Date_Period.Day` are interchangeable in this
               context, we allow it as an exception - we just swap it to the right type.
            if period == Time_Period.Day then Date_Period.Day else
                Error.throw (Illegal_Argument.Error "`Time_Period` is not allowed for Date columns. Use `Date_Period`.")
    Value_Type.Time ->
        case period of
@ -22,17 +23,17 @@ check_period_aligned_with_value_type value_type period ~action = case value_type
                Error.throw (Illegal_Argument.Error "`Date_Period` is not allowed for Time columns. Use `Time_Period`.")
            Time_Period.Day ->
                Error.throw (Illegal_Argument.Error "`Time_Period.Day` does not make sense for Time columns.")
            _ -> action
            _ -> period
    Value_Type.Date_Time _ ->
        ## Both kinds are allowed for `Date_Time` columns.
        action
        ## Both kinds are allowed for `Date_Time` columns, return them as-is.
        period

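Note: unlike the old check-and-continue helper, the new function returns the (possibly swapped) period, and callers thread it through with `if_not_error`. Illustrative results:

    align_period_with_value_type Value_Type.Date Time_Period.Day     # -> Date_Period.Day (swapped as the allowed exception)
    align_period_with_value_type Value_Type.Date Time_Period.Hour    # -> Illegal_Argument error
    align_period_with_value_type Value_Type.Time Date_Period.Month   # -> Illegal_Argument error
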
## PRIVATE
   Common logic for `Column.date_part`.
make_date_part_function column period make_unary_op naming_helper =
    Value_Type.expect_type column .is_date_or_time "date/time" <|
        my_type = column.inferred_precise_value_type
        check_period_aligned_with_value_type my_type period <|
        align_period_with_value_type my_type period . if_not_error <|
            new_name = naming_helper.function_name "date_part" [column, period]
            result = case period of
                Date_Period.Year -> make_unary_op column "year"

@ -192,7 +192,8 @@ type Table_Column_Helper
        ix : Integer -> if is_index_valid self.columns.length ix then [self.columns.at ix] else
            problem_builder.report_oob_indices [ix]
            []
        _ : By_Type -> self.columns.filter column-> column.value_type.is_same_type selector.type
        _ : By_Type -> self.columns.filter column->
            column.should_be_selected_by_type selector.type
        _ ->
            matches = match_columns selector case_sensitivity self.columns
            if matches.is_empty then problem_builder.report_missing_input_columns [selector]

@ -431,19 +431,19 @@ Any.should_be_a self typ =
        c : Meta.Constructor -> case Meta.meta self of
            a : Meta.Atom ->
                if a.constructor == c then Spec_Result.Success else
                    expected_type = Meta.get_qualified_type_name typ
                    expected_type = c.declaring_type.qualified_name
                    actual_type = Meta.get_qualified_type_name self
                    message = "Expected a value of type "+expected_type+", built with constructor "+c.name+", but got a value of type "+actual_type+", built with constructor "+a.constructor.name+" instead (at "+loc+")."
                    Test.fail message
            _ ->
                expected_type = Meta.get_qualified_type_name typ
                expected_type = c.declaring_type.qualified_name
                actual_type = Meta.get_qualified_type_name self
                message = "Expected a value of type "+expected_type+", built with constructor "+c.name+", but got a value of type "+actual_type+" instead (at "+loc+")."
                Test.fail message
        _ : Meta.Type ->
        meta_type : Meta.Type ->
            ok = self.is_a typ || self==typ
            if ok then Spec_Result.Success else
                expected_type = Meta.get_qualified_type_name typ
                expected_type = meta_type.qualified_name
                actual_type = Meta.get_qualified_type_name self
                message = "Expected a value of type "+expected_type+" but got a value of type "+actual_type+" instead (at "+loc+")."
                Test.fail message

@ -75,7 +75,7 @@ test_advanced_problem_handling action error_checker warnings_checker result_checker
assume_no_problems result =
    loc = Meta.get_source_location 1
    if result.is_error then
        Test.fail "Expected the result to not be an error, but a dataflow error has been matched: "+result.catch.to_display_text+" (at "+loc+")."
        Test.fail "Expected the result to not be an error, but a dataflow error has been matched: "+result.catch.to_display_text+" (at "+loc+")." details=result.get_stack_trace_text
    warnings = get_attached_warnings result
    if warnings.not_empty then
        Test.fail "Expected the result to not contain any warnings, but it did: "+warnings.to_text+" (at "+loc+")."

@ -104,7 +104,12 @@ type Suite
        succ_tests = all_results.filter (r-> r.is_success) . length
        failed_tests = all_results.filter (r-> r.is_fail)
        failed_tests_number = failed_tests.length
        failed_tests_names = failed_tests.map (t-> t.spec_name.replace ' ' '.') . distinct . take 10 . join "|"
        failed_tests_names = failed_tests.map .spec_name
            . distinct
            . take 10
            . map Regex.escape
            . map (t-> t.replace ' ' '.')
            . join "|"
        skipped_tests = all_results.filter (r-> r.is_pending) . length
        pending_groups = matching_specs.filter (p-> p.first.is_pending) . length
        case should_exit of

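Note: the joined names feed a test-filter regex (used to re-run just the failed specs), which is why each name is now regex-escaped before spaces are normalized to `.` - previously a name containing characters like `(` would produce a broken pattern. Illustrative pipeline (the exact escaped form depends on `Regex.escape`):

    ["parse (slow)", "round trip"] . map Regex.escape . map (t-> t.replace ' ' '.') . join "|"
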
@ -0,0 +1,39 @@
package org.enso.snowflake;

import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;

public class SnowflakeJDBCUtils {
  private static final DateTimeFormatter dateTimeWithOffsetFormatter =
      DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS XXX");

  public static void setDateTime(
      PreparedStatement stmt, int columnIndex, ZonedDateTime dateTime, boolean keepOffset)
      throws SQLException {
    if (keepOffset) {
      String formatted = dateTime.format(dateTimeWithOffsetFormatter);
      stmt.setString(columnIndex, formatted);
    } else {
      LocalDateTime localDateTime = dateTime.toLocalDateTime();
      stmt.setString(columnIndex, localDateTime.toString());
    }
  }

  public static void setTimeOfDay(PreparedStatement stmt, int columnIndex, LocalTime timeOfDay)
      throws SQLException {
    // We use setString instead of setTime, because setTime was losing milliseconds
    // (with some tricks it could keep milliseconds, but still not nanoseconds).
    // Setting the value as text keeps the full precision.
    stmt.setString(columnIndex, timeOfDay.toString());
  }

  public static void setDate(PreparedStatement stmt, int columnIndex, LocalDate date)
      throws SQLException {
    stmt.setDate(columnIndex, java.sql.Date.valueOf(date));
  }
}
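Note: on the Enso side this helper would presumably be reached via a polyglot import; a minimal sketch under that assumption (the wrapper name is made up):

    polyglot java import org.enso.snowflake.SnowflakeJDBCUtils

    # Writes a zoned date-time into a prepared statement as text, keeping nanosecond precision.
    set_date_time_param stmt ix zoned_date_time keep_offset =
        SnowflakeJDBCUtils.setDateTime stmt ix zoned_date_time keep_offset
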
@ -50,7 +50,7 @@ add_redshift_specific_specs suite_builder create_connection_fn =

    group_builder.specify "should infer standard types correctly" <|
        data.t.at "strs" . value_type . is_text . should_be_true
        data.t.at "ints" . value_type . is_integer . should_be_true
        setup.expect_integer_type <| data.t.at "ints"
        data.t.at "bools" . value_type . is_boolean . should_be_true
        data.t.at "reals" . value_type . is_floating_point . should_be_true

@ -14,7 +14,7 @@ polyglot java import java.lang.Integer as Java_Integer
Decimal.should_have_rep self rep = self.internal_representation . should_equal rep

add_specs suite_builder =
    suite_builder.group "construction" group_builder->
    suite_builder.group "(Decimal_Spec) construction" group_builder->
        group_builder.specify "should be able to construct a Decimal from a string" <|
            Decimal.new "123.45" . should_have_rep [12345, 5, 2]
            Decimal.from_string "123.45" . should_have_rep [12345, 5, 2]
@ -128,7 +128,7 @@ add_specs suite_builder =
            Decimal.new 12000 (Math_Context.new 2) . should_equal 12000
            Decimal.new 12000 (Math_Context.new 3) . should_equal 12000

    suite_builder.group "comparison" group_builder->
    suite_builder.group "(Decimal_Spec) comparison" group_builder->
        group_builder.specify "should compare correctly" <|
            nums = [["-45.23", "124.872"], [-45.23, 124.872], [-45, 124]]
            nums.map pr->
@ -364,7 +364,7 @@ add_specs suite_builder =
            (large_fraction == large_int) . should_be_false
            (large_int == large_fraction ) . should_be_false

    suite_builder.group "edge cases" group_builder->
    suite_builder.group "(Decimal_Spec) edge cases" group_builder->
        group_builder.specify "can support values outside the double range" <|
            d = Decimal.new Float.max_value
            (d == Float.max_value) . should_be_false
@ -385,7 +385,7 @@ add_specs suite_builder =
            Comparable.hash_builtin ((d * d) + 0.1) . should_equal 2146435072
            Comparable.hash_builtin ((-(d * d)) + 0.1) . should_equal -1048576

    suite_builder.group "arithmetic" group_builder->
    suite_builder.group "(Decimal_Spec) arithmetic" group_builder->
        group_builder.specify "should allow arithmetic with Decimals, without Math_Context" <|
            (Decimal.new 1 + Decimal.new 2) . should_equal (Decimal.new 3)
            (Decimal.new 1.1 + Decimal.new 2.2) . should_equal (Decimal.new 3.3)
@ -518,7 +518,7 @@ add_specs suite_builder =
            nd2 = -d2
            nd2 . should_equal 5

    suite_builder.group "conversions" group_builder->
    suite_builder.group "(Decimal_Spec) conversions" group_builder->
        group_builder.specify "should convert correctly to and from Integer" <|
            a = Decimal.new "12000"
            b = Decimal.new "12000" (Math_Context.new 2)
@ -609,7 +609,7 @@ add_specs suite_builder =
            Problems.expect_only_warning Loss_Of_Numeric_Precision (huge_fraction . to_integer)
            Problems.not_expect_warning Loss_Of_Numeric_Precision (huge_int . to_integer)

    suite_builder.group "remainder" group_builder->
    suite_builder.group "(Decimal_Spec) remainder" group_builder->
        group_builder.specify "should define remainder" <|
            cases = []
                + [[5, 3, 2], [5.0, 3.0, 2.0], [3.5, 2, 1.5], [10.5, 1.0, 0.5], [3, 1, 0], [3.0, 1.0, 0]]
@ -633,7 +633,7 @@ add_specs suite_builder =
            (Decimal.new 3 % 0) . should_fail_with Arithmetic_Error
            (Decimal.new 3 % Decimal.new 0) . should_fail_with Arithmetic_Error

    suite_builder.group "div" group_builder->
    suite_builder.group "(Decimal_Spec) div" group_builder->
        group_builder.specify "should define div" <|
            Decimal.new "10" . div (Decimal.new "3") . should_equal 3
            Decimal.new "10.28" . div (Decimal.new "3.01") . should_equal 3
@ -654,7 +654,7 @@ add_specs suite_builder =
            nt_error = Arithmetic_Error.Error "Non-terminating decimal expansion; no exact representable decimal result. Please use `.divide` with an explicit `Math_Context` to limit the numeric precision."
            ((Decimal.new "1") / (Decimal.new "3")) . should_fail_with nt_error

    suite_builder.group "pow" group_builder->
    suite_builder.group "(Decimal_Spec) pow" group_builder->
        group_builder.specify "should define pow" <|
            Decimal.new "10" . pow 3 . should_equal 1000

@ -682,7 +682,7 @@ add_specs suite_builder =
            Decimal.new "10" . pow -1 . should_fail_with Arithmetic_Error
            Decimal.new "10" . pow 99999999999 . should_fail_with Arithmetic_Error

    suite_builder.group "text conversion" group_builder->
    suite_builder.group "(Decimal_Spec) text conversion" group_builder->
        group_builder.specify "should convert to text correctly" <|
            Decimal.new "34.56" . to_text . should_equal "34.56"
            Decimal.new "34.56" . to_display_text . should_equal "34.56"
@ -727,7 +727,7 @@ add_specs suite_builder =

            Decimal.parse "123,456,789.87654" . should_fail_with Number_Parse_Error

    suite_builder.group "signs" group_builder->
    suite_builder.group "(Decimal_Spec) signs" group_builder->
        group_builder.specify "should calculate abs correctly" <|
            Decimal.new "12.345" . abs . should_equal 12.345
            Decimal.new "-12.345" . abs . should_equal 12.345
@ -747,12 +747,12 @@ add_specs suite_builder =
            Decimal.new "-12.345E97" . signum . should_equal -1
            Decimal.new "0" . signum . should_equal 0

    suite_builder.group "rounding" group_builder->
    suite_builder.group "(Decimal_Spec) Rounding" group_builder->
        do_round n dp=0 use_bankers=False =
            d = Decimal.new n . round dp use_bankers
            d.to_float

        Round_Spec.add_specs group_builder do_round
        Round_Spec.add_specs group_builder (Round_Spec.Batch_Runner.from_function do_round) run_advanced=True

        group_builder.specify "Large values" <|
            Decimal.new "1234.5678E-50" . round 53 . should_equal (Decimal.new "1234.568E-50")
@ -780,7 +780,7 @@ add_specs suite_builder =
            Decimal.new "1234.5678E-50" . round 53 . should_equal (Decimal.new "1234.568E-50")
            Decimal.new "2.5" . round use_bankers=True . should_equal 2

    suite_builder.group "floor, ceil, truncate" group_builder->
    suite_builder.group "(Decimal_Spec) floor, ceil, truncate" group_builder->
        group_builder.specify "should calculate floor correctly" <|
            Decimal.new "0" . floor . should_equal 0
            Decimal.new "1" . floor . should_equal 1
@ -881,7 +881,7 @@ add_specs suite_builder =
            Decimal.new "12.34" . truncate . should_equal (Decimal.new "12")
            Decimal.new "-12.34" . truncate . should_equal (Decimal.new "-12")

    suite_builder.group "min/max" group_builder->
    suite_builder.group "(Decimal_Spec) min/max" group_builder->
        group_builder.specify "should calculate min and max correctly" <|
            Decimal.new "12" . min (Decimal.new "13") . should_equal (Decimal.new "12")
            Decimal.new "12" . min (Decimal.new "11") . should_equal (Decimal.new "11")
@ -960,7 +960,7 @@ add_specs suite_builder =
            Math.min (Decimal.new "12E70") (Decimal.new "13E70") . should_equal (Decimal.new "12E70")
            Math.max (Decimal.new "12E70") (Decimal.new "13E70") . should_equal (Decimal.new "13E70")

    suite_builder.group "Integer/Float .to_decimal" group_builder->
    suite_builder.group "(Decimal_Spec) Integer/Float .to_decimal" group_builder->
        group_builder.specify ".to_decimal should convert to Decimal" <|
            12 . to_decimal . should_be_a Decimal
            12.3 . to_decimal . should_be_a Decimal
@ -972,7 +972,7 @@ add_specs suite_builder =
            12 . to_decimal . should_equal (Decimal.new "12")
            12.3 . to_decimal . should_equal (Decimal.new "12.3")

    suite_builder.group "BigDecimal internal representation methods" group_builder->
    suite_builder.group "(Decimal_Spec) BigDecimal internal representation methods" group_builder->
        group_builder.specify "internal accessors should work correctly" <|
            d = Decimal.new "123.456"
            d.precision . should_equal 6

@ -604,7 +604,7 @@ add_specs suite_builder =
            ((99999999999998 * 1000).div 1000) . round . should_equal 99999999999998

    suite_builder.group "Rounding" group_builder->
        Round_Spec.add_specs group_builder (.round)
        Round_Spec.add_specs group_builder (Round_Spec.Batch_Runner.from_function .round) run_advanced=True

        group_builder.specify "Decimal places out of range" <|
            3.1 . round 16 . should_fail_with Illegal_Argument

@ -6,9 +6,45 @@ from Standard.Test import all

polyglot java import java.math.BigInteger

type Batch_Runner
    Value (run_batch : Vector -> Vector -> Boolean -> Vector) (run_one : Number -> Integer -> Boolean -> Number)

    from_function (f : Number -> Integer -> Boolean -> Number) =
        batch values_vec dps_vec use_bankers =
            values_vec.zip dps_vec v-> dp-> f v dp use_bankers
        Batch_Runner.Value batch f

    run self (use_bankers : Boolean) (action : Batch_Builder -> Nothing) =
        configs = Vector.build vector_builder->
            action (Batch_Builder.Value vector_builder . round)
        values_vec = configs.map c-> c.at 0
        dps_vec = configs.map c-> c.at 1
        expected_vec = configs.map c-> c.at 2
        got_vec = self.run_batch values_vec dps_vec use_bankers
        got_vec.each_with_index ix-> got->
            expected = expected_vec.at ix
            value = values_vec.at ix
            dp = dps_vec.at ix
            Test.with_clue "round("+value.to_text+", "+dp.to_text+", use_bankers="+use_bankers.to_text+") -> " <|
                got.should_equal expected

type Batch_Builder
    Value vector_builder

    round self (value : Number) (dp : Integer = 0) =
        Check_Instance.Value self.vector_builder value dp

type Check_Instance
    Value vector_builder value dp

    should_equal self (expected : Number) =
        self.vector_builder.append [self.value, self.dp, expected]

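Note: how the runner is used in the rewritten specs below - the action only records cases via the builder DSL (each `should_equal` appends a `[value, dp, expected]` triple); `run` then executes the whole batch at once, which lets Database backends evaluate many cases in a single query, and checks every result with a clue:

    batch_runner.run use_bankers=False round_fun->
        round_fun 3.3 . should_equal 3         # records the case [3.3, 0, 3]
        round_fun 3.1415 2 . should_equal 3.14 # records [3.1415, 2, 3.14]
    # Only after the action returns are all recorded cases evaluated and asserted.
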
## PRIVATE
add_specs group_builder round_fun =
add_specs group_builder batch_runner run_advanced =
    pending_advanced = if run_advanced.not then "Advanced edge tests are disabled in this run."
    group_builder.specify "Can round positive decimals correctly" <|
        batch_runner.run use_bankers=False round_fun->
            round_fun 3.0 . should_equal 3
            round_fun 3.00001 . should_equal 3
            round_fun 3.3 . should_equal 3
@ -18,6 +54,7 @@ add_specs group_builder round_fun =
            round_fun 3.99999 . should_equal 4

    group_builder.specify "Can round negative decimals correctly" <|
        batch_runner.run use_bankers=False round_fun->
            round_fun -3.0 . should_equal -3
            round_fun -3.00001 . should_equal -3
            round_fun -3.3 . should_equal -3
@ -27,17 +64,20 @@ add_specs group_builder round_fun =
            round_fun -3.99999 . should_equal -4

    group_builder.specify "Explicit and implicit 0 decimal places work the same" <|
        batch_runner.run use_bankers=False round_fun->
            round_fun 3.00001 0 . should_equal 3
            round_fun 3.3 0 . should_equal 3
            round_fun 3.00001 . should_equal 3
            round_fun 3.3 . should_equal 3

    group_builder.specify "Can round zero and small decimals correctly" <|
        batch_runner.run use_bankers=False round_fun->
            round_fun 0.0 . should_equal 0
            round_fun 0.00001 . should_equal 0
            round_fun -0.00001 . should_equal 0

    group_builder.specify "Can round positive decimals to a specified number of decimal places" <|
        batch_runner.run use_bankers=False round_fun->
            round_fun 3.0001 2 . should_equal 3.0
            round_fun 3.1414 2 . should_equal 3.14
            round_fun 3.1415 2 . should_equal 3.14
@ -51,6 +91,7 @@ add_specs group_builder round_fun =
            round_fun 3.9999 3 . should_equal 4.0

    group_builder.specify "Can round negative decimals to a specified number of decimal places" <|
        batch_runner.run use_bankers=False round_fun->
            round_fun -3.0001 2 . should_equal -3.0
            round_fun -3.1414 2 . should_equal -3.14
            round_fun -3.1415 2 . should_equal -3.14
@ -64,6 +105,7 @@ add_specs group_builder round_fun =
            round_fun -3.9999 3 . should_equal -4.0

    group_builder.specify "Can round positive decimals to a specified negative number of decimal places" <|
        batch_runner.run use_bankers=False round_fun->
            round_fun 1234.0 -1 . should_equal 1230
            round_fun 1234.0 -2 . should_equal 1200
            round_fun 1234.0 -3 . should_equal 1000
@ -79,6 +121,7 @@ add_specs group_builder round_fun =
            round_fun 1494.0 -2 . should_equal 1500

    group_builder.specify "Can round negative decimals to a specified negative number of decimal places" <|
        batch_runner.run use_bankers=False round_fun->
            round_fun -1234.0 -1 . should_equal -1230
            round_fun -1234.0 -2 . should_equal -1200
            round_fun -1234.0 -3 . should_equal -1000
@ -93,34 +136,37 @@ add_specs group_builder round_fun =
            round_fun -1495.0 -2 . should_equal -1500
            round_fun -1494.0 -2 . should_equal -1500

group_builder.specify "Banker's rounding handles half-way values correctly" <|
|
||||
round_fun -3.5 use_bankers=True . should_equal -4
|
||||
round_fun -2.5 use_bankers=True . should_equal -2
|
||||
round_fun -1.5 use_bankers=True . should_equal -2
|
||||
round_fun -0.5 use_bankers=True . should_equal 0
|
||||
round_fun 0.5 use_bankers=True . should_equal 0
|
||||
round_fun 1.5 use_bankers=True . should_equal 2
|
||||
round_fun 2.5 use_bankers=True . should_equal 2
|
||||
round_fun 3.5 use_bankers=True . should_equal 4
|
||||
group_builder.specify "Banker's rounding handles half-way values correctly" pending=pending_advanced <|
|
||||
batch_runner.run use_bankers=True round_fun->
|
||||
round_fun -3.5 . should_equal -4
|
||||
round_fun -2.5 . should_equal -2
|
||||
round_fun -1.5 . should_equal -2
|
||||
round_fun -0.5 . should_equal 0
|
||||
round_fun 0.5 . should_equal 0
|
||||
round_fun 1.5 . should_equal 2
|
||||
round_fun 2.5 . should_equal 2
|
||||
round_fun 3.5 . should_equal 4
|
||||
|
||||
round_fun 0.235 2 use_bankers=True . should_equal 0.24
|
||||
round_fun 0.225 2 use_bankers=True . should_equal 0.22
|
||||
round_fun -0.235 2 use_bankers=True . should_equal -0.24
|
||||
round_fun -0.225 2 use_bankers=True . should_equal -0.22
|
||||
round_fun 0.235 2 . should_equal 0.24
|
||||
round_fun 0.225 2 . should_equal 0.22
|
||||
round_fun -0.235 2 . should_equal -0.24
|
||||
round_fun -0.225 2 . should_equal -0.22
|
||||
|
||||
round_fun 12350.0 -2 use_bankers=True . should_equal 12400
|
||||
round_fun 12250.0 -2 use_bankers=True . should_equal 12200
|
||||
round_fun -12350.0 -2 use_bankers=True . should_equal -12400
|
||||
round_fun -12250.0 -2 use_bankers=True . should_equal -12200
|
||||
round_fun 12350.0 -2 . should_equal 12400
|
||||
round_fun 12250.0 -2 . should_equal 12200
|
||||
round_fun -12350.0 -2 . should_equal -12400
|
||||
round_fun -12250.0 -2 . should_equal -12200
|
||||
|
||||
group_builder.specify "Banker's rounding handles non-half-way values just like normal rounding" <|
|
||||
round_fun 3.0 use_bankers=True . should_equal 3
|
||||
round_fun 3.00001 use_bankers=True . should_equal 3
|
||||
round_fun 3.3 use_bankers=True . should_equal 3
|
||||
round_fun 3.49999 use_bankers=True . should_equal 3
|
||||
round_fun 3.50001 use_bankers=True . should_equal 4
|
||||
round_fun 3.99999 use_bankers=True . should_equal 4
|
||||
group_builder.specify "Banker's rounding handles non-half-way values just like normal rounding" pending=pending_advanced <|
|
||||
batch_runner.run use_bankers=True round_fun->
|
||||
round_fun 3.0 . should_equal 3
|
||||
round_fun 3.00001 . should_equal 3
|
||||
round_fun 3.3 . should_equal 3
|
||||
round_fun 3.49999 . should_equal 3
|
||||
round_fun 3.50001 . should_equal 4
|
||||
round_fun 3.99999 . should_equal 4
|
||||
|
||||
batch_runner.run use_bankers=False round_fun->
|
||||
round_fun -3.0 . should_equal -3
|
||||
round_fun -3.00001 . should_equal -3
|
||||
round_fun -3.3 . should_equal -3
|
||||
@ -128,7 +174,8 @@ add_specs group_builder round_fun =
|
||||
round_fun -3.50001 . should_equal -4
|
||||
round_fun -3.99999 . should_equal -4
|
||||
|
||||
group_builder.specify "Can round correctly near the precision limit" <|
|
||||
group_builder.specify "Can round correctly near the precision limit" pending=pending_advanced <|
|
||||
batch_runner.run use_bankers=False round_fun->
|
||||
round_fun 1.22222222225 10 . should_equal 1.2222222223
|
||||
round_fun 1.222222222225 11 . should_equal 1.22222222223
|
||||
round_fun 1.2222222222225 12 . should_equal 1.222222222223
|
||||
@ -153,36 +200,38 @@ add_specs group_builder round_fun =
|
||||
round_fun -1.22222222222235 13 . should_equal -1.2222222222224
|
||||
round_fun -1.222222222222235 14 . should_equal -1.22222222222224
|
||||
|
||||
group_builder.specify "Can round correctly near the precision limit, using banker's rounding" <|
|
||||
round_fun 1.22222222225 10 use_bankers=True . should_equal 1.2222222222
|
||||
round_fun 1.222222222225 11 use_bankers=True . should_equal 1.22222222222
|
||||
round_fun 1.2222222222225 12 use_bankers=True . should_equal 1.222222222222
|
||||
round_fun 1.22222222222225 13 use_bankers=True . should_equal 1.2222222222222
|
||||
round_fun 1.222222222222225 14 use_bankers=True . should_equal 1.22222222222222
|
||||
group_builder.specify "Can round correctly near the precision limit, using banker's rounding" pending=pending_advanced <|
|
||||
batch_runner.run use_bankers=True round_fun->
|
||||
round_fun 1.22222222225 10 . should_equal 1.2222222222
|
||||
round_fun 1.222222222225 11 . should_equal 1.22222222222
|
||||
round_fun 1.2222222222225 12 . should_equal 1.222222222222
|
||||
round_fun 1.22222222222225 13 . should_equal 1.2222222222222
|
||||
round_fun 1.222222222222225 14 . should_equal 1.22222222222222
|
||||
|
||||
round_fun -1.22222222225 10 use_bankers=True . should_equal -1.2222222222
|
||||
round_fun -1.222222222225 11 use_bankers=True . should_equal -1.22222222222
|
||||
round_fun -1.2222222222225 12 use_bankers=True . should_equal -1.222222222222
|
||||
round_fun -1.22222222222225 13 use_bankers=True . should_equal -1.2222222222222
|
||||
round_fun -1.222222222222225 14 use_bankers=True . should_equal -1.22222222222222
|
||||
round_fun -1.22222222225 10 . should_equal -1.2222222222
|
||||
round_fun -1.222222222225 11 . should_equal -1.22222222222
|
||||
round_fun -1.2222222222225 12 . should_equal -1.222222222222
|
||||
round_fun -1.22222222222225 13 . should_equal -1.2222222222222
|
||||
round_fun -1.222222222222225 14 . should_equal -1.22222222222222
|
||||
|
||||
round_fun 1.22222222235 10 use_bankers=True . should_equal 1.2222222224
|
||||
round_fun 1.222222222235 11 use_bankers=True . should_equal 1.22222222224
|
||||
round_fun 1.2222222222235 12 use_bankers=True . should_equal 1.222222222224
|
||||
round_fun 1.22222222222235 13 use_bankers=True . should_equal 1.2222222222224
|
||||
round_fun 1.222222222222235 14 use_bankers=True . should_equal 1.22222222222224
|
||||
round_fun 1.22222222235 10 . should_equal 1.2222222224
|
||||
round_fun 1.222222222235 11 . should_equal 1.22222222224
|
||||
round_fun 1.2222222222235 12 . should_equal 1.222222222224
|
||||
round_fun 1.22222222222235 13 . should_equal 1.2222222222224
|
||||
round_fun 1.222222222222235 14 . should_equal 1.22222222222224
|
||||
|
||||
round_fun -1.22222222235 10 use_bankers=True . should_equal -1.2222222224
|
||||
round_fun -1.222222222235 11 use_bankers=True . should_equal -1.22222222224
|
||||
round_fun -1.2222222222235 12 use_bankers=True . should_equal -1.222222222224
|
||||
round_fun -1.22222222222235 13 use_bankers=True . should_equal -1.2222222222224
|
||||
round_fun -1.222222222222235 14 use_bankers=True . should_equal -1.22222222222224
|
||||
round_fun -1.22222222235 10 . should_equal -1.2222222224
|
||||
round_fun -1.222222222235 11 . should_equal -1.22222222224
|
||||
round_fun -1.2222222222235 12 . should_equal -1.222222222224
|
||||
round_fun -1.22222222222235 13 . should_equal -1.2222222222224
|
||||
round_fun -1.222222222222235 14 . should_equal -1.22222222222224
|
||||
|
||||
group_builder.specify "Floating point imperfect representation counter-examples" pending=(if group_builder.name.contains "Snowflake" then "TODO: https://github.com/enso-org/enso/issues/10307") <|
|
||||
round_fun 1.225 2 use_bankers=True . should_equal 1.22 # Actual result 1.23
|
||||
round_fun 37.785 2 . should_equal 37.79
|
||||
batch_runner.run_one 1.225 2 use_bankers=True . should_equal 1.22 # Actual result 1.23
|
||||
batch_runner.run_one 37.785 2 . should_equal 37.79
|
||||
|
||||
group_builder.specify "Can round small integers to a specified number of decimal places correctly (value is unchanged)"
|
||||
group_builder.specify "Can round small integers to a specified number of decimal places correctly (value is unchanged)" <|
|
||||
batch_runner.run use_bankers=False round_fun->
|
||||
round_fun 0 . should_equal 0
|
||||
round_fun 3 . should_equal 3
|
||||
round_fun -3 . should_equal -3
|
||||
@ -191,7 +240,8 @@ add_specs group_builder round_fun =
|
||||
round_fun 3 1 . should_equal 3
|
||||
round_fun -3 1 . should_equal -3
|
||||
|
||||
group_builder.specify "Can round integers to a specified number of negative places correctly"
|
||||
group_builder.specify "Can round integers to a specified number of negative places correctly" <|
|
||||
batch_runner.run use_bankers=False round_fun->
|
||||
round_fun 0 -1 . should_equal 0
|
||||
round_fun 4 -1 . should_equal 0
|
||||
round_fun 5 -1 . should_equal 10
|
||||
@ -219,7 +269,8 @@ add_specs group_builder round_fun =
|
||||
round_fun 3098 -3 . should_equal 3000
|
||||
round_fun 3101 -3 . should_equal 3000
|
||||
|
||||
group_builder.specify "Can round negative integers to a specified number of negative places correctly"
|
||||
group_builder.specify "Can round negative integers to a specified number of negative places correctly" pending=pending_advanced <|
|
||||
batch_runner.run use_bankers=False round_fun->
|
||||
round_fun -4 -1 . should_equal 0
|
||||
round_fun -5 -1 . should_equal -10
|
||||
round_fun -6 -1 . should_equal -10
|
||||
@ -246,36 +297,37 @@ add_specs group_builder round_fun =
|
||||
round_fun -3098 -3 . should_equal -3000
|
||||
round_fun -3101 -3 . should_equal -3000
|
||||
|
||||
group_builder.specify "Can round negative integers to a specified number of negative places with banker's rounding correctly" <|
|
||||
round_fun 12300 -2 use_bankers=True . should_equal 12300
|
||||
round_fun 12301 -2 use_bankers=True . should_equal 12300
|
||||
round_fun 12330 -2 use_bankers=True . should_equal 12300
|
||||
round_fun 12349 -2 use_bankers=True . should_equal 12300
|
||||
round_fun 12350 -2 use_bankers=True . should_equal 12400
|
||||
round_fun 12351 -2 use_bankers=True . should_equal 12400
|
||||
round_fun 12370 -2 use_bankers=True . should_equal 12400
|
||||
round_fun 12430 -2 use_bankers=True . should_equal 12400
|
||||
round_fun 12470 -2 use_bankers=True . should_equal 12500
|
||||
group_builder.specify "Can round negative integers to a specified number of negative places with banker's rounding correctly" pending=pending_advanced <|
|
||||
batch_runner.run use_bankers=True round_fun->
|
||||
round_fun 12300 -2 . should_equal 12300
|
||||
round_fun 12301 -2 . should_equal 12300
|
||||
round_fun 12330 -2 . should_equal 12300
|
||||
round_fun 12349 -2 . should_equal 12300
|
||||
round_fun 12350 -2 . should_equal 12400
|
||||
round_fun 12351 -2 . should_equal 12400
|
||||
round_fun 12370 -2 . should_equal 12400
|
||||
round_fun 12430 -2 . should_equal 12400
|
||||
round_fun 12470 -2 . should_equal 12500
|
||||
|
||||
round_fun 12249 -2 use_bankers=True . should_equal 12200
|
||||
round_fun 12250 -2 use_bankers=True . should_equal 12200
|
||||
round_fun 12251 -2 use_bankers=True . should_equal 12300
|
||||
round_fun 12249 -2 . should_equal 12200
|
||||
round_fun 12250 -2 . should_equal 12200
|
||||
round_fun 12251 -2 . should_equal 12300
|
||||
|
||||
round_fun -12300 -2 use_bankers=True . should_equal -12300
|
||||
round_fun -12301 -2 use_bankers=True . should_equal -12300
|
||||
round_fun -12330 -2 use_bankers=True . should_equal -12300
|
||||
round_fun -12349 -2 use_bankers=True . should_equal -12300
|
||||
round_fun -12350 -2 use_bankers=True . should_equal -12400
|
||||
round_fun -12351 -2 use_bankers=True . should_equal -12400
|
||||
round_fun -12370 -2 use_bankers=True . should_equal -12400
|
||||
round_fun -12430 -2 use_bankers=True . should_equal -12400
|
||||
round_fun -12470 -2 use_bankers=True . should_equal -12500
|
||||
round_fun -12300 -2 . should_equal -12300
|
||||
round_fun -12301 -2 . should_equal -12300
|
||||
round_fun -12330 -2 . should_equal -12300
|
||||
round_fun -12349 -2 . should_equal -12300
|
||||
round_fun -12350 -2 . should_equal -12400
|
||||
round_fun -12351 -2 . should_equal -12400
|
||||
round_fun -12370 -2 . should_equal -12400
|
||||
round_fun -12430 -2 . should_equal -12400
|
||||
round_fun -12470 -2 . should_equal -12500
|
||||
|
||||
round_fun -12249 -2 use_bankers=True . should_equal -12200
|
||||
round_fun -12250 -2 use_bankers=True . should_equal -12200
|
||||
round_fun -12251 -2 use_bankers=True . should_equal -12300
|
||||
round_fun -12249 -2 . should_equal -12200
|
||||
round_fun -12250 -2 . should_equal -12200
|
||||
round_fun -12251 -2 . should_equal -12300
|
||||
|
||||
group_builder.specify "Handles incorrect argument types" <|
|
||||
Test.expect_panic_with (round_fun 123 "two") Type_Error
|
||||
Test.expect_panic_with (round_fun 123 use_bankers="no") Type_Error
|
||||
Test.expect_panic_with (round_fun 123 use_bankers=0) Type_Error
|
||||
Test.expect_panic_with (batch_runner.run_one 123 "two") Type_Error
|
||||
Test.expect_panic_with (batch_runner.run_one 123 use_bankers="no") Type_Error
|
||||
Test.expect_panic_with (batch_runner.run_one 123 use_bankers=0) Type_Error
|
||||
|
@ -181,35 +181,35 @@ snowflake_specific_spec suite_builder default_connection db_name setup =
        i = data.t.column_info
        i.at "Column" . to_vector . should_equal ["strs", "ints", "bools", "doubles"]
        i.at "Items Count" . to_vector . should_equal [3, 1, 2, 3]
        i.at "Value Type" . to_vector . should_equal [Value_Type.Char, Value_Type.Integer, Value_Type.Boolean, Value_Type.Float]
        # The integer column is treated as NUMBER(38, 0) in Snowflake so the value type reflects that:
        i.at "Value Type" . to_vector . should_equal [Value_Type.Char, Value_Type.Decimal 38 0, Value_Type.Boolean, Value_Type.Float]

    group_builder.specify "should return Table information, also for aggregated results" <|
        i = data.t.aggregate columns=[Aggregate_Column.Concatenate "strs", Aggregate_Column.Sum "ints", Aggregate_Column.Count_Distinct "bools"] . column_info
        i.at "Column" . to_vector . should_equal ["Concatenate strs", "Sum ints", "Count Distinct bools"]
        i.at "Items Count" . to_vector . should_equal [1, 1, 1]
        i.at "Value Type" . to_vector . should_equal [Value_Type.Char, Value_Type.Integer, Value_Type.Integer]
        i.at "Value Type" . to_vector . should_equal [Value_Type.Char, Value_Type.Decimal 38 0, Value_Type.Decimal 38 0]

    group_builder.specify "should infer standard types correctly" <|
        data.t.at "strs" . value_type . is_text . should_be_true
        data.t.at "ints" . value_type . is_integer . should_be_true
        setup.expect_integer_type <| data.t.at "ints"
        data.t.at "bools" . value_type . is_boolean . should_be_true
        data.t.at "doubles" . value_type . is_floating_point . should_be_true

    group_builder.specify "should preserve Snowflake types when table is materialized, where possible" pending="TODO" <|
        name = Name_Generator.random_name "types-test"
        Problems.assume_no_problems <|
            data.connection.execute_update 'CREATE TEMPORARY TABLE "'+name+'" ("int4" int4, "int2" int2, "txt-limited" varchar(10), "txt-fixed" char(3))'
        t1 = data.connection.query (SQL_Query.Table_Name name)
        t1.at "int4" . value_type . should_equal (Value_Type.Integer Bits.Bits_32)
        t1.at "int2" . value_type . should_equal (Value_Type.Integer Bits.Bits_16)
        t1.at "txt-limited" . value_type . should_equal (Value_Type.Char size=10 variable_length=True)
        t1.at "txt-fixed" . value_type . should_equal (Value_Type.Char size=3 variable_length=False)
    group_builder.specify "will report true integer types but infer smartly when materialized (small numbers become Integer in-memory, not Decimal)" <|
        t1 = table_builder [["small_ints", [1, 2, 3]], ["big_ints", [2^100, 2^110, 1]]] . sort "small_ints"

        # Integer types are NUMBER(38, 0) in Snowflake so they are all mapped to decimal
        t1.at "small_ints" . value_type . should_equal (Value_Type.Decimal 38 0)
        t1.at "big_ints" . value_type . should_equal (Value_Type.Decimal 38 0)

        in_memory = t1.read
        in_memory.at "int4" . value_type . should_equal (Value_Type.Integer Bits.Bits_32)
        in_memory.at "int2" . value_type . should_equal (Value_Type.Integer Bits.Bits_16)
        in_memory.at "txt-limited" . value_type . should_equal (Value_Type.Char size=10 variable_length=True)
        in_memory.at "txt-fixed" . value_type . should_equal (Value_Type.Char size=3 variable_length=False)
        in_memory.at "small_ints" . value_type . should_equal (Value_Type.Integer Bits.Bits_64)
        in_memory.at "big_ints" . value_type . should_equal (Value_Type.Decimal 38 0)

        # Check correctness of values
        in_memory.at "small_ints" . to_vector . should_equal [1, 2, 3]
        in_memory.at "big_ints" . to_vector . should_equal [2^100, 2^110, 1]

    suite_builder.group "[Snowflake] Dialect-specific codegen" group_builder->
        data = Snowflake_Info_Data.setup default_connection
@ -239,15 +239,15 @@ snowflake_specific_spec suite_builder default_connection db_name setup =
        r = data.t.aggregate columns=[Aggregate_Column.Count, Aggregate_Column.Count_Empty "txt", Aggregate_Column.Count_Not_Empty "txt", Aggregate_Column.Count_Distinct "i1", Aggregate_Column.Count_Not_Nothing "i2", Aggregate_Column.Count_Nothing "i3"]
        r.column_count . should_equal 6
        r.columns.each column->
            column.value_type . should_equal Value_Type.Integer
            column.value_type . should_equal (Value_Type.Decimal 18 0)

    group_builder.specify "Sum" <|
        r = data.t.aggregate columns=[Aggregate_Column.Sum "i1", Aggregate_Column.Sum "i2", Aggregate_Column.Sum "i3", Aggregate_Column.Sum "i4", Aggregate_Column.Sum "r1", Aggregate_Column.Sum "r2"]
        r.columns.at 0 . value_type . should_equal Value_Type.Integer
        r.columns.at 1 . value_type . should_equal Value_Type.Integer
        # TODO are these types right??
        r.columns.at 2 . value_type . should_equal Value_Type.Integer
        r.columns.at 3 . value_type . should_equal Value_Type.Integer
        r.columns.at 0 . value_type . should_equal (Value_Type.Decimal 38 0)
        r.columns.at 1 . value_type . should_equal (Value_Type.Decimal 38 0)

        r.columns.at 2 . value_type . should_equal (Value_Type.Decimal 38 0)
        r.columns.at 3 . value_type . should_equal (Value_Type.Decimal 38 0)
        r.columns.at 4 . value_type . should_equal (Value_Type.Float Bits.Bits_64)
        r.columns.at 5 . value_type . should_equal (Value_Type.Float Bits.Bits_64)

@ -567,8 +567,8 @@ add_snowflake_specs suite_builder create_connection_fn db_name =

    Common_Spec.add_specs suite_builder prefix create_connection_fn

    common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by_unicode_normalization_by_default=True allows_mixed_type_comparisons=False fixed_length_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=True supports_decimal_type=True supported_replace_params=supported_replace_params run_advanced_edge_case_tests_by_default=False
    aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last_row_order=False aggregation_problems=False
    common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by_unicode_normalization_by_default=True allows_mixed_type_comparisons=False fixed_length_text_columns=False different_size_integer_types=False removes_trailing_whitespace_casting_from_char_to_varchar=True supports_decimal_type=True supported_replace_params=supported_replace_params run_advanced_edge_case_tests_by_default=False supports_date_time_without_timezone=True supports_nanoseconds_in_time=True
    aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last=False first_last_row_order=False aggregation_problems=False text_concat=False
    agg_in_memory_table = ((Project_Description.new enso_dev.Table_Tests).data / "data.csv") . read

    agg_table_fn = _->
@ -577,10 +577,17 @@ add_snowflake_specs suite_builder create_connection_fn db_name =
    empty_agg_table_fn = _->
        (agg_in_memory_table.take (..First 0)).select_into_database_table default_connection.get (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True

    setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_fn light_table_builder=light_table_builder
    setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_fn light_table_builder=light_table_builder is_integer_type=is_snowflake_integer

    snowflake_specific_spec suite_builder default_connection db_name setup
    Common_Table_Operations.Main.add_specs suite_builder setup
    Upload_Spec.add_specs suite_builder setup create_connection_fn

## PRIVATE
is_snowflake_integer value_type = case value_type of
    Value_Type.Integer _ -> True
    Value_Type.Decimal _ scale -> scale == 0
    _ -> False

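Note: illustrative expectations for this predicate - any zero-scale Decimal counts as an integer for the Snowflake backend:

    is_snowflake_integer (Value_Type.Integer Bits.Bits_64) . should_be_true
    is_snowflake_integer (Value_Type.Decimal 38 0) . should_be_true
    is_snowflake_integer (Value_Type.Decimal 38 2) . should_be_false
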
## PRIVATE
supported_replace_params : Hashset Replace_Params
@ -601,7 +608,6 @@ add_table_specs suite_builder =
        db_name = get_configured_connection_details.database
        add_snowflake_specs suite_builder connection_builder db_name
        Transaction_Spec.add_specs suite_builder connection_builder "[Snowflake] "
        Upload_Spec.add_specs suite_builder connection_builder "[Snowflake] "

    suite_builder.group "[Snowflake] Secrets in connection settings" group_builder->
        cloud_setup = Cloud_Tests_Setup.prepare
@ -616,12 +622,19 @@ add_table_specs suite_builder =
            Panic.with_finalizer connection.close <|
                connection.tables . should_be_a Table

with_secret name value callback =
with_secret name value callback = case value of
    # If it is already a secret, we pass it as-is.
    _ : Enso_Secret -> callback value
    # Otherwise we create the secret, and clean it up afterwards.
    _ : Text ->
        secret = Enso_Secret.create name+Random.uuid value
        secret.should_succeed
        Panic.with_finalizer secret.delete (callback secret)

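Note: usage is unchanged for both accepted input kinds; a sketch (the secret name, value and the callback body are illustrative, `connect_with_secret` is a made-up helper):

    with_secret "username" "plain-text-value" secret->
        # `secret` is a freshly created Enso_Secret here; it is deleted after the callback.
        connect_with_secret secret
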
get_configured_connection_details =
## We rethrow any dataflow errors to ensure that they are reported.
   Without it, a dataflow error could make some tests just not be registered and
   not run, without displaying any failures.
get_configured_connection_details = Panic.rethrow <|
    account_name = Environment.get "ENSO_SNOWFLAKE_ACCOUNT"
    if account_name.is_nothing then Nothing else
        get_var name =

@ -133,7 +133,7 @@ add_specs suite_builder setup =
        rows = t1.rows.to_vector . map .to_vector
        rows . should_equal [['a', 1], ['b', 2], ['a', 3], ['a', 4], ['c', 5]]
        t1.at "Row" . to_vector . should_equal [1, 2, 3, 4, 5]
        t1.at "Row" . value_type . is_integer . should_be_true
        setup.expect_integer_type <| t1.at "Row"

    group_builder.specify "should allow customizing the starting index and step" <|
        t = table_builder [["X", ['a', 'b', 'a']]]

@ -11,6 +11,7 @@ from Standard.Test import all


from project.Common_Table_Operations.Util import run_default_backend, within_table
import project.Common_Table_Operations.Util

polyglot java import java.lang.Double

@ -23,29 +24,24 @@ main filter=Nothing =
type Data
    Value ~data

    connection self = self.data.at 0
    table self = self.data.at 1
    empty_table self = self.data.at 2
    table self = self.data.at 0
    empty_table self = self.data.at 1

    setup create_connection_fn table_fn empty_table_fn = Data.Value <|
        connection = create_connection_fn Nothing
    setup table_fn empty_table_fn = Data.Value <|
        table = table_fn Nothing
        empty_table = empty_table_fn Nothing
        [connection, table, empty_table]

    teardown self =
        self.connection.close

        [table, empty_table]

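Note: because `Data.Value ~data` takes its payload lazily, `Data.setup` is now cheap to call once up-front; the shared tables are only created when a spec first touches them. Sketch:

    data = Data.setup table_fn empty_table_fn
    data.table        # first access forces `[table_fn Nothing, empty_table_fn Nothing]`
    data.empty_table  # reuses the already-computed payload
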
## Runs the common aggregate tests.
add_specs suite_builder setup =
    prefix = setup.prefix
    create_connection_fn = setup.create_connection_func
    table_fn = setup.table_fn
    empty_table_fn = setup.empty_table_fn
    materialize = setup.materialize
    is_database = setup.is_database
    test_selection = setup.aggregate_test_selection
    table_builder = setup.table_builder
    data = Data.setup table_fn empty_table_fn

    expect_column_names names table =
        table.columns . map .name . should_equal names frames_to_skip=3
@ -59,11 +55,6 @@ add_specs suite_builder setup =
        if enabled_flag.not then "Not supported." else Nothing

    suite_builder.group prefix+"Table.aggregate should summarize whole table" group_builder->
        data = Data.setup create_connection_fn table_fn empty_table_fn

        group_builder.teardown <|
            data.teardown

        group_builder.specify "should be able to count" <|
            grouped = data.table.aggregate columns=[Count]
            materialized = materialize grouped
@ -252,8 +243,6 @@ add_specs suite_builder setup =
            materialized.columns.at 0 . at 0 . length . should_equal 7500

    suite_builder.group prefix+"Table.aggregate should summarize empty table" group_builder->
        data = Data.setup create_connection_fn table_fn empty_table_fn

        group_builder.specify "should be able to count" <|
            grouped = data.empty_table.aggregate columns=[Count]
            materialized = materialize grouped
@ -379,8 +368,6 @@ add_specs suite_builder setup =
            materialized.columns.at 0 . at 0 . should_equal Nothing

    suite_builder.group prefix+"Table.aggregate should not summarize empty table when grouped" group_builder->
        data = Data.setup create_connection_fn table_fn empty_table_fn

        group_builder.specify "should be able to count" <|
            grouped = data.empty_table.aggregate [0] [Count]
            materialized = materialize grouped
@ -492,8 +479,6 @@ add_specs suite_builder setup =
            materialized.columns.at 1 . name . should_equal "Concatenate Code"

    suite_builder.group prefix+"Table.aggregate should be able to group on single field" group_builder->
        data = Data.setup create_connection_fn table_fn empty_table_fn

        group_builder.specify "should be able to count" <|
            grouped = data.table.aggregate ["Index"] [Count]
            materialized = materialize grouped
@ -696,8 +681,6 @@ add_specs suite_builder setup =
            materialized.columns.at 1 . at idx . length . should_equal 783

    suite_builder.group prefix+"Table.aggregate should be able to group on multiple fields" group_builder->
        data = Data.setup create_connection_fn table_fn empty_table_fn

        group_builder.specify "should be able to count" <|
            grouped = data.table.aggregate ["Flag", "Index"] [Count]
            materialized = materialize grouped
@ -913,14 +896,6 @@ add_specs suite_builder setup =
            materialized.columns.at 2 . at idx . length . should_equal 381

    suite_builder.group prefix+"Table.aggregate Shortest" (pending = resolve_pending test_selection.text_shortest_longest) group_builder->
        data = Data.setup create_connection_fn table_fn empty_table_fn

        group_builder.teardown <|
            data.teardown

        table_builder cols =
            setup.table_builder cols connection=data.connection

        group_builder.specify "should correctly handle empty strings versus missing (null) strings" <|
            table = table_builder [["A", ["abcd", "f", ""]], ["B", [Nothing, "f", "abc"]]]
            result = table.aggregate [] [Shortest "A", Shortest "B"]
@ -934,22 +909,22 @@ add_specs suite_builder setup =
            materialized.columns.at 1 . name . should_equal "Shortest B"
            materialized.columns.at 1 . to_vector . should_equal ["f"]

    # Special case for Snowflake until the https://github.com/enso-org/enso/issues/10412 ticket is resolved.
    if setup.prefix.contains "Snowflake" then
        suite_builder.group prefix+"Table.aggregate Concatenate" group_builder->
            group_builder.specify "should be supported" <|
                table = table_builder [["X", ["A", "B", "C"]]]
                result = table.aggregate columns=[(Concatenate "X")]
                result.row_count . should_equal 1
                str = result.at 0 . at 0
                # No assumptions about ordering
                str . should_contain "A"
                str . should_contain "B"
                str . should_contain "C"

    suite_builder.group prefix+"Table.aggregate Concatenate" (pending = resolve_pending test_selection.text_concat) group_builder->
        data = Data.setup create_connection_fn table_fn empty_table_fn

        group_builder.teardown <|
            data.teardown

        table_builder cols =
            setup.table_builder cols connection=data.connection

        build_sorted_table table_structure =
            # Workaround for https://github.com/enso-org/enso/issues/10321
            if setup.prefix.contains "Snowflake" . not then table_builder table_structure else
                row_count = table_structure.first.second.length
                new_structure = table_structure+[["row_id", (0.up_to row_count) . to_vector]]
                table_builder new_structure . order_by "row_id" . remove_columns ["row_id"]

build_sorted_table = Util.build_sorted_table setup
|
||||
group_builder.specify "should insert the separator, add prefix and suffix" <|
|
||||
table = build_sorted_table [["A", ["foo", "bar", "foo", "foo"]], ["B", ["a", "b", "c", "d"]]]
|
||||
result = table.aggregate ["A"] [Concatenate "B" prefix="[[" suffix="]]" separator="; "]
|
||||
@ -1018,14 +993,6 @@ add_specs suite_builder setup =
|
||||
materialized.columns.at 0 . to_vector . should_equal ["'1''0'A''''BC"]
|
||||
|
||||
suite_builder.group prefix+"Table.aggregate Count_Distinct" group_builder->
|
||||
data = Data.setup create_connection_fn table_fn empty_table_fn
|
||||
|
||||
group_builder.teardown <|
|
||||
data.teardown
|
||||
|
||||
table_builder cols =
|
||||
setup.table_builder cols connection=data.connection
|
||||
|
||||
group_builder.specify "should correctly count missing values" <|
|
||||
get_value t =
|
||||
columns = materialize t . columns
|
||||
@ -1095,14 +1062,6 @@ add_specs suite_builder setup =
|
||||
m1.columns.first.to_vector . should_equal [3]
|
||||
|
||||
suite_builder.group prefix+"Table.aggregate Standard_Deviation" pending=(resolve_pending test_selection.std_dev) group_builder->
|
||||
data = Data.setup create_connection_fn table_fn empty_table_fn
|
||||
|
||||
group_builder.teardown <|
|
||||
data.teardown
|
||||
|
||||
table_builder cols =
|
||||
setup.table_builder cols connection=data.connection
|
||||
|
||||
group_builder.specify "should correctly handle single elements" <|
|
||||
r1 = table_builder [["X", [1]]] . aggregate columns=[Standard_Deviation "X" (population=False), Standard_Deviation "X" (population=True)]
|
||||
r1.row_count.should_equal 1
|
||||
@ -1112,14 +1071,6 @@ add_specs suite_builder setup =
|
||||
m1.columns.second.at 0 . should_equal 0
|
||||
|
||||
suite_builder.group prefix+"Table.aggregate should correctly select result types" group_builder->
|
||||
data = Data.setup create_connection_fn table_fn empty_table_fn
|
||||
|
||||
group_builder.teardown <|
|
||||
data.teardown
|
||||
|
||||
table_builder cols =
|
||||
setup.table_builder cols connection=data.connection
|
||||
|
||||
group_builder.specify "widening to decimals on Average" <|
|
||||
table = table_builder [["G", ["a", "a", "b", "b"]], ["X", [0, 1, 1, Nothing]]]
|
||||
r1 = table.aggregate columns=[Average "X"]
|
||||
@ -1170,14 +1121,6 @@ add_specs suite_builder setup =
|
||||
Test.fail "Expected a Nothing or NaN but got: "+value.to_text+" (at "+loc+")."
|
||||
|
||||
suite_builder.group prefix+"Table.aggregate should correctly handle infinities" group_builder->
|
||||
data = Data.setup create_connection_fn table_fn empty_table_fn
|
||||
|
||||
group_builder.teardown <|
|
||||
data.teardown
|
||||
|
||||
table_builder cols =
|
||||
setup.table_builder cols connection=data.connection
|
||||
|
||||
pos_inf = 1/0
|
||||
neg_inf = -1/0
|
||||
|
||||
@ -1257,14 +1200,6 @@ add_specs suite_builder setup =
|
||||
expect_null_or_nan <| m1.columns.second.at 0
|
||||
|
||||
suite_builder.group prefix+"Table.aggregate should correctly handle NaN" pending=(resolve_pending test_selection.nan) group_builder->
|
||||
data = Data.setup create_connection_fn table_fn empty_table_fn
|
||||
|
||||
group_builder.teardown <|
|
||||
data.teardown
|
||||
|
||||
table_builder cols =
|
||||
setup.table_builder cols connection=data.connection
|
||||
|
||||
nan = 0.log 0
group_builder.specify "on Average" <|
t1 = table_builder [["X", [Nothing, nan, 0, 1, 2]]]
@ -1308,14 +1243,6 @@ add_specs suite_builder setup =
Double.isNaN (m1.columns.second.at 0) . should_be_true

suite_builder.group prefix+"Table.aggregate Mode" (pending = resolve_pending test_selection.advanced_stats) group_builder->
data = Data.setup create_connection_fn table_fn empty_table_fn

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

group_builder.specify "should ignore missing values" <|
t1 = table_builder [["X", [Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, 2, 2, 1]]]
r1 = t1.aggregate columns=[Mode "X"]
@ -1325,14 +1252,6 @@ add_specs suite_builder setup =
m1.columns.first.at 0 . should_equal 2

suite_builder.group prefix+"Table.aggregate First and Last" group_builder->
data = Data.setup create_connection_fn table_fn empty_table_fn

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

group_builder.specify "should not return the same value for groups with different values but equal ordering keys" (pending = resolve_pending test_selection.first_last) <|
t1 = table_builder [["G", ["a", "a"]], ["X", [1, 2]]]
order = [Sort_Column.Name "G"]
@ -1345,14 +1264,6 @@ add_specs suite_builder setup =
(first != last).should_be_true

suite_builder.group prefix+"Table.aggregate" group_builder->
data = Data.setup create_connection_fn table_fn empty_table_fn

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

group_builder.specify "should work even if no aggregations apart from groupings are specified" <|
table = table_builder [["A", [1, 1, 2, 1]], ["B", [3, 2, 2, 3]], ["C", [11, 12, 13, 14]]]
grouped = table.aggregate ["B", "A"]
@ -1427,14 +1338,6 @@ add_specs suite_builder setup =
r1.to_display_text . should_contain "`First`"

suite_builder.group prefix+"Table.aggregate+Expressions" group_builder->
data = Data.setup create_connection_fn table_fn empty_table_fn

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

## TODO we probably should check all kinds of aggregate columns
to verify that all of them correctly support expressions.
group_builder.specify "should allow expressions in aggregates" <|
@ -1468,14 +1371,6 @@ add_specs suite_builder setup =
err4.expression_error.should_equal (No_Such_Column.Error "MISSING")

suite_builder.group prefix+"Table.aggregate should raise warnings when there are issues" group_builder->
data = Data.setup create_connection_fn table_fn empty_table_fn

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

table =
col1 = ["Index", [1, 2, 3]]
col2 = ["Value", [1, 2, 3]]
@ -1564,14 +1459,6 @@ add_specs suite_builder setup =
Problems.test_problem_handling action problems tester

suite_builder.group prefix+"Table.aggregate should report warnings and errors based on types" group_builder->
data = Data.setup create_connection_fn table_fn empty_table_fn

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

group_builder.specify "should warn if grouping on a floating point" <|
t = table_builder [["X", [1.1, 2.2, 3.3, 2.2]]]
action = t.aggregate ["X"] on_problems=_
@ -1613,26 +1500,18 @@ add_specs suite_builder setup =

t1 = table.aggregate ["Text", "Int", "Float"]
t1.at "Text" . value_type . is_text . should_be_true
t1.at "Int" . value_type . is_integer . should_be_true
setup.expect_integer_type <| t1.at "Int"
t1.at "Float" . value_type . is_floating_point . should_be_true

t2 = table.aggregate columns=[Count, Count_Not_Empty "Text", Sum "Int", Sum "Float", Average "Int", Concatenate "Text"]
t2.at "Count" . value_type . is_integer . should_be_true
t2.at "Count Not Empty Text" . value_type . is_integer . should_be_true
setup.expect_integer_type <| t2.at "Count"
setup.expect_integer_type <| t2.at "Count Not Empty Text"
t2.at "Sum Int" . value_type . is_numeric . should_be_true
t2.at "Sum Float" . value_type . is_floating_point . should_be_true
t2.at "Average Int" . value_type . is_numeric . should_be_true
t2.at "Concatenate Text" . value_type . is_text . should_be_true

suite_builder.group prefix+"Table.aggregate should raise warnings when there are issues computing aggregation" pending=(resolve_pending test_selection.aggregation_problems) group_builder->
data = Data.setup create_connection_fn table_fn empty_table_fn

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

table =
col1 = ["Index", [1, 2, 3]]
col2 = ["Value", [1, 2, 3.1]]
@ -1664,14 +1543,6 @@ add_specs suite_builder setup =
Problems.test_problem_handling action problems tester

suite_builder.group prefix+"Table.aggregate should merge warnings when issues computing aggregation" pending=(resolve_pending test_selection.aggregation_problems) group_builder->
data = Data.setup create_connection_fn table_fn empty_table_fn

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

group_builder.specify "should merge Invalid Aggregation warnings" <|
table = table_builder [["X", (0.up_to 16).map (_-> ",")]]
new_table = table.aggregate columns=[Concatenate "X" separator=","]
@ -1695,15 +1566,7 @@ add_specs suite_builder setup =

if is_database then
suite_builder.group prefix+"Table.aggregate should report unsupported operations but not block other aggregations in warning mode" group_builder->
data = Data.setup create_connection_fn table_fn empty_table_fn

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

expect_sum_and_unsupported_errors error_count result =
expect_sum_and_unsupported_errors error_count result = within_table result <|
result.column_count . should_equal 1
result.row_count . should_equal 1
result.columns.first.to_vector . should_equal [6]
@ -1743,7 +1606,7 @@ add_specs suite_builder setup =
expect_sum_and_unsupported_errors 2 <|
table.aggregate columns=[Sum "X", Shortest "Y", Longest "Y"]

if test_selection.text_concat.not then
if test_selection.text_concat.not && (setup.prefix.contains "Snowflake" . not) then
group_builder.specify "with Concatenate" <|
table = table_builder [["X", [1,2,3]], ["Y", ["a", "bb", "ccc"]]]
expect_sum_and_unsupported_errors 1 <|

@ -30,7 +30,7 @@ type Arithmetic_Data
z self = self.data.at 2

setup table_builder = Arithmetic_Data.Value <|
t2 = table_builder [["x", [1, 4, 5, Nothing]], ["y", [2.0, 3.25, 5.0, Nothing]], ["z", [Decimal.new 2, Decimal.new 4.5, Decimal.new 5.0, Nothing]], ["row_id", [1, 2, 3, 4]]] . order_by "row_id"
t2 = table_builder [["x", [1, 4, 5, Nothing]], ["y", [2.0, 3.25, 5.0, Nothing]], ["z", [Decimal.new 2, Decimal.new 4.5, Decimal.new 5.0, Nothing]], ["row_id", [1, 2, 3, 4]]] . sort "row_id"
x = t2.at "x"
y = t2.at "y"
z = t2.at "z"
@ -45,7 +45,7 @@ type Min_Max_Data
t self = self.data.at 3

setup table_builder = Min_Max_Data.Value <|
t = table_builder [["a", [1, 2, 3]], ["b", [4.5, 5.5, 6.5]], ["c", ['a', 'b', 'c']], ["d", [True, False, True]]] . order_by "a"
t = table_builder [["a", [1, 2, 3]], ["b", [4.5, 5.5, 6.5]], ["c", ['a', 'b', 'c']], ["d", [True, False, True]]] . sort "a"
a = t.at "a"
b = t.at "b"
c = t.at "c"
@ -58,8 +58,8 @@ type Literal_Data
col1 self = self.data.at 1

setup table_builder = Literal_Data.Value <|
col0 = table_builder [["x", ['hello Hello', 'hello hello', 'HELLO HELLO']], ["row_id", [1, 2, 3]]] . order_by "row_id" . at "x"
col1 = table_builder [["x", ['a[bcd] A[bCd] a[bcd]', 'abac ad Ab aCAd']], ["row_id", [1, 2]]] . order_by "row_id" . at "x"
col0 = table_builder [["x", ['hello Hello', 'hello hello', 'HELLO HELLO']], ["row_id", [1, 2, 3]]] . sort "row_id" . at "x"
col1 = table_builder [["x", ['a[bcd] A[bCd] a[bcd]', 'abac ad Ab aCAd']], ["row_id", [1, 2]]] . sort "row_id" . at "x"
[col0, col1]

type Replace_Data
@ -70,7 +70,7 @@ type Replace_Data
replacements self = self.data.at 2

setup table_builder = Replace_Data.Value <|
table = table_builder [["x", ['hello Hello', 'hello hello', 'HELLO HELLO', 'a[bcd] A[bCd] a[bcd]', 'abac ad Ab aCAd']], ["patterns", ['hello', 'hello', 'hello', 'a[bcd]', 'a[bcd]']], ["replacements", ['bye', 'bye', 'bye', 'hey', 'hey']], ["row_id", [1, 2, 3, 4, 5]]] . order_by "row_id"
table = table_builder [["x", ['hello Hello', 'hello hello', 'HELLO HELLO', 'a[bcd] A[bCd] a[bcd]', 'abac ad Ab aCAd']], ["patterns", ['hello', 'hello', 'hello', 'a[bcd]', 'a[bcd]']], ["replacements", ['bye', 'bye', 'bye', 'hey', 'hey']], ["row_id", [1, 2, 3, 4, 5]]] . sort "row_id"
col = table.at "x"
patterns = table.at "patterns"
replacements = table.at "replacements"
@ -86,7 +86,7 @@ type Text_Replace_Data
d self = self.data.at 3

setup table_builder = Text_Replace_Data.Value <|
t4 = table_builder [["A", ["Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot"]], ["B", ["A","O","a","E","o","O"]], ["C", [1,2,3,4,5,6]], ["D", ['',Nothing,'',Nothing,'','']]] . order_by "C"
t4 = table_builder [["A", ["Alpha", "Bravo", "Charlie", "Delta", "Echo", "Foxtrot"]], ["B", ["A","O","a","E","o","O"]], ["C", [1,2,3,4,5,6]], ["D", ['',Nothing,'',Nothing,'','']]] . sort "C"
a = t4.at "A"
b = t4.at "B"
c = t4.at "C"
@ -101,7 +101,7 @@ type Trim_Data
c self = self.data.at 2

setup table_builder = Trim_Data.Value <|
table = table_builder [["A", [" A ", ' \t\n\rA\r\n\t ', "xxxAxx"]], ["B", [" ",' \t',"x"]], ["C", [1,2,3]]] . order_by "C"
table = table_builder [["A", [" A ", ' \t\n\rA\r\n\t ', "xxxAxx"]], ["B", [" ",' \t',"x"]], ["C", [1,2,3]]] . sort "C"
a = table.at "A"
b = table.at "B"
c = table.at "C"
@ -113,7 +113,7 @@ type Names_Data
t self = self.data.at 0

setup table_builder = Names_Data.Value <|
t = table_builder [["a", [1, 2, 3]], ["b", ['x', 'y', 'z']], ["c", [1.0, 2.0, 3.0]], ["d", [True, False, True]]] . order_by "a"
t = table_builder [["a", [1, 2, 3]], ["b", ['x', 'y', 'z']], ["c", [1.0, 2.0, 3.0]], ["d", [True, False, True]]] . sort "a"
[t]


@ -123,14 +123,33 @@ add_specs suite_builder setup =
light_table_builder = setup.light_table_builder
pending_datetime = if setup.test_selection.date_time.not then "Date/Time operations are not supported by this backend."
suite_builder.group prefix+"(Column_Operations_Spec) Rounding numeric tests" group_builder->
do_op n op =
round_one n dp=0 use_bankers=False =
table = light_table_builder [["x", [n]]]
result = table.at "x" |> op
result = table.at "x" . round dp use_bankers
result.to_vector.at 0

do_round n dp=0 use_bankers=False = do_op n (_.round dp use_bankers)

Round_Spec.add_specs group_builder do_round
round_batch n_vec dp_vec use_bankers = case use_bankers of
## If banker's rounding is used, we refrain from batching due to issue
https://github.com/enso-org/enso/issues/10306
That is because with batching the generated SQL code becomes tremendously
large (up to around 250k characters), which makes running it very slow.
True ->
# Just use `round_one` for each entry
n_vec.zip dp_vec n-> dp-> round_one n dp use_bankers
False ->
name_for ix = "x"+ix.to_text
n_columns = n_vec.map_with_index ix-> n-> [name_for ix, [n]]
table = light_table_builder n_columns
new_columns = dp_vec.map_with_index ix-> dp->
n_col = table.at (name_for ix)
n_col.round dp use_bankers . rename (name_for ix)
# In the new table, each input column is replaced with its rounded result.
new_table = new_columns.fold table acc-> col-> acc.set col (set_mode=..Update)
# We read all operations in a single query
materialized = new_table.read
Runtime.assert (materialized.row_count == 1)
materialized.first_row.to_vector
Round_Spec.add_specs group_builder (Round_Spec.Batch_Runner.Value round_batch round_one) run_advanced=setup.test_selection.run_advanced_edge_case_tests
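# A rough sketch of what the batched path above produces: for example,
# with n_vec=[1.25, 2.5] and dp_vec=[1, 0], `round_batch` builds a
# one-row table with columns "x0"=1.25 and "x1"=2.5, updates each column
# with its rounding in place, and materializes everything with a single
# `read` - roughly SELECT round("x0", 1), round("x1", 0) FROM <one-row table>.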

group_builder.specify "Decimal places out of range" <|
3.1 . round 16 . should_fail_with Illegal_Argument
@ -141,7 +160,7 @@ add_specs suite_builder setup =
if setup.prefix.contains "Snowflake" . not then table_builder table_structure else
row_count = table_structure.first.second.length
new_structure = table_structure+[["row_id", (0.up_to row_count) . to_vector]]
table_builder new_structure . order_by "row_id" . remove_columns ["row_id"]
table_builder new_structure . sort "row_id" . remove_columns ["row_id"]
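# Note: presumably Snowflake does not guarantee that rows come back in
# insertion order, so this builder appends a synthetic "row_id" column,
# sorts by it to pin the row order, and then drops it again.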

## Runs the provided callback with a few combinations of columns, where some
of them are made Mixed (but still contain only the original values).
@ -173,28 +192,26 @@ add_specs suite_builder setup =
suite_builder.group prefix+"(Column_Operations_Spec) Boolean Column Operations" group_builder->
table_builder = light_table_builder
group_builder.specify "iif" <|
t = table_builder [["X", [True, False, Nothing, True]]]
# TODO
IO.println (t.at "X" . value_type)
t = build_sorted_table [["X", [True, False, Nothing, True]]]
c1 = t.at "X" . iif 22 33
c1.to_vector . should_equal [22, 33, Nothing, 22]
c1.value_type . is_integer . should_be_true
setup.expect_integer_type <| c1

c2 = t.at "X" . iif 22 33.0
c2.to_vector . should_equal [22, 33, Nothing, 22]
c2.value_type . is_floating_point . should_be_true
c2.value_type . should_be_a (Value_Type.Float ...)

c3 = t.at "X" . iif "A" "B"
c3.to_vector . should_equal ["A", "B", Nothing, "A"]
c3.value_type . is_text . should_be_true
c3.value_type . should_be_a (Value_Type.Char ...)

c4 = t.at "X" . iif Nothing "B"
c4.to_vector . should_equal [Nothing, "B", Nothing, Nothing]
c4.value_type . is_text . should_be_true
c4.value_type . should_be_a (Value_Type.Char ...)

c5 = t.at "X" . iif 42 Nothing
c5.to_vector . should_equal [42, Nothing, Nothing, 42]
c5.value_type . is_integer . should_be_true
setup.expect_integer_type <| c5

c6 = t.at "X" . iif Nothing Nothing
c6.to_vector . should_equal [Nothing, Nothing, Nothing, Nothing]
@ -446,6 +463,10 @@ add_specs suite_builder setup =
t = table_builder [["col0", ["0", Nothing, "4", "5", Nothing, Nothing]]] . cast "col0" (Value_Type.Char size=1 variable_length=False)
actual = t.fill_nothing ["col0"] "ABCDE"
actual.at "col0" . to_vector . should_equal ["0", "ABCDE", "4", "5", "ABCDE", "ABCDE"]
# Snowflake worksheet returns:
# SELECT SYSTEM$TYPEOF(COALESCE(CAST("literal_table"."x" AS CHAR(1)), 'N/A')) FROM (VALUES ('1'), (NULL), ('a')) AS "literal_table"("x");
# VARCHAR(3) - so it is correct
# TODO - investigate JDBC / our mapping bug
case setup.test_selection.length_restricted_text_columns of
True -> actual.at "col0" . value_type . should_equal (Value_Type.Char size=5 variable_length=True)
False -> actual.at "col0" . value_type . should_equal (Value_Type.Char variable_length=True)
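# Note: the fill value "ABCDE" is longer than the original CHAR(1) type,
# so backends that track text length must widen the column type (to at
# least size=5) for the result to fit.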
@ -630,8 +651,8 @@ add_specs suite_builder setup =
with_mixed_columns_if_supported [["x", [1, 4, 5, Nothing]], ["y", [2.0, 3.25, 5.0, Nothing]]] t2->
x = t2.at "x"
y = t2.at "y"
x.inferred_precise_value_type . is_integer . should_be_true
y.inferred_precise_value_type . is_floating_point . should_be_true
setup.is_integer_type x.inferred_precise_value_type . should_be_true
y.inferred_precise_value_type . should_be_a (Value_Type.Float ...)

(x < y).to_vector . should_equal [True, False, False, Nothing]
(x <= y).to_vector . should_equal [True, False, True, Nothing]
@ -762,20 +783,20 @@ add_specs suite_builder setup =
(x ^ y).to_vector . should_equal [10000, 9765625]

group_builder.specify "should correctly infer the types" <|
(data.x + data.x).value_type . is_integer . should_be_true
(data.x + data.y).value_type . is_floating_point . should_be_true
(data.x + 2).value_type . is_integer . should_be_true
(data.x + 1.5).value_type . is_floating_point . should_be_true
setup.expect_integer_type <| (data.x + data.x)
(data.x + data.y).value_type . should_be_a (Value_Type.Float ...)
setup.expect_integer_type <| (data.x + 2)
(data.x + 1.5).value_type . should_be_a (Value_Type.Float ...)

(data.x - data.x).value_type . is_integer . should_be_true
(data.x - data.y).value_type . is_floating_point . should_be_true
(data.x - 2).value_type . is_integer . should_be_true
(data.x - 1.5).value_type . is_floating_point . should_be_true
setup.expect_integer_type <| (data.x - data.x)
(data.x - data.y).value_type . should_be_a (Value_Type.Float ...)
setup.expect_integer_type <| (data.x - 2)
(data.x - 1.5).value_type . should_be_a (Value_Type.Float ...)

(data.x * data.x).value_type . is_integer . should_be_true
(data.x * data.y).value_type . is_floating_point . should_be_true
(data.x * 2).value_type . is_integer . should_be_true
(data.x * 1.5).value_type . is_floating_point . should_be_true
setup.expect_integer_type <| (data.x * data.x)
(data.x * data.y).value_type . should_be_a (Value_Type.Float ...)
setup.expect_integer_type <| (data.x * 2)
(data.x * 1.5).value_type . should_be_a (Value_Type.Float ...)

(data.x ^ data.x).value_type . is_numeric . should_be_true

@ -858,16 +879,16 @@ add_specs suite_builder setup =
a.zip b (/) . should_equal r

t = build_sorted_table [["A", a], ["B", b]]
t.at "A" . value_type . is_integer . should_be_true
t.at "B" . value_type . is_integer . should_be_true
setup.expect_integer_type <| t.at "A"
setup.expect_integer_type <| t.at "B"

r2 = (t.at "A") / (t.at "B")
r2 . to_vector . should_equal r
r2.value_type . is_floating_point . should_be_true
r2.value_type . should_be_a (Value_Type.Float ...)

r3 = (t.at "A") / 2
r3 . to_vector . should_equal [0.5, 2.5, 5.0, 50.0]
r3.value_type . is_floating_point . should_be_true
r3.value_type . should_be_a (Value_Type.Float ...)

a2 = [1.2, 5, 10.2, 100]
b2 = [1.2, 2, 2, 5]
@ -880,11 +901,11 @@ add_specs suite_builder setup =

r5 = (t2.at "A") / (t2.at "B")
r5 . to_vector . should_equal r4
r5.value_type . is_floating_point . should_be_true
r5.value_type . should_be_a (Value_Type.Float ...)

r6 = (t2.at "A") / 2
r6 . to_vector . should_equal [0.6, 2.5, 5.1, 50.0]
r6.value_type . is_floating_point . should_be_true
r6.value_type . should_be_a (Value_Type.Float ...)

db_pending = if setup.is_database then "Arithmetic error handling is currently not implemented for the Database backend."
group_builder.specify "should allow division by 0 and report warnings" pending=db_pending <|
@ -1322,39 +1343,38 @@ add_specs suite_builder setup =
result.value_type . should_equal Value_Type.Char
result . to_vector . should_equal expected

literal_data = Literal_Data.setup setup.table_builder
suite_builder.group prefix+"(Column_Operations_Spec) replace: literal text pattern and replacement" group_builder->
data = Literal_Data.setup setup.table_builder
group_builder.specify "case_sensitivity=sensitive/default use_regex=false only_first=false"
do_replace data.col0 'hello' 'bye' expected=['bye Hello', 'bye bye', 'HELLO HELLO']
do_replace data.col0 'hello' 'bye' case_sensitivity=Case_Sensitivity.Default expected=['bye Hello', 'bye bye', 'HELLO HELLO']
do_replace data.col0 'hello' 'bye' case_sensitivity=Case_Sensitivity.Sensitive expected=['bye Hello', 'bye bye', 'HELLO HELLO']
do_replace data.col1 'a[bcd]' 'hey' expected=['hey A[bCd] hey', 'abac ad Ab aCAd']
do_replace literal_data.col0 'hello' 'bye' expected=['bye Hello', 'bye bye', 'HELLO HELLO']
do_replace literal_data.col0 'hello' 'bye' case_sensitivity=Case_Sensitivity.Default expected=['bye Hello', 'bye bye', 'HELLO HELLO']
do_replace literal_data.col0 'hello' 'bye' case_sensitivity=Case_Sensitivity.Sensitive expected=['bye Hello', 'bye bye', 'HELLO HELLO']
do_replace literal_data.col1 'a[bcd]' 'hey' expected=['hey A[bCd] hey', 'abac ad Ab aCAd']

group_builder.specify "case_sensitivity=sensitive/default use_regex=false only_first=true"
do_replace data.col0 'hello' 'bye' only_first=True expected=['bye Hello', 'bye hello', 'HELLO HELLO']
do_replace data.col1 'a[bcd]' 'hey' only_first=True expected=['hey A[bCd] a[bcd]', 'abac ad Ab aCAd']
do_replace literal_data.col0 'hello' 'bye' only_first=True expected=['bye Hello', 'bye hello', 'HELLO HELLO']
do_replace literal_data.col1 'a[bcd]' 'hey' only_first=True expected=['hey A[bCd] a[bcd]', 'abac ad Ab aCAd']

group_builder.specify "case_sensitivity=insensitive use_regex=false only_first=false"
do_replace data.col0 'hello' 'bye' case_sensitivity=Case_Sensitivity.Insensitive expected=['bye bye', 'bye bye', 'bye bye']
do_replace data.col1 'a[bcd]' 'hey' case_sensitivity=Case_Sensitivity.Insensitive expected=['hey hey hey', 'abac ad Ab aCAd']
do_replace literal_data.col0 'hello' 'bye' case_sensitivity=Case_Sensitivity.Insensitive expected=['bye bye', 'bye bye', 'bye bye']
do_replace literal_data.col1 'a[bcd]' 'hey' case_sensitivity=Case_Sensitivity.Insensitive expected=['hey hey hey', 'abac ad Ab aCAd']

group_builder.specify "case_sensitivity=insensitive use_regex=false only_first=true"
do_replace data.col0 'hello' 'bye' case_sensitivity=Case_Sensitivity.Insensitive only_first=True expected=['bye Hello', 'bye hello', 'bye HELLO']
do_replace data.col1 'a[bcd]' 'hey' case_sensitivity=Case_Sensitivity.Insensitive only_first=True expected=['hey A[bCd] a[bcd]', 'abac ad Ab aCAd']
do_replace literal_data.col0 'hello' 'bye' case_sensitivity=Case_Sensitivity.Insensitive only_first=True expected=['bye Hello', 'bye hello', 'bye HELLO']
do_replace literal_data.col1 'a[bcd]' 'hey' case_sensitivity=Case_Sensitivity.Insensitive only_first=True expected=['hey A[bCd] a[bcd]', 'abac ad Ab aCAd']

suite_builder.group prefix+"(Column_Operations_Spec) replace: literal regex pattern and replacement" group_builder->
data = Literal_Data.setup setup.table_builder
group_builder.specify "case_sensitivity=sensitive/default use_regex=True only_first=false"
do_replace data.col1 'a[bcd]'.to_regex 'hey' expected=['a[bcd] A[bCd] a[bcd]', 'heyhey hey Ab aCAd']
do_replace literal_data.col1 'a[bcd]'.to_regex 'hey' expected=['a[bcd] A[bCd] a[bcd]', 'heyhey hey Ab aCAd']

group_builder.specify "case_sensitivity=sensitive/default use_regex=True only_first=true"
do_replace data.col1 'a[bcd]'.to_regex 'hey' only_first=True expected=['a[bcd] A[bCd] a[bcd]', 'heyac ad Ab aCAd']
do_replace literal_data.col1 'a[bcd]'.to_regex 'hey' only_first=True expected=['a[bcd] A[bCd] a[bcd]', 'heyac ad Ab aCAd']

group_builder.specify "case_sensitivity=insensitive use_regex=True only_first=false"
do_replace data.col1 'a[bcd]'.to_regex 'hey' case_sensitivity=Case_Sensitivity.Insensitive expected=['a[bcd] A[bCd] a[bcd]', 'heyhey hey hey heyhey']
do_replace literal_data.col1 'a[bcd]'.to_regex 'hey' case_sensitivity=Case_Sensitivity.Insensitive expected=['a[bcd] A[bCd] a[bcd]', 'heyhey hey hey heyhey']

group_builder.specify "case_sensitivity=insensitive use_regex=True only_first=true"
do_replace data.col1 'a[bcd]'.to_regex 'hey' case_sensitivity=Case_Sensitivity.Insensitive only_first=True expected=['a[bcd] A[bCd] a[bcd]', 'heyac ad Ab aCAd']
do_replace literal_data.col1 'a[bcd]'.to_regex 'hey' case_sensitivity=Case_Sensitivity.Insensitive only_first=True expected=['a[bcd] A[bCd] a[bcd]', 'heyac ad Ab aCAd']

group_builder.specify "can properly escape complex regexes" <|
regex = "^([^\(]+)|(?<foo>\w\d[a-z])+$"
@ -1362,10 +1382,8 @@ add_specs suite_builder setup =
do_replace col regex "asdf" ["asdf"]

suite_builder.group prefix+"(Column_Operations_Spec) replace: Named_Pattern and replacement" group_builder->
data = Literal_Data.setup setup.table_builder
group_builder.specify "can use Named_Patterns" <|
do_replace data.col1 Named_Pattern.Symbols 'hey' expected=['aheybcdhey AheybCdhey aheybcdhey', 'abac ad Ab aCAd']

do_replace literal_data.col1 Named_Pattern.Symbols 'hey' expected=['aheybcdhey AheybCdhey aheybcdhey', 'abac ad Ab aCAd']
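# Note: `Named_Pattern.Symbols` matches symbol characters, so every '['
# and ']' in 'a[bcd] A[bCd] a[bcd]' is replaced with 'hey' while letters
# and spaces are left intact - exactly what the expected value encodes.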

suite_builder.group prefix+"(Column_Operations_Spec) replace: pattern and replacement columns" group_builder->
data = Replace_Data.setup setup.table_builder
@ -1612,10 +1630,47 @@ add_specs suite_builder setup =
suite_builder.group prefix+"(Column_Operations_Spec) Table.make_constant_column" group_builder->
group_builder.specify "Should allow the creation of constant columns" <|
t = table_builder [["x", ["1", "2", "3"]]]
t.make_constant_column True . to_vector . should_equal [True, True, True]
t.make_constant_column 12 . to_vector . should_equal [12, 12, 12]
t.make_constant_column 12.3 . to_vector . should_equal [12.3, 12.3, 12.3]
t.make_constant_column "asdf" . to_vector . should_equal ["asdf", "asdf", "asdf"]
c1 = t.make_constant_column True
c1.to_vector . should_equal [True, True, True]
c1.value_type.should_equal Value_Type.Boolean

c2 = t.make_constant_column 12
c2.to_vector . should_equal [12, 12, 12]
setup.expect_integer_type c2

c3 = t.make_constant_column 12.3
c3.to_vector . should_equal [12.3, 12.3, 12.3]
c3.value_type.should_be_a (Value_Type.Float ...)

c4 = t.make_constant_column "asdf"
c4.to_vector . should_equal ["asdf", "asdf", "asdf"]
c4.value_type.should_be_a (Value_Type.Char ...)

if setup.test_selection.date_time then group_builder.specify "Should allow the creation of constant date/time columns" <|
t = table_builder [["x", ["1", "2", "3"]]]
d = Date.new 2024 07 05
c1 = t.make_constant_column d
c1.to_vector . should_equal [d, d, d]
c1.value_type.should_equal Value_Type.Date

dt = Date_Time.new 2024 07 05 12 30 45
c2 = t.make_constant_column dt
c2.value_type.should_be_a (Value_Type.Date_Time ...)

time = Time_Of_Day.new 12 30 45
c3 = t.make_constant_column time
c3.to_vector . should_equal [time, time, time]
c3.value_type.should_equal Value_Type.Time

# The types should be preserved after an 'identity' operation:
c1_prime = (c1 == c1).iif c1 c1
c1_prime.value_type.should_equal Value_Type.Date

c2_prime = (c2 == c2).iif c2 c2
c2_prime.value_type.should_be_a (Value_Type.Date_Time ...)

c3_prime = (c3 == c3).iif c3 c3
c3_prime.value_type.should_equal Value_Type.Time

group_builder.specify "Should allow the creation of constant columns on a table with no rows" <|
t = table_builder [["x", ["1", "2", "3"]]]

@ -8,7 +8,7 @@ from Standard.Database.Errors import Unsupported_Database_Operation

from Standard.Test import all

from project.Common_Table_Operations.Util import run_default_backend
from project.Common_Table_Operations.Util import run_default_backend, build_sorted_table

polyglot java import java.lang.Long as Java_Long

@ -20,32 +20,13 @@ type My_Type
to_text : Text
to_text self = "{{{MY Type [x="+self.x.to_text+"] }}}"


type Data
Value ~connection

setup create_connection_fn = Data.Value <|
create_connection_fn Nothing

teardown self =
self.connection.close


add_specs suite_builder setup =
prefix = setup.prefix
materialize = setup.materialize
supports_dates = setup.test_selection.date_time
create_connection_fn = setup.create_connection_func
supports_conversion_failure_reporting = setup.is_database.not
suite_builder.group prefix+"Table/Column.cast - to text" group_builder->
data = Data.setup create_connection_fn

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

table_builder = build_sorted_table setup
suite_builder.group prefix+"(Conversion_Spec) Table/Column.cast - to text" group_builder->
group_builder.specify "should allow to cast columns of various basic types to text" <|
t = table_builder [["X", [1, 2, 3000]], ["Y", [True, False, True]], ["Z", [1.5, 0.125, -2.5]], ["W", ["a", "DEF", "a slightly longer text"]]]
t2 = t.cast t.column_names Value_Type.Char
@ -63,10 +44,10 @@ add_specs suite_builder setup =
group_builder.specify "should allow to cast columns selecting columns by type" <|
t = table_builder [["X", [1, 2, 3000]], ["Y", [True, False, True]], ["Z", [1.5, 0.125, -2.5]], ["W", ["a", "DEF", "a slightly longer text"]]]
t2 = t.cast [..By_Type ..Integer] Value_Type.Char
t2.at "X" . value_type . is_text . should_be_true
t2.at "X" . value_type . should_be_a (Value_Type.Char ...)
t2.at "Y" . value_type . is_text . should_be_false
t2.at "Z" . value_type . is_text . should_be_false
t2.at "W" . value_type . is_text . should_be_true
t2.at "W" . value_type . should_be_a (Value_Type.Char ...)

if supports_dates then
group_builder.specify "should allow to cast date/time columns to text" <|
@ -76,10 +57,15 @@ add_specs suite_builder setup =
t2.at "Y" . value_type . is_text . should_be_true
t2.at "Z" . value_type . is_text . should_be_true

t2.at "X" . to_vector . should_equal ["2015-01-01", "2023-12-31"]
t2.at "Y" . to_vector . should_equal ["01:02:03", "23:57:59"]
m2 = t2.read
m2.at "X" . to_vector . should_equal ["2015-01-01", "2023-12-31"]

# The nanoseconds suffix is optional - we strip it for the comparison.
strip_ns s = s.replace "\.\d+$".to_regex ""
m2.at "Y" . to_vector . map strip_ns . should_equal ["01:02:03", "23:57:59"]

# The particular format depends on the backend.
vz = t2.at "Z" . to_vector
vz = m2.at "Z" . to_vector
vz.first . should_contain "2015-01-01"
vz.first . should_contain "01:02:03"
vz.second . should_contain "2023-11-30"
@ -149,21 +135,11 @@ add_specs suite_builder setup =
r2 = c1.cast (Value_Type.Char size=0 variable_length=True)
r2.should_fail_with Illegal_Argument

suite_builder.group prefix+"Table/Column.cast - numeric" group_builder->
data = Data.setup create_connection_fn

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

suite_builder.group prefix+"(Conversion_Spec) Table/Column.cast - numeric" group_builder->
group_builder.specify "should allow to cast a boolean column to integer" <|
t = table_builder [["X", [True, False, True]]]
c = t.at "X" . cast Value_Type.Integer
vt = c.value_type
Test.with_clue "Expecting "+vt.to_display_text+" to be Integer. " <|
vt.is_integer . should_be_true
setup.expect_integer_type c
c.to_vector . should_equal [1, 0, 1]

group_builder.specify "should allow to cast an integer column to floating point" <|
@ -230,9 +206,7 @@ add_specs suite_builder setup =
group_builder.specify "should allow to cast a floating point column to integer" <|
t = table_builder [["X", [1.0001, 2.25, 4.0]]]
c = t.at "X" . cast Value_Type.Integer
vt = c.value_type
Test.with_clue "Expecting "+vt.to_display_text+" to be Integer. " <|
vt.is_integer . should_be_true
setup.expect_integer_type c
c.to_vector . should_equal [1, 2, 4]
# Not reporting Lossy Conversion as converting floats to integers obviously truncates the value.
Problems.assume_no_problems c
@ -334,15 +308,7 @@ add_specs suite_builder setup =
warning.to_display_text . should_contain too_big_double.to_text

if supports_dates then
suite_builder.group prefix+"Table/Column.cast - date/time" group_builder->
data = Data.setup create_connection_fn

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

suite_builder.group prefix+"(Conversion_Spec) Table/Column.cast - date/time" group_builder->
group_builder.specify "should allow to get the Date part from a Date_Time" <|
t = table_builder [["X", [Date_Time.new 2015 1 2 3 4 5, Date_Time.new 2023 12 31 23 56 59]]]
c = t.at "X" . cast Value_Type.Date
@ -367,15 +333,7 @@ add_specs suite_builder setup =
expected_diff = Duration.between day1.to_date_time day2.to_date_time
diff . should_equal expected_diff

suite_builder.group prefix+"Table/Column.cast - checking invariants" group_builder->
data = Data.setup create_connection_fn

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

suite_builder.group prefix+"(Conversion_Spec) Table/Column.cast - checking invariants" group_builder->
group_builder.specify "should report an error for unsupported conversions" <|
t = table_builder [["X", [1, 2, 3]]]
r1 = t.at "X" . cast Value_Type.Boolean
@ -408,11 +366,11 @@ add_specs suite_builder setup =
t = table_builder [["X", [Nothing, 1, 2, 3000]], ["Y", [Nothing, True, False, True]]]

c1 = t.at "X" . cast Value_Type.Char
c1.value_type.is_text . should_be_true
c1.value_type . should_be_a (Value_Type.Char ...)
c1.to_vector . should_equal [Nothing, "1", "2", "3000"]

c2 = t.at "Y" . cast Value_Type.Integer
c2.value_type . should_equal Value_Type.Integer
setup.expect_integer_type c2
c2.to_vector . should_equal [Nothing, 1, 0, 1]

group_builder.specify "should not lose the type after further operations were performed on the result" <|
@ -421,13 +379,11 @@ add_specs suite_builder setup =
c2 = t.at "Y" . cast Value_Type.Integer

c3 = c1 + '_suffix'
c3.value_type.is_text . should_be_true
c3.value_type . should_be_a (Value_Type.Char ...)
c3.to_vector . should_equal ["1_suffix", "2_suffix", "3000_suffix"]

c4 = c2 + 1000
vt4 = c4.value_type
Test.with_clue "Expecting "+vt4.to_display_text+" to be Integer. " <|
vt4.is_integer . should_be_true
setup.expect_integer_type c4
c4.to_vector . should_equal [1001, 1000, 1001]

group_builder.specify "should not lose the type after further operations were performed on the result, even if the first row is NULL" <|
@ -436,13 +392,11 @@ add_specs suite_builder setup =
c2 = t.at "Y" . cast Value_Type.Integer

c3 = c1 + '_suffix'
c3.value_type.is_text . should_be_true
c3.value_type.should_be_a (Value_Type.Char ...)
c3.to_vector . should_equal [Nothing, "1_suffix", "2_suffix", "3000_suffix"]

c4 = c2 + 1000
vt4 = c4.value_type
Test.with_clue "Expecting "+vt4.to_display_text+" to be Integer. " <|
vt4.is_integer . should_be_true
setup.expect_integer_type c4
c4.to_vector . should_equal [Nothing, 1001, 1000, 1001]

group_builder.specify 'Table.cast should cast the columns "in-place" and not reorder them' <|
@ -450,7 +404,7 @@ add_specs suite_builder setup =
t2 = t.cast ["Z", "Y"] Value_Type.Char
t2.column_names . should_equal ["X", "Y", "Z", "A"]

t2.at "X" . value_type . is_integer . should_be_true
setup.expect_integer_type <| t2.at "X"
t2.at "Y" . value_type . is_text . should_be_true
t2.at "Z" . value_type . is_text . should_be_true
t2.at "A" . value_type . is_boolean . should_be_true
@ -587,24 +541,16 @@ add_specs suite_builder setup =
r3 = t.cast "X" (Value_Type.Unsupported_Data_Type "foobar" "foobar")
r3.should_fail_with Illegal_Argument

suite_builder.group prefix+"Simple variant of Table/Column.parse in all backends" group_builder->
data = Data.setup create_connection_fn

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

suite_builder.group prefix+"(Conversion_Spec) Simple variant of Table/Column.parse in all backends" group_builder->
group_builder.specify "should be able to parse simple integers" <|
t = table_builder [["X", ["42", "0", "-1"]]]

c1 = t.at "X" . parse Value_Type.Integer
c1.value_type.is_integer . should_be_true
setup.expect_integer_type c1
c1.to_vector . should_equal [42, 0, -1]

c2 = t.parse ["X"] Value_Type.Integer . at "X"
c2.value_type.is_integer . should_be_true
setup.expect_integer_type c2
c2.to_vector . should_equal [42, 0, -1]

group_builder.specify "should be able to parse simple floats" <|
@ -655,7 +601,7 @@ add_specs suite_builder setup =

t1 = t.parse ["X", "Y"] Value_Type.Integer error_on_missing_columns=False
t1.at "X" . to_vector . should_equal [42, 0, -1]
t1.at "X" . value_type . is_integer . should_be_true
setup.expect_integer_type <| t1.at "X"
Problems.expect_warning Missing_Input_Columns t1

t2 = t.parse ["Y"] Value_Type.Integer error_on_missing_columns=False
@ -666,57 +612,25 @@ add_specs suite_builder setup =
r3.should_fail_with Missing_Input_Columns
r3.catch.criteria . should_equal ["Y"]

if setup.is_database then suite_builder.group prefix+"Table/Column auto value type" group_builder->
data = Data.setup create_connection_fn

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

if setup.is_database then suite_builder.group prefix+"(Conversion_Spec) Table/Column auto value type" group_builder->
group_builder.specify "should report unsupported" <|
t = table_builder [["X", [1, 2, 3]]]
t.auto_cast . should_fail_with Unsupported_Database_Operation
t.at "X" . auto_cast . should_fail_with Unsupported_Database_Operation

# The in-memory functionality of `expand_column` is tested in test/Table_Tests/src/In_Memory/Table_Conversion_Spec.enso
if setup.is_database then suite_builder.group prefix+"Table.expand_column" group_builder->
data = Data.setup create_connection_fn

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

if setup.is_database then suite_builder.group prefix+"(Conversion_Spec) Table.expand_column" group_builder->
group_builder.specify "should report unsupported" <|
table = table_builder [["aaa", [1, 2]], ["bbb", [3, 4]], ["ccc", [5, 6]]]
table.expand_column "bbb" . should_fail_with Unsupported_Database_Operation

# The in-memory functionality of `expand_to_rows` is tested in test/Table_Tests/src/In_Memory/Table_Conversion_Spec.enso
if setup.is_database then suite_builder.group prefix+"Table.expand_to_rows" group_builder->
data = Data.setup create_connection_fn

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

if setup.is_database then suite_builder.group prefix+"(Conversion_Spec) Table.expand_to_rows" group_builder->
group_builder.specify "should report unsupported" <|
table = table_builder [["aaa", [1, 2]], ["bbb", [3, 4]], ["ccc", [5, 6]]]
table.expand_to_rows "bbb" . should_fail_with Unsupported_Database_Operation

if setup.is_database.not then suite_builder.group prefix+"Table/Column auto value type" group_builder->
data = Data.setup create_connection_fn

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

if setup.is_database.not then suite_builder.group prefix+"(Conversion_Spec) Table/Column auto value type" group_builder->
group_builder.specify "should allow to narrow down types of a Mixed column" <|
[True, False].each shrink_types->
mixer = My_Type.Value 1
@ -736,7 +650,7 @@ add_specs suite_builder setup =
t2 = t1.auto_cast shrink_types=shrink_types
# Depending on shrink_types value the size of the Char/Integer types may vary - exact details tested elsewhere.
t2.at "strs" . value_type . should_be_a (Value_Type.Char ...)
t2.at "ints" . value_type . should_be_a (Value_Type.Integer ...)
setup.expect_integer_type <| t2.at "ints"
t2.at "floats" . value_type . should_equal Value_Type.Float
t2.at "mix" . value_type . should_equal Value_Type.Mixed
t2.at "dates" . value_type . should_equal Value_Type.Date

@ -7,24 +7,21 @@ from Standard.Table.Errors import all
import Standard.Table.Expression.Expression_Error

from Standard.Database import all
import Standard.Database.Internal.Internals_Access
from Standard.Database.Errors import Integrity_Error

from Standard.Test import all


from project.Common_Table_Operations.Util import run_default_backend
import project.Common_Table_Operations.Util

main filter=Nothing = run_default_backend add_specs filter

type Data
Value ~data
Value ~table

connection self = self.data.at 0
table self = self.data.at 1

setup create_connection_fn table_builder = Data.Value <|
connection = create_connection_fn Nothing
table =
setup table_builder = Data.Value <|
col1 = ["foo", [1,2,3]]
col2 = ["bar", [4,5,6]]
col3 = ["Baz", [7,8,9]]
@ -32,55 +29,30 @@ type Data
col5 = ["foo 2", [13,14,15]]
col6 = ["ab.+123", [16,17,18]]
col7 = ["abcd123", [19,20,21]]
table_builder [col1, col2, col3, col4, col5, col6, col7] connection=connection
[connection, table]

teardown self =
self.connection.close

table_builder [col1, col2, col3, col4, col5, col6, col7]

type Rows_Data
Value ~data

connection self = self.data.at 0
table self = self.data.at 1

setup create_connection_fn table_builder = Rows_Data.Value <|
connection = create_connection_fn Nothing
table = table_builder [["X", [1, 2, 3, 4]], ["Y", [5, 6, 7, 8]], ["Z", ["A", "B", "C", "D"]]] connection=connection
[connection, table]

teardown self =
self.connection.close
Value ~table

setup table_builder = Rows_Data.Value <|
table_builder [["X", [1, 2, 3, 4]], ["Y", [5, 6, 7, 8]], ["Z", ["A", "B", "C", "D"]]]

type Read_Data
Value ~data

connection self = self.data.at 0
t_big self = self.data.at 1
t_small self = self.data.at 2

setup create_connection_fn table_builder = Read_Data.Value <|
connection = create_connection_fn Nothing
t_big = table_builder [["X", (0.up_to 1500)]] connection=connection
t_small = table_builder [["X", (0.up_to 10)]] connection=connection
[connection, t_big, t_small]

teardown self =
self.connection.close
t_big self = self.data.at 0
t_small self = self.data.at 1

setup table_builder = Read_Data.Value <|
t_big = table_builder [["X", (0.up_to 1500)]]
t_small = table_builder [["X", (0.up_to 10)]]
[t_big, t_small]

add_specs suite_builder setup =
prefix = setup.prefix
create_connection_fn = setup.create_connection_func

suite_builder.group prefix+"Table.at" group_builder->
data = Data.setup create_connection_fn setup.table_builder

group_builder.teardown <|
data.teardown

build_sorted_table = Util.build_sorted_table setup
data = Data.setup build_sorted_table
suite_builder.group prefix+"(Core_Spec) Table.at" group_builder->
group_builder.specify "should allow selecting columns by name" <|
column_1 = data.table.at "bar"
column_1.name . should_equal "bar"
@ -114,12 +86,7 @@ add_specs suite_builder setup =
group_builder.specify "should fail with Type Error is not an Integer or Text" <|
|
||||
Test.expect_panic_with (data.table.at (Pair.new 1 2)) Type_Error

suite_builder.group prefix+"Table.get" group_builder->
data = Data.setup create_connection_fn setup.table_builder

group_builder.teardown <|
data.teardown

suite_builder.group prefix+"(Core_Spec) Table.get" group_builder->
group_builder.specify "should allow selecting columns by name" <|
column_1 = data.table.get "bar"
column_1.name . should_equal "bar"
@ -147,15 +114,7 @@ add_specs suite_builder setup =
group_builder.specify "should fail with Type Error is not an Integer or Text" <|
|
||||
Test.expect_panic_with (data.table.at (Pair.new 1 2)) Type_Error

suite_builder.group prefix+"Table.set" group_builder->
data = Data.setup create_connection_fn setup.table_builder

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

suite_builder.group prefix+"(Core_Spec) Table.set" group_builder->
group_builder.specify "should allow adding a column" <|
bar2 = data.table.get "bar" . rename "bar2"
t2 = data.table.set bar2
@ -185,7 +144,7 @@ add_specs suite_builder setup =
data.table.set foo set_mode=Set_Mode.Add . should_fail_with Existing_Column

group_builder.specify "should not affect existing columns that depended on the old column being replaced" <|
t1 = table_builder [["X", [1,2,3]]]
t1 = build_sorted_table [["X", [1,2,3]]]
t2 = t1.set (t1.at "X" * 100) as="Y"
t3 = t2.set (expr "[X] + 10") as="Z"
t4 = t3.set (Simple_Expression.Simple_Expr (Column_Ref.Name "X") (Simple_Calculation.Add 1000)) as="X"
@ -195,13 +154,13 @@ add_specs suite_builder setup =
t4.at "Z" . to_vector . should_equal [11, 12, 13]

group_builder.specify "should gracefully handle expression failures" <|
t1 = table_builder [["X", [1,2,3]]]
t1 = build_sorted_table [["X", [1,2,3]]]
t1.set (expr "[unknown] + 10") as="Z" . should_fail_with No_Such_Column
t1.set (expr "[[[[") . should_fail_with Expression_Error
t1.set (expr "[[[[") . catch . should_be_a Expression_Error.Syntax_Error

group_builder.specify "should forward expression problems" <|
t1 = table_builder [["X", [1.5, 2.0, 0.0]]]
t1 = build_sorted_table [["X", [1.5, 2.0, 0.0]]]

r1 = t1.set (expr "([X] == 2) || ([X] + 0.5 == 2)") on_problems=..Ignore
Problems.assume_no_problems r1
@ -223,7 +182,7 @@ add_specs suite_builder setup =
Problems.test_problem_handling action2 problems2 tester2

group_builder.specify "should gracefully handle columns from different backends" <|
t1 = table_builder [["A", [1, 2, 3]]]
t1 = build_sorted_table [["A", [1, 2, 3]]]
alternative_connection = Database.connect (SQLite.In_Memory)
t0 = (Table.new [["X", [3, 2, 1]]]).select_into_database_table alternative_connection "T0" temporary=True
c0 = t0.at "X"
@ -233,42 +192,25 @@ add_specs suite_builder setup =
True -> r1.should_fail_with Integrity_Error
False -> r1.should_fail_with Illegal_Argument

suite_builder.group prefix+"Table.column_names" group_builder->
data = Data.setup create_connection_fn setup.table_builder

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

suite_builder.group prefix+"(Core_Spec) Table.column_names" group_builder->
group_builder.specify "should return the names of all columns" <|
data.table.column_names . should_equal ["foo", "bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"]

group_builder.specify "should allow weird column names in all backends" <|
columns = weird_names.map_with_index ix-> name->
[name, [100+ix, 2, 3]]
table = table_builder columns
table = build_sorted_table columns
table.column_names . should_equal weird_names

weird_names.map_with_index ix-> name->
table.at name . to_vector . should_equal [100+ix, 2, 3]

suite_builder.group prefix+"Table.column_count" group_builder->
data = Data.setup create_connection_fn setup.table_builder

group_builder.teardown <|
data.teardown

suite_builder.group prefix+"(Core_Spec) Table.column_count" group_builder->
group_builder.specify "should allow getting the column count" <|
data.table.column_count . should_equal 7

suite_builder.group prefix+"Table.rows" group_builder->
data = Rows_Data.setup create_connection_fn setup.table_builder

group_builder.teardown <|
data.teardown

suite_builder.group prefix+"(Core_Spec) Table.rows" group_builder->
data = Rows_Data.setup build_sorted_table
group_builder.specify "should allow to get a Vector of Table rows" <|
rows = data.table.rows
rows.length . should_equal 4
@ -329,12 +271,8 @@ add_specs suite_builder setup =
data.table.rows . at 0 . at "unknown" . should_fail_with No_Such_Column


suite_builder.group prefix+"Table.read" group_builder->
data = Read_Data.setup create_connection_fn setup.table_builder

group_builder.teardown <|
data.teardown

suite_builder.group prefix+"(Core_Spec) Table.read" group_builder->
data = Read_Data.setup build_sorted_table
has_default_row_limit = setup.is_database

group_builder.specify "should have a row limit by default and warn about it" <|
@ -419,23 +357,24 @@ add_specs suite_builder setup =
Problems.assume_no_problems r5

if setup.is_database then group_builder.specify "should allow similar API on Connection.read" <|
data.connection.query data.t_big.name . row_count . should_equal 1500
connection = Internals_Access.get_connection data.t_big
connection.query data.t_big.name . row_count . should_equal 1500

t1 = data.connection.read data.t_big.name
t1 = connection.read data.t_big.name
t1.row_count . should_equal 1000
w1 = Problems.expect_only_warning Not_All_Rows_Downloaded t1
w1.max_rows . should_equal 1000

t2 = data.connection.read data.t_big.name (..First_With_Warning 42)
t2 = connection.read data.t_big.name (..First_With_Warning 42)
t2.row_count . should_equal 42
w2 = Problems.expect_only_warning Not_All_Rows_Downloaded t2
w2.max_rows . should_equal 42

t3 = data.connection.read data.t_big.name (..All_Rows)
t3 = connection.read data.t_big.name (..All_Rows)
t3.row_count . should_equal 1500
Problems.assume_no_problems t3

t4 = data.connection.read data.t_big.name (..First 1000)
t4 = connection.read data.t_big.name (..First 1000)
t4.row_count . should_equal 1000
Problems.assume_no_problems t4

@ -17,33 +17,20 @@ main filter=Nothing = run_default_backend add_specs filter
type Data
Value ~data

connection self = self.data.at 0
table self = self.data.at 1
table2 self = self.data.at 2

setup create_connection_fn table_builder = Data.Value <|
connection = create_connection_fn Nothing
table = table_builder [["Key", ["x", "x", "x", "x", "y", "y", "y", "z", "z"]], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9]]] connection=connection
table2 = table_builder [["Group", ["A","B","A","B","A","B","A","B","A"]], ["Key", ["x", "x", "x", "x", "y", "y", "y", "z", "z"]], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9]]] connection=connection
[connection, table, table2]

teardown self =
self.connection.close
table self = self.data.at 0
table2 self = self.data.at 1

setup table_builder = Data.Value <|
table = table_builder [["Key", ["x", "x", "x", "x", "y", "y", "y", "z", "z"]], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9]]]
table2 = table_builder [["Group", ["A","B","A","B","A","B","A","B","A"]], ["Key", ["x", "x", "x", "x", "y", "y", "y", "z", "z"]], ["Value", [1, 2, 3, 4, 5, 6, 7, 8, 9]]]
[table, table2]

add_specs suite_builder setup =
prefix = setup.prefix
create_connection_fn = setup.create_connection_func
db_todo = if setup.is_database.not then Nothing else "Table.cross_tab is not implemented yet in Database."
table_builder = setup.table_builder
suite_builder.group prefix+"Table.cross_tab" pending=db_todo group_builder->
data = Data.setup create_connection_fn setup.table_builder

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

data = Data.setup setup.table_builder
group_builder.specify "should cross_tab counts by default using first column as names" <|
t1 = data.table.cross_tab [] "Key"
t1.column_names . should_equal ["x", "y", "z"]
@ -246,9 +233,9 @@ add_specs suite_builder setup =
table = table_builder [["Text", ["a", "b"]], ["Int", [1, 2]], ["Float", [1.1, 2.2]]]
t1 = table.cross_tab ["Int"] "Text"
t1.column_names . should_equal ["Int", "a", "b"]
t1.at "Int" . value_type . is_integer . should_be_true
t1.at "a" . value_type . is_integer . should_be_true
t1.at "b" . value_type . is_integer . should_be_true
setup.expect_integer_type <| t1.at "Int"
setup.expect_integer_type <| t1.at "a"
setup.expect_integer_type <| t1.at "b"

t2 = table.cross_tab [] "Int" values=[Aggregate_Column.Average "Float", Aggregate_Column.Concatenate "Text"] . sort_columns
t2.column_names . should_equal ["1 Average Float", "1 Concatenate Text", "2 Average Float", "2 Concatenate Text"]

@ -1,54 +1,51 @@
from Standard.Base import all
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument

from Standard.Table import Value_Type
from Standard.Table import Table, Value_Type
from Standard.Table.Errors import Inexact_Type_Coercion, Invalid_Value_Type

from Standard.Database.Errors import Unsupported_Database_Operation
import Standard.Database.Column_Description.Column_Description
import Standard.Database.Internal.Internals_Access
# Import the `update_rows` extension method.
import Standard.Database.Extensions.Upload_Database_Table

from Standard.Test import all

from project.Common_Table_Operations.Util import all
import project.Common_Table_Operations.Util

main filter=Nothing = run_default_backend add_specs filter

type Data
Value ~data

connection self = self.data.at 0
dates self = self.data.at 1
times self = self.data.at 2
datetimes self = self.data.at 3
dates self = self.data.at 0
times self = self.data.at 1
datetimes self = self.data.at 2

setup create_connection_fn table_builder = Data.Value <|
connection = create_connection_fn Nothing
dates = table_builder [["A", [Date.new 2020 12 31, Date.new 2024 2 29, Date.new 1990 1 1, Nothing, Date.new 2024 5 26]], ["X", [2020, 29, 1, 100, 99]]] connection=connection
times = table_builder [["A", [Time_Of_Day.new 23 59 59 millisecond=567 nanosecond=123, Time_Of_Day.new 2 30 44 nanosecond=1002000, Time_Of_Day.new 0 0 0, Nothing]], ["X", [2020, 29, 1, 100]]] connection=connection
datetimes = table_builder [["A", [Date_Time.new 2020 12 31 23 59 59 millisecond=567 nanosecond=123, Date_Time.new 2024 2 29 2 30 44 nanosecond=1002000, Date_Time.new 1990 1 1 0 0 0, Nothing, Date_Time.new 2024 5 26 11 43 22]], ["X", [2020, 29, 1, 100, 99]]] connection=connection
[connection, dates, times, datetimes]
setup table_builder = Data.Value <|
dates = table_builder [["A", [Date.new 2020 12 31, Date.new 2024 2 29, Date.new 1990 1 1, Nothing, Date.new 2024 5 26]], ["X", [2020, 29, 1, 100, 99]]]
times = table_builder [["A", [Time_Of_Day.new 23 59 59 millisecond=567 nanosecond=123, Time_Of_Day.new 2 30 44 nanosecond=1002000, Time_Of_Day.new 0 0 0, Nothing]], ["X", [2020, 29, 1, 100]]]
datetimes = table_builder [["A", [Date_Time.new 2020 12 31 23 59 59 millisecond=567 nanosecond=123, Date_Time.new 2024 2 29 2 30 44 nanosecond=1002000, Date_Time.new 1990 1 1 0 0 0, Nothing, Date_Time.new 2024 5 26 11 43 22]], ["X", [2020, 29, 1, 100, 99]]]
[dates, times, datetimes]

teardown self =
self.connection.close

type Lazy_Ref
Value ~get
|
||||
|
||||
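# `Lazy_Ref` wraps a suspended computation: the `~get` field is only evaluated
# when first accessed, so shared tables are built once, and only if some test
# actually forces them. A minimal usage sketch (the column data here is
# illustrative, not part of the suite):
#
#     shared = Lazy_Ref.Value <| table_builder [["X", [1, 2, 3]]]
#     shared.get . row_count    # builds the table on first use
#     shared.get . row_count    # re-uses the already-built table
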
add_specs suite_builder setup =
prefix = setup.prefix
create_connection_fn = setup.create_connection_func

table_builder = Util.build_sorted_table setup
data = Data.setup table_builder
pending_datetime = if setup.test_selection.date_time.not then "Date/Time operations are not supported by this backend."

suite_builder.group prefix+"Date-Time support" pending=pending_datetime group_builder->
data = Data.setup create_connection_fn setup.table_builder

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

group_builder.specify "should allow to create Table with Date columns and round-trip them back to Enso" <|
d = Date.new 2020 10 24
table = table_builder [["A", [d]], ["X", [123]]]
table = setup.table_builder [["A", [d]], ["X", [123]]]
table.at "A" . value_type . should_equal Value_Type.Date
table.at "A" . to_vector . should_equal [d]
table.at "X" . to_vector . should_equal [123]

@ -56,7 +53,7 @@ add_specs suite_builder setup =
group_builder.specify "should allow to create Table with Time Of Day columns and round-trip them back to Enso" <|
# We do not check nanoseconds, as nano support may be optional.
t = Time_Of_Day.new hour=12 minute=3 second=5 millisecond=6 microsecond=7
table = table_builder [["B", [t]], ["X", [123]]]
table = setup.table_builder [["B", [t]], ["X", [123]]]
table.at "B" . value_type . should_equal Value_Type.Time
table.at "B" . to_vector . should_equal [t]
table.at "X" . to_vector . should_equal [123]

@ -75,15 +72,32 @@ add_specs suite_builder setup =
table.at "C" . to_vector . should_equal_tz_agnostic dates
table.at "X" . to_vector . should_equal xs

# Currently we support Value_Type.Date_Time with_timezone=False in Database backends, not in-memory.
# In this test we create the table using the connection, so it can only run on Database.
# This is the only way to enforce uploading Date Times without TZ without relying on Casting (which works on another level, so it would not be a good test of the upload mechanism).
if setup.is_database && setup.test_selection.supports_date_time_without_timezone then group_builder.specify "should allow to create Table with Date Time (without timezone) columns and round-trip them back to Enso, preserving the date/time parts, possibly losing the timezone/offset" <|
dt1 = Date_Time.new 2020 10 24 1 2 3 millisecond=4 microsecond=5
offset_zone = Time_Zone.new hours=(-11) minutes=(-30)
dt2 = Date_Time.new 2021 11 23 1 2 3 zone=offset_zone
named_zone = Time_Zone.parse "US/Hawaii"
dt3 = Date_Time.new 2019 11 23 4 5 6 zone=named_zone

connection = Internals_Access.get_connection data.dates
db_table = connection.create_table "dates-test-"+Date_Time.now.to_text [Column_Description.Value "C" (Value_Type.Date_Time with_timezone=False), Column_Description.Value "X" Value_Type.Integer] temporary=True on_problems=Problem_Behavior.Ignore
Problems.assume_no_problems db_table

dates = [dt1, dt2, dt3]
xs = [1, 2, 3]
local_table_with_tz = Table.new [["C", dates], ["X", xs]]
db_table.update_rows local_table_with_tz key_columns=["X"] . should_succeed
ordered = db_table.sort "X"

ordered.at "X" . to_vector . should_equal xs
ordered.at "C" . value_type . should_equal (Value_Type.Date_Time with_timezone=False)
# We now look at the values ignoring the time zone / offset - only from the local time perspective.
ordered.at "C" . to_vector . map as_local_date_time_repr . should_equal ["2020-10-24 01:02:03.004005", "2021-11-23 01:02:03.0", "2019-11-23 04:05:06.0"]

suite_builder.group prefix+"Date-Time operations" pending=pending_datetime group_builder->
data = Data.setup create_connection_fn setup.table_builder

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

group_builder.specify "should allow to get the year/month/day/day_of_year/day_of_week of a Date" <|
t = data.dates
a = t.at "A"

@ -94,8 +108,8 @@ add_specs suite_builder setup =
a.day_of_week . to_vector . should_equal [4, 4, 1, Nothing, 7]
[a.year, a.month, a.day, a.day_of_year, a.day_of_week].each c->
Test.with_clue "The column "+c.name+" value type ("+c.value_type.to_display_text+") should be an integer: " <|
c.value_type.is_integer.should_be_true
c.value_type.is_integer.should_be_true
setup.expect_integer_type c
setup.expect_integer_type c

((a.year) == (t.at "X")).to_vector . should_equal [True, False, False, Nothing, False]
((a.month) == (t.at "X")).to_vector . should_equal [False, False, True, Nothing, False]

@ -111,7 +125,7 @@ add_specs suite_builder setup =
a.day_of_week . to_vector . should_equal [4, 4, 1, Nothing, 7]
[a.year, a.month, a.day, a.day_of_year, a.day_of_week].each c->
Test.with_clue "The column "+c.name+" value type ("+c.value_type.to_display_text+") should be an integer: " <|
c.value_type.is_integer.should_be_true
setup.expect_integer_type c

((a.year) == (t.at "X")).to_vector . should_equal [True, False, False, Nothing, False]
((a.month) == (t.at "X")).to_vector . should_equal [False, False, True, Nothing, False]

@ -121,7 +135,7 @@ add_specs suite_builder setup =
t = table_builder [["A", [Date.new 2020 12 31, Date.new 2024 2 29, Date.new 1990 1 1, Nothing]], ["X", [0, 2, 1, 100]], ["B", [Date_Time.new 2020 10 31 23 59 59, Date_Time.new 2024 4 29 2 30 44, Date_Time.new 1990 10 1 0 0 0, Nothing]]]
c = t.evaluate_expression "year([A]) + [X] + day([A]) * month([B])"
Test.with_clue "The column "+c.name+" value type ("+c.value_type.to_display_text+") should be an integer: " <|
c.value_type.is_integer.should_be_true
setup.expect_integer_type c
c.to_vector . should_equal [(2020 + 0 + 31 * 10), (2024 + 2 + 29 * 4), (1990 + 1 + 1 * 10), Nothing]

group_builder.specify "should allow to get hour/minute/second of a Time_Of_Day" <|

@ -136,7 +150,7 @@ add_specs suite_builder setup =

[a.hour, a.minute, a.second, a.date_part Time_Period.Hour, a.date_part Time_Period.Minute, a.date_part Time_Period.Second].each c->
Test.with_clue "The column "+c.name+" value type ("+c.value_type.to_display_text+") should be an integer: " <|
c.value_type.is_integer.should_be_true
setup.expect_integer_type c

group_builder.specify "should allow to get hour/minute/second of a Date_Time" <|
a = data.datetimes.at "A"

@ -150,7 +164,7 @@ add_specs suite_builder setup =

[a.hour, a.minute, a.second, a.date_part Time_Period.Hour, a.date_part Time_Period.Minute, a.date_part Time_Period.Second].each c->
Test.with_clue "The column "+c.name+" value type ("+c.value_type.to_display_text+") should be an integer: " <|
c.value_type.is_integer.should_be_true
setup.expect_integer_type c

group_builder.specify "should allow to get millisecond/nanosecond of Time_Of_Day through date_part" <|
a = data.times.at "A"

@ -168,7 +182,7 @@ add_specs suite_builder setup =
testCases = testCases + (a.date_part Time_Period.Nanosecond)
testCases.each c->
Test.with_clue "The column "+c.name+" value type ("+c.value_type.to_display_text+") should be an integer: " <|
c.value_type.is_integer.should_be_true
setup.expect_integer_type c

group_builder.specify "should allow to get week/quarter of Date through date_part" <|
a = data.dates.at "A"

@ -177,7 +191,7 @@ add_specs suite_builder setup =

[a.date_part Date_Period.Quarter, a.date_part Date_Period.Week].each c->
Test.with_clue "The column "+c.name+" value type ("+c.value_type.to_display_text+") should be an integer: " <|
c.value_type.is_integer.should_be_true
setup.expect_integer_type c

group_builder.specify "should allow to get various date_part of Date_Time" <|
a = data.datetimes.at "A"

@ -196,7 +210,7 @@ add_specs suite_builder setup =
testCases = testCases + (a.date_part Time_Period.Nanosecond)
testCases.each c->
Test.with_clue "The column "+c.name+" value type ("+c.value_type.to_display_text+") should be an integer: " <|
c.value_type.is_integer.should_be_true
setup.expect_integer_type c


group_builder.specify "should allow to compare dates" <|

@ -275,45 +289,44 @@ add_specs suite_builder setup =
((t.at "Z") - (t.at "Y")) . should_fail_with Invalid_Value_Type
((t.at "Z") - (Date.new 2021 11 3)) . should_fail_with Invalid_Value_Type

group_builder.specify "should allow computing a SQL-like difference" <|
t1 = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date.new 2021 12 5]]]

zone = Time_Zone.parse "Europe/Warsaw"
dates1 = Lazy_Ref.Value <|
dates = [["d_X", [Date.new 2021 11 3]], ["d_Y", [Date.new 2021 12 5]], ["X1", [Date.new 2021 01 02]]]
date_times = [["dt_X", [Date_Time.new 2021 11 3 10 15 0 zone=zone]], ["dt_Y", [Date_Time.new 2021 12 5 12 30 20 zone=zone]]]
times = [["t_X", [Time_Of_Day.new 10 15 0]], ["t_Y", [Time_Of_Day.new 12 30 20]]]
table_builder (dates + date_times + times)
group_builder.specify "should allow computing a SQL-like difference (date_diff)" <|
t1 = dates1.get.select_columns ["d_X", "d_Y", "X1"] reorder=True . rename_columns ["X", "Y", "X1"]
(t1.at "X").date_diff (t1.at "Y") Date_Period.Day . to_vector . should_equal [32]
(t1.at "Y").date_diff (t1.at "X") Date_Period.Day . to_vector . should_equal [-32]
(t1.at "X").date_diff (Date.new 2021 11 3) Date_Period.Day . to_vector . should_equal [0]

(t1.at "X").date_diff (t1.at "Y") Date_Period.Month . to_vector . should_equal [1]
(t1.at "X").date_diff (Date.new 2021 12 1) Date_Period.Month . to_vector . should_equal [0]
(t1.at "X").date_diff (Date.new 2020 12 1) Date_Period.Month . to_vector . should_equal [-11]

(t1.at "X").date_diff (t1.at "Y") Date_Period.Quarter . to_vector . should_equal [0]
(t1.at "X").date_diff (Date.new 2021 5 1) Date_Period.Quarter . to_vector . should_equal [-2]
(t1.at "X").date_diff (Date.new 2023 7 1) Date_Period.Quarter . to_vector . should_equal [6]

(t1.at "X").date_diff (t1.at "Y") Date_Period.Year . to_vector . should_equal [0]
(t1.at "X").date_diff (Date.new 2021 12 1) Date_Period.Year . to_vector . should_equal [0]
(t1.at "X").date_diff (Date.new 2020 10 1) Date_Period.Year . to_vector . should_equal [-1]

# Ensure months of varying length (e.g. February) are still counted right.
t1_2 = table_builder [["X", [Date.new 2021 01 02]]]
(t1_2 . at "X").date_diff (Date.new 2021 03 02) Date_Period.Day . to_vector . should_equal [59]
(t1_2 . at "X").date_diff (Date.new 2021 03 02) Date_Period.Month . to_vector . should_equal [2]
(t1_2 . at "X").date_diff (Date.new 2021 03 01) Date_Period.Day . to_vector . should_equal [58]
(t1_2 . at "X").date_diff (Date.new 2021 03 01) Date_Period.Month . to_vector . should_equal [1]
(t1.at "X1").date_diff (Date.new 2021 03 02) Date_Period.Day . to_vector . should_equal [59]
(t1.at "X1").date_diff (Date.new 2021 03 02) Date_Period.Month . to_vector . should_equal [2]
(t1.at "X1").date_diff (Date.new 2021 03 01) Date_Period.Day . to_vector . should_equal [58]

# We do allow the `Time_Period.Day` as a kind of alias for `Date_Period.Day` here.
(t1.at "X").date_diff (t1.at "Y") Time_Period.Day . to_vector . should_equal [32]
(t1.at "X").date_diff (t1.at "Y") Time_Period.Hour . should_fail_with Illegal_Argument

zone = Time_Zone.parse "Europe/Warsaw"
t2 = table_builder [["X", [Date_Time.new 2021 11 3 10 15 0 zone=zone]], ["Y", [Date_Time.new 2021 12 5 12 30 20 zone=zone]]]
t2 = dates1.get.select_columns ["dt_X", "dt_Y"] reorder=True . rename_columns ["X", "Y"]

(t2.at "X").date_diff (t2.at "Y") Date_Period.Day . to_vector . should_equal [32]
(t2.at "Y").date_diff (t2.at "X") Date_Period.Day . to_vector . should_equal [-32]
(t2.at "X").date_diff (Date_Time.new 2021 11 3 10 15 0 zone=zone) Date_Period.Day . to_vector . should_equal [0]

(t2.at "X").date_diff (t2.at "Y") Date_Period.Month . to_vector . should_equal [1]
(t2.at "X").date_diff (Date_Time.new 2021 12 1 10 15 0 zone=zone) Date_Period.Month . to_vector . should_equal [0]

(t2.at "X").date_diff (t2.at "Y") Date_Period.Year . to_vector . should_equal [0]
(t2.at "X").date_diff (Date_Time.new 2031 12 1 10 15 0 zone=zone) Date_Period.Year . to_vector . should_equal [10]

@ -343,7 +356,7 @@ add_specs suite_builder setup =
(t2.at "X").date_diff (t2.at "Y") Time_Period.Nanosecond . should_fail_with Unsupported_Database_Operation
(t2.at "X").date_diff (Date_Time.new 2021 11 3 10 15 30 123 456 789 zone=zone) Time_Period.Nanosecond . should_fail_with Unsupported_Database_Operation

t3 = table_builder [["X", [Time_Of_Day.new 10 15 0]], ["Y", [Time_Of_Day.new 12 30 20]]]
t3 = dates1.get.select_columns ["t_X", "t_Y"] reorder=True . rename_columns ["X", "Y"]

# There is no default period:
(t3.at "X").date_diff (t3.at "Y") Date_Period.Month . should_fail_with Illegal_Argument

@ -374,6 +387,19 @@ add_specs suite_builder setup =
(t3.at "X").date_diff (t3.at "Y") Time_Period.Nanosecond . should_fail_with Unsupported_Database_Operation
(t3.at "X").date_diff (Time_Of_Day.new 10 15 12 34 56 78) Time_Period.Nanosecond . should_fail_with Unsupported_Database_Operation

## TODO These were extracted from the test above; they may later be merged again, or kept separate, depending on the resolution of
https://github.com/enso-org/enso/issues/10438
group_builder.specify "date_diff edge cases" pending=(if setup.prefix.contains "Snowflake" then "TODO: https://github.com/enso-org/enso/issues/10438") <|
t1 = dates1.get.select_columns ["d_X", "d_Y", "X1"] reorder=True . rename_columns ["X", "Y", "X1"]
## TODO Snowflake only compares the year and month, but Postgres/in-memory check whether a full month has elapsed by looking at the days
(t1.at "X").date_diff (Date.new 2021 12 1) Date_Period.Month . to_vector . should_equal [0]
(t1.at "X").date_diff (Date.new 2023 7 1) Date_Period.Quarter . to_vector . should_equal [6]

(t1.at "X1").date_diff (Date.new 2021 03 01) Date_Period.Month . to_vector . should_equal [1]

t2 = dates1.get.select_columns ["dt_X", "dt_Y"] reorder=True . rename_columns ["X", "Y"]
(t2.at "X").date_diff (Date_Time.new 2021 12 1 10 15 0 zone=zone) Date_Period.Month . to_vector . should_equal [0]

group_builder.specify "date_diff should return integers" <|
t = table_builder [["X", [Date.new 2021 01 31]], ["Y", [Time_Of_Day.new 12 30 20]], ["Z", [Date_Time.new 2021 12 5 12 30 20]]]

@ -381,13 +407,13 @@ add_specs suite_builder setup =
date_periods = [Date_Period.Day, Date_Period.Week, Date_Period.Month, Date_Period.Quarter, Date_Period.Year]

date_periods.each p->
(t.at "X").date_diff (Date.new 2021 12 05) p . value_type . is_integer . should_be_true
setup.expect_integer_type <| (t.at "X").date_diff (Date.new 2021 12 05) p

time_periods.each p->
(t.at "Y").date_diff (Time_Of_Day.new 01 02) p . value_type . is_integer . should_be_true
setup.expect_integer_type <| (t.at "Y").date_diff (Time_Of_Day.new 01 02) p

(date_periods+time_periods).each p->
(t.at "Z").date_diff (Date_Time.new 2021 12 05 01 02) p . value_type . is_integer . should_be_true
setup.expect_integer_type <| (t.at "Z").date_diff (Date_Time.new 2021 12 05 01 02) p

group_builder.specify "should not allow mixing types in date_diff" <|
t = table_builder [["X", [Date.new 2021 01 31]], ["Y", [Time_Of_Day.new 12 30 20]], ["Z", [Date_Time.new 2021 12 5 12 30 20]]]

@ -403,6 +429,7 @@ add_specs suite_builder setup =

group_builder.specify "should allow an SQL-like shift" <|
t1 = table_builder [["X", [Date.new 2021 01 31, Date.new 2021 01 01, Date.new 2021 12 31]], ["Y", [5, -1, 0]]]
t1.at "X" . value_type . should_equal Value_Type.Date
(t1.at "X").date_add (t1.at "Y") Date_Period.Day . to_vector . should_equal [Date.new 2021 02 05, Date.new 2020 12 31, Date.new 2021 12 31]
(t1.at "X").date_add -1 Date_Period.Day . to_vector . should_equal [Date.new 2021 01 30, Date.new 2020 12 31, Date.new 2021 12 30]
(t1.at "X").date_add (t1.at "Y") Date_Period.Month . to_vector . should_equal [Date.new 2021 06 30, Date.new 2020 12 01, Date.new 2021 12 31]

@ -417,27 +444,29 @@ add_specs suite_builder setup =

(t1.at "X").date_add 1 Time_Period.Hour . should_fail_with Illegal_Argument
# Will accept Time_Period.Day as an alias of Date_Period.Day
(t1.at "X").date_add 1 Time_Period.Day . to_vector . should_equal [Date.new 2021 02 01, Date.new 2021 01 02, Date.new 2022 01 01]
r1 = (t1.at "X").date_add 1 Time_Period.Day
r1.value_type . should_equal Value_Type.Date
r1.to_vector . should_equal [Date.new 2021 02 01, Date.new 2021 01 02, Date.new 2022 01 01]

t2 = table_builder [["X", [Date_Time.new 2021 01 31 12 30 0, Date_Time.new 2021 01 01 12 30 0, Date_Time.new 2021 12 31 12 30 0]], ["Y", [5, -1, 0]]]
(t2.at "X").date_add (t2.at "Y") Date_Period.Day . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 02 05 12 30 0, Date_Time.new 2020 12 31 12 30 0, Date_Time.new 2021 12 31 12 30 0]
t2 = table_builder [["X", [Date_Time.new 2021 01 31 12 30 0, Date_Time.new 2021 01 01 12 30 0, Date_Time.new 2021 12 31 12 30 0]], ["Y", [11, -1, 0]]]
(t2.at "X").date_add (t2.at "Y") Date_Period.Day . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 02 11 12 30 0, Date_Time.new 2020 12 31 12 30 0, Date_Time.new 2021 12 31 12 30 0]
(t2.at "X").date_add -1 Time_Period.Day . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 01 30 12 30 0, Date_Time.new 2020 12 31 12 30 0, Date_Time.new 2021 12 30 12 30 0]
(t2.at "X").date_add (t2.at "Y") Date_Period.Month . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 06 30 12 30 0, Date_Time.new 2020 12 01 12 30 0, Date_Time.new 2021 12 31 12 30 0]
(t2.at "X").date_add (t2.at "Y") Date_Period.Month . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 12 31 12 30 0, Date_Time.new 2020 12 01 12 30 0, Date_Time.new 2021 12 31 12 30 0]
(t2.at "X").date_add 1 Date_Period.Month . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 02 28 12 30 0, Date_Time.new 2021 02 01 12 30 0, Date_Time.new 2022 01 31 12 30 0]
(t2.at "X").date_add (t2.at "Y") Date_Period.Year . to_vector . should_equal_tz_agnostic [Date_Time.new 2026 01 31 12 30 0, Date_Time.new 2020 01 01 12 30 0, Date_Time.new 2021 12 31 12 30 0]
(t2.at "X").date_add (t2.at "Y") Date_Period.Year . to_vector . should_equal_tz_agnostic [Date_Time.new 2032 01 31 12 30 0, Date_Time.new 2020 01 01 12 30 0, Date_Time.new 2021 12 31 12 30 0]
(t2.at "X").date_add 1 Date_Period.Year . to_vector . should_equal_tz_agnostic [Date_Time.new 2022 01 31 12 30 0, Date_Time.new 2022 01 01 12 30 0, Date_Time.new 2022 12 31 12 30 0]

(t2.at "X").date_add (t2.at "Y") Time_Period.Hour . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 01 31 17 30 0, Date_Time.new 2021 01 01 11 30 0, Date_Time.new 2021 12 31 12 30 0]
(t2.at "X").date_add (t2.at "Y") Time_Period.Hour . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 01 31 23 30 0, Date_Time.new 2021 01 01 11 30 0, Date_Time.new 2021 12 31 12 30 0]
(t2.at "X").date_add 1 Time_Period.Hour . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 01 31 13 30 0, Date_Time.new 2021 01 01 13 30 0, Date_Time.new 2021 12 31 13 30 0]
(t2.at "X").date_add (t2.at "Y") Time_Period.Minute . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 01 31 12 35 0, Date_Time.new 2021 01 01 12 29 0, Date_Time.new 2021 12 31 12 30 0]
(t2.at "X").date_add (t2.at "Y") Time_Period.Minute . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 01 31 12 41 0, Date_Time.new 2021 01 01 12 29 0, Date_Time.new 2021 12 31 12 30 0]
(t2.at "X").date_add 1 Time_Period.Minute . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 01 31 12 31 0, Date_Time.new 2021 01 01 12 31 0, Date_Time.new 2021 12 31 12 31 0]
(t2.at "X").date_add (t2.at "Y") Time_Period.Second . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 01 31 12 30 5, Date_Time.new 2021 01 01 12 29 59, Date_Time.new 2021 12 31 12 30 0]
(t2.at "X").date_add (t2.at "Y") Time_Period.Second . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 01 31 12 30 11, Date_Time.new 2021 01 01 12 29 59, Date_Time.new 2021 12 31 12 30 0]
(t2.at "X").date_add 1 Time_Period.Second . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 01 31 12 30 1, Date_Time.new 2021 01 01 12 30 1, Date_Time.new 2021 12 31 12 30 1]
(t2.at "X").date_add 1 Time_Period.Millisecond . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 01 31 12 30 millisecond=1, Date_Time.new 2021 01 01 12 30 millisecond=1, Date_Time.new 2021 12 31 12 30 millisecond=1]
(t2.at "X").date_add 1 Time_Period.Microsecond . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 01 31 12 30 microsecond=1, Date_Time.new 2021 01 01 12 30 microsecond=1, Date_Time.new 2021 12 31 12 30 microsecond=1]
case setup.test_selection.supports_nanoseconds_in_time of
True ->
(t2.at "X").date_add 1 Time_Period.Nanosecond . to_vector . should_equal [Date_Time.new 2021 01 31 12 30 nanosecond=1, Date_Time.new 2021 01 01 12 30 nanosecond=1, Date_Time.new 2021 12 31 12 30 nanosecond=1]
(t2.at "X").date_add 1 Time_Period.Nanosecond . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 01 31 12 30 nanosecond=1, Date_Time.new 2021 01 01 12 30 nanosecond=1, Date_Time.new 2021 12 31 12 30 nanosecond=1]
False ->
(t2.at "X").date_add 1 Time_Period.Nanosecond . should_fail_with Unsupported_Database_Operation

@ -463,28 +492,39 @@ add_specs suite_builder setup =

# Date period defaults to Day for date/date-time
(t1.at "X").date_add (t1.at "Y") . to_vector . should_equal [Date.new 2021 02 05, Date.new 2020 12 31, Date.new 2021 12 31]
(t2.at "X").date_add (t2.at "Y") . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 02 05 12 30 0, Date_Time.new 2020 12 31 12 30 0, Date_Time.new 2021 12 31 12 30 0]
(t2.at "X").date_add (t2.at "Y") . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 02 11 12 30 0, Date_Time.new 2020 12 31 12 30 0, Date_Time.new 2021 12 31 12 30 0]
# and defaults to Hour for time-of-day
(t3.at "X").date_add (t3.at "Y") . to_vector . should_equal [Time_Of_Day.new 17 30 0, Time_Of_Day.new 22 45 0, Time_Of_Day.new 1 30 0]

group_builder.specify "should check shift_amount type in date_add" <|
t = table_builder [["X", [Date.new 2021 01 31]]]
t.at "X" . date_add "text" Date_Period.Day . should_fail_with Invalid_Value_Type
t = dates1.get
t.at "d_X" . date_add "text" Date_Period.Day . should_fail_with Invalid_Value_Type

group_builder.specify "date_diff and date_add should work correctly with DST" pending="May be failing on some Database configurations. ToDo: investigate - https://github.com/enso-org/enso/issues/7326" <|
zone = Time_Zone.parse "Europe/Warsaw"
# TODO the behaviour here may differ between backends.
# Postgres will store time in UTC and Snowflake remembers the offset but discards the timezone, so each of them has some trouble around DST.
dt1 = Date_Time.new 2023 03 26 00 30 00 zone=zone
t = table_builder [["X", [dt1]]]
dt3 = Date_Time.new 2023 03 28 01 30 00 zone=zone
t = table_builder [["X", [dt1]], ["Y", [dt3]]]
x = t.at "X"

# +24h will shift 1 day and 1 hour, because the 26th of March has only 23 hours within it
x.date_add 24 Time_Period.Hour . to_vector . should_equal_tz_agnostic [Date_Time.new 2023 03 27 01 30 00 zone=zone]

# But a 1 day date shift will shift 1 day, keeping the time, even if that particular day is only 23 hours.
x.date_add 1 Date_Period.Day . to_vector . should_equal_tz_agnostic [Date_Time.new 2023 03 27 00 30 00 zone=zone]
plus_1_calendar_day = x.date_add 1 Date_Period.Day
# plus_1_calendar_day.to_vector . should_equal_tz_agnostic [Date_Time.new 2023 03 27 00 30 00 zone=zone]
# but we cannot use `should_equal_tz_agnostic` because after conversion to UTC this will actually be a different hour!
# instead let's compare as local timestamps:
plus_1_calendar_day.to_vector . map as_local_date_time_repr . should_equal ["2023-03-27 00:30:00.0"]

# Time_Period.Day will shift by 24 hours.
x.date_add 1 Time_Period.Day . to_vector . should_equal_tz_agnostic [Date_Time.new 2023 03 27 01 30 00 zone=zone]

# Once we add 3 months, we should get the same hour back.
plus_3_months = x.date_add 3 Date_Period.Month
plus_3_months.to_vector.map as_local_date_time_repr . should_equal ["2023-06-26 00:30:00.0"]

dt2 = Date_Time.new 2023 03 27 00 30 00 zone=zone
x.date_diff dt2 Time_Period.Hour . to_vector . should_equal [23]

@ -500,24 +540,16 @@ add_specs suite_builder setup =
# Again consistent for both backends, when counting in hours - 23 hours is not a full 24-hour day.
x.date_diff dt2 Time_Period.Day . to_vector . should_equal [0]

dt3 = Date_Time.new 2023 03 28 01 30 00 zone=zone
dt4 = Date_Time.new 2023 03 29 00 30 00 zone=zone
t2 = table_builder [["X", [dt3]]]
y = t.at "Y"
# No DST switch here, so all backends agree that 0 days elapsed in the 23 hours.
(t2.at "X").date_diff dt4 Date_Period.Day . to_vector . should_equal [0]
(t2.at "X").date_diff dt4 Time_Period.Day . to_vector . should_equal [0]
(t2.at "X").date_diff dt4 Time_Period.Hour . to_vector . should_equal [23]
# Snowflake returns 1...
y.date_diff dt4 Date_Period.Day . to_vector . should_equal [0]
y.date_diff dt4 Time_Period.Day . to_vector . should_equal [0]
y.date_diff dt4 Time_Period.Hour . to_vector . should_equal [23]

if setup.test_selection.date_time.not then
suite_builder.group prefix+"partial Date-Time support" group_builder->
data = Data.setup create_connection_fn setup.table_builder

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

group_builder.specify "will fail when uploading a Table containing Dates" <|
d = Date.new 2020 10 24
table = table_builder [["A", [d]], ["X", [123]]]

@ -533,3 +565,5 @@ add_specs suite_builder setup =

t.at "A" . date_diff (t.at "B") Date_Period.Day . should_fail_with Invalid_Value_Type
t.at "A" . date_add 42 Date_Period.Day . should_fail_with Invalid_Value_Type

as_local_date_time_repr dt = dt.format "YYYY-MM-DD HH:mm:ss.f"
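# For reference, the `f` pattern prints a variable-length fraction of a second,
# matching the expected strings used above (assuming the formatter behaves as
# in those expectations):
#
#     as_local_date_time_repr (Date_Time.new 2023 3 27 0 30) == "2023-03-27 00:30:00.0"
#     as_local_date_time_repr (Date_Time.new 2020 10 24 1 2 3 millisecond=4 microsecond=5) == "2020-10-24 01:02:03.004005"
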
@ -13,29 +13,11 @@ from project.Common_Table_Operations.Util import all

main filter=Nothing = run_default_backend add_specs filter

type Data
Value ~connection

setup create_connection_fn =
Data.Value (create_connection_fn Nothing)

teardown self =
self.connection.close


add_specs suite_builder setup =
prefix = setup.prefix
create_connection_fn = setup.create_connection_func
table_builder = setup.light_table_builder
pending_datetime = if setup.test_selection.date_time.not then "Date/Time operations are not supported by this backend."
suite_builder.group prefix+"Table.set with Simple_Expression" group_builder->
data = Data.setup create_connection_fn

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

suite_builder.group prefix+"(Derived_Columns_Spec) Table.set with Simple_Expression" group_builder->
group_builder.specify "arithmetics" <|
t = table_builder [["A", [1, 2]], ["B", [10, 40]]]
t.set (Simple_Expression.Simple_Expr (Column_Ref.Name "A") Simple_Calculation.Copy) "C" . at "C" . to_vector . should_equal [1, 2]

@ -113,7 +95,6 @@ add_specs suite_builder setup =
t2.set (Simple_Expression.Simple_Expr (Column_Ref.Name "C") (Simple_Calculation.Date_Add 5 Date_Period.Month)) "Z" . at "Z" . to_vector . should_equal [Date.new 2003 5 10, Date.new 2005 6 01]
t2.set (Simple_Expression.Simple_Expr (Column_Ref.Name "D") (Simple_Calculation.Date_Add 15 Time_Period.Hour)) "Z" . at "Z" . to_vector . should_equal [Time_Of_Day.new 03 45, Time_Of_Day.new 16 01]

t2.set (Simple_Expression.Simple_Expr (Column_Ref.Name "C") (Simple_Calculation.Date_Diff (Date.new 2003) Date_Period.Year)) "Z" . at "Z" . to_vector . should_equal [0, -2]
t2.set (Simple_Expression.Simple_Expr (Column_Ref.Name "D") (Simple_Calculation.Date_Diff (Time_Of_Day.new 13) Time_Period.Minute)) "Z" . at "Z" . to_vector . should_equal [15, 59+(60*11)]

t2.set (Simple_Expression.Simple_Expr (Column_Ref.Name "C") (Simple_Calculation.Date_Part Date_Period.Year)) "Z" . at "Z" . to_vector . should_equal [2002, 2005]

@ -125,6 +106,10 @@ add_specs suite_builder setup =
t.set (Simple_Expression.Simple_Expr (Column_Ref.Name "x") (Simple_Calculation.Date_Part Date_Period.Year)) . should_fail_with Invalid_Value_Type
Test.expect_panic Type_Error <| t2.set (Simple_Expression.Simple_Expr 42 (Simple_Calculation.Date_Diff "x" Date_Period.Year))

group_builder.specify "date/time (edge case)" pending=(pending_datetime.if_nothing (if setup.prefix.contains "Snowflake" then "TODO: https://github.com/enso-org/enso/issues/10438")) <|
t2 = table_builder [["C", [Date.new 2002 12 10, Date.new 2005 01 01]], ["D", [Time_Of_Day.new 12 45, Time_Of_Day.new 01 01]]]
t2.set (Simple_Expression.Simple_Expr (Column_Ref.Name "C") (Simple_Calculation.Date_Diff (Date.new 2003 01 01) Date_Period.Year)) "Z" . at "Z" . to_vector . should_equal [0, -2]

group_builder.specify "boolean" <|
t = table_builder [["A", [True, False]], ["T", [True, True]]]

@ -223,15 +208,7 @@ add_specs suite_builder setup =
t.set (Simple_Expression.Simple_Expr (Column_Ref.Name "zzz") Simple_Calculation.Not) . should_fail_with No_Such_Column
t.set (Simple_Expression.Simple_Expr (Column_Ref.Index 42) Simple_Calculation.Not) . should_fail_with Index_Out_Of_Bounds

suite_builder.group prefix+"Unique derived column names" group_builder->
data = Data.setup create_connection_fn

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

suite_builder.group prefix+"(Derived_Columns_Spec) Unique derived column names" group_builder->
group_builder.specify "Should not disambiguate two derived columns that would otherwise have had the same name, with Set_Mode.Add_Or_Update" <|
t = table_builder [["X", [1, 2, 3]]]
column_op = Simple_Expression.Simple_Expr 2 (Simple_Calculation.Power (Column_Ref.Name "X"))

@ -224,3 +224,11 @@ add_specs suite_builder setup =
Test.with_clue "t3[C].value_type="+vt.to_display_text+": " <|
vt.should_be_a (Value_Type.Char ...)
vt.variable_length.should_be_true

# Literal tables only make sense in the DB.
if setup.is_database then group_builder.specify "iif should work with literal tables" <|
literal_table = setup.light_table_builder [["X", [True, False, Nothing]], ["row_id", [1, 2, 3]]] . sort "row_id"
literal_table.at "X" . value_type . should_equal Value_Type.Boolean
c = literal_table.at "X" . iif 10 20
c.to_vector . should_equal [10, 20, Nothing]
setup.expect_integer_type <| c

@ -224,20 +224,20 @@ add_specs suite_builder setup =
t2 = table_prepared.merge lookup key_columns=["key1", "key2"] allow_unmatched_rows=allow_unmatched_rows add_new_columns=True
t2.column_names . should_equal ["key1", "key2", "X", "Y", "Z", "W", "A", "B", "C", "D", "X2", "Y2", "Z2", "W2"]

t2.at "key1" . value_type . should_be_a (Value_Type.Integer ...)
setup.expect_integer_type <| t2.at "key1"
t2.at "key2" . value_type . should_equal Value_Type.Char

t2.at "X" . value_type . should_be_a (Value_Type.Integer ...)
setup.expect_integer_type <| t2.at "X"
t2.at "Y" . value_type . should_equal Value_Type.Char
t2.at "Z" . value_type . should_equal Value_Type.Float
t2.at "W" . value_type . should_equal Value_Type.Boolean

t2.at "A" . value_type . should_be_a (Value_Type.Integer ...)
setup.expect_integer_type <| t2.at "A"
t2.at "B" . value_type . should_equal Value_Type.Char
t2.at "C" . value_type . should_equal Value_Type.Float
t2.at "D" . value_type . should_equal Value_Type.Boolean

t2.at "X2" . value_type . should_be_a (Value_Type.Integer ...)
setup.expect_integer_type <| t2.at "X2"
t2.at "Y2" . value_type . should_equal Value_Type.Char
t2.at "Z2" . value_type . should_equal Value_Type.Float
t2.at "W2" . value_type . should_equal Value_Type.Boolean

@ -303,7 +303,7 @@ add_specs suite_builder setup =

t2 = my_table.merge lookup key_columns="X" allow_unmatched_rows=False
t2.column_names . should_equal ["X", "Y", "Z"]
t2.at "Y" . value_type . is_integer . should_be_true
setup.expect_integer_type <| t2.at "Y"
m2 = t2 |> materialize |> _.sort "Z"
m2.at "X" . to_vector . should_equal [2, 3, 2, 3]
m2.at "Y" . to_vector . should_equal [11, 111, 11, 111]

@ -9,36 +9,20 @@ from Standard.Database import all

from Standard.Test import all

from project.Common_Table_Operations.Util import run_default_backend
from project.Common_Table_Operations.Util import run_default_backend, build_sorted_table
import project.Util

main filter=Nothing = run_default_backend add_specs filter

type Data
Value ~connection

setup create_connection_fn =
Data.Value (create_connection_fn Nothing)

teardown self = self.connection.close


add_specs suite_builder setup =
prefix = setup.prefix
create_connection_fn = setup.create_connection_func
suite_builder.group prefix+"Table.replace" group_builder->
data = Data.setup create_connection_fn

group_builder.teardown <|
data.teardown

table_builder cols =
setup.table_builder cols connection=data.connection

table_builder = build_sorted_table setup
table_builder_typed columns value_type =
cast_columns = columns.map c->
Column.from_vector (c.at 0) (c.at 1) . cast value_type
setup.table_builder cast_columns connection=data.connection
build_sorted_table setup cast_columns

group_builder.specify "should be able to replace values via a lookup table, using from/to column defaults" <|
table = table_builder [['x', [1, 2, 3, 4, 2]], ['y', ['a', 'b', 'c', 'd', 'e']]]

@ -180,3 +164,17 @@ add_specs suite_builder setup =
table.replace lookup_table 'x' from_column=8 . should_fail_with Illegal_Argument
table.replace lookup_table 'x' to_column=9 . should_fail_with Illegal_Argument
table.replace lookup_table 'x' from_column=8 to_column=9 . should_fail_with Illegal_Argument

if setup.test_selection.date_time then group_builder.specify "should correctly infer types of columns (Date/Time)" <|
my_table = table_builder [["X", [1, 2, 3, 2]]]

t1 = my_table.replace (Dictionary.from_vector [[1, Date.new 2021], [2, Date.new 2022], [3, Date.new 2023]]) "X" allow_unmatched_rows=False
t1.at "X" . value_type . should_equal Value_Type.Date
t1.at "X" . to_vector . should_equal [Date.new 2021, Date.new 2022, Date.new 2023, Date.new 2022]

t2 = my_table.replace (Dictionary.from_vector [[1, Time_Of_Day.new 1 2 3], [2, Time_Of_Day.new 4 5 6], [3, Time_Of_Day.new 7 8 9]]) "X" allow_unmatched_rows=False
t2.at "X" . value_type . should_equal Value_Type.Time
t2.at "X" . to_vector . should_equal [Time_Of_Day.new 1 2 3, Time_Of_Day.new 4 5 6, Time_Of_Day.new 7 8 9, Time_Of_Day.new 4 5 6]

t3 = my_table.replace (Dictionary.from_vector [[1, Date_Time.new 2021 1 1 1 1 1], [2, Date_Time.new 2022 2 2 2 2 2], [3, Date_Time.new 2023 3 3 3 3 3]]) "X" allow_unmatched_rows=False
t3.at "X" . value_type . should_be_a (Value_Type.Date_Time ...)

@ -285,7 +285,7 @@ run_union_tests group_builder setup call_union =
t1 = table_builder [["A", [0, 1, 2]]]
t2 = table_builder [["A", [1.0, 2.0, 2.5]]]

t1.at "A" . value_type . is_integer . should_be_true
setup.expect_integer_type <| t1.at "A"
t2.at "A" . value_type . is_floating_point . should_be_true

t3 = call_union [t1, t2]

@ -298,13 +298,13 @@ run_union_tests group_builder setup call_union =
t1 = table_builder [["A", [0, 1, 20]]]
t2 = table_builder [["A", [True, False, True]]]

t1.at "A" . value_type . is_integer . should_be_true
setup.expect_integer_type <| t1.at "A"
t2.at "A" . value_type . should_equal Value_Type.Boolean

t3 = call_union [t1, t2]
expect_column_names ["A"] t3
Problems.assume_no_problems t3
t3.at "A" . value_type . is_integer . should_be_true
setup.expect_integer_type <| t3.at "A"
t3.at "A" . to_vector . should_equal [0, 1, 20, 1, 0, 1]

t4 = table_builder [["A", [1.5, 0.0, 2.0]]]

@ -397,7 +397,7 @@ run_union_tests group_builder setup call_union =
expect_column_names ["A", "B", "C"] table
# If type was matched - the columns are merged as is:
table.at "A" . to_vector . should_equal [1, 2, 3, 4, 5, 6]
table.at "A" . value_type . is_integer . should_be_true
setup.expect_integer_type <| table.at "A"
# If mixed, they are converted to text representation:
table.at "B" . to_vector . should_equal ["a", "b", "c", "1", "2", "3"]
table.at "B" . value_type . is_text . should_be_true

@ -2,6 +2,8 @@ from Standard.Base import all

import Standard.Database.Internal.Replace_Params.Replace_Params

from Standard.Test import Test

import project.Common_Table_Operations.Add_Row_Number_Spec
import project.Common_Table_Operations.Aggregate_Spec
import project.Common_Table_Operations.Column_Name_Edge_Cases_Spec

@ -64,7 +66,9 @@ type Test_Setup
- light_table_builder: Like `table_builder`, but for Database backends it
will not create a new table, but instead just create a query with the values inlined.
Note that it has fewer guarantees about column types, but is faster.
Config prefix table_fn empty_table_fn (table_builder : (Vector Any -> (Any|Nothing)) -> Any) materialize is_database test_selection aggregate_test_selection create_connection_func light_table_builder
- is_integer_type: A predicate specifying if the given backend treats the
given type as an integer type. See `SQL_Type_Mapping.is_integer_type` for more information.
Config prefix table_fn empty_table_fn (table_builder : (Vector Any -> (Any|Nothing)) -> Any) materialize is_database test_selection aggregate_test_selection create_connection_func light_table_builder is_integer_type=(.is_integer)
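# The default predicate just checks `Value_Type.is_integer`. A backend whose
# integer columns materialize as Decimal (e.g. Snowflake, where integers are
# stored as NUMBER(38, 0)) could plug in a custom predicate along these lines
# (an illustrative sketch, not the actual backend setup code):
#
#     snowflake_is_integer vt = case vt of
#         Value_Type.Decimal _ scale -> scale == 0
#         _ -> vt.is_integer
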
## Specifies if the given Table backend supports custom Enso types.

@ -72,6 +76,13 @@ type Test_Setup
supports_custom_objects : Boolean
supports_custom_objects self = self.is_database.not

## Checks if the provided column has integer type as defined by the backend.
expect_integer_type self column =
loc = Meta.get_source_location 1
value_type = column.value_type
if self.is_integer_type value_type then column else
Test.fail ("Expected column ["+column.name+"] to have integer type, but got "+value_type.to_display_text+" (at " + loc + ").")

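# Usage sketch: tests call this helper instead of asserting on the value type
# directly, e.g.
#
#     setup.expect_integer_type <| t.at "X"
#
# rather than `t.at "X" . value_type . is_integer . should_be_true`, so a
# backend which reports its integer columns as e.g. Decimal still passes.
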
type Test_Selection
## The configuration specifying what features are supported by a given
backend, driving what kind of test suites should be enabled.

@ -136,7 +147,10 @@ type Test_Selection
databases may have this flag disabled to save time.
If the `ENSO_ADVANCED_EDGE_CASE_TESTS` environment variable is set to `True`,
the advanced tests are run regardless of this setting.
Config supports_case_sensitive_columns=True order_by=True natural_ordering=False case_insensitive_ordering=True order_by_unicode_normalization_by_default=False case_insensitive_ascii_only=False allows_mixed_type_comparisons=True supports_unicode_normalization=False is_nan_and_nothing_distinct=True distinct_returns_first_row_from_group_if_ordered=True date_time=True fixed_length_text_columns=False length_restricted_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=False different_size_integer_types=True supports_8bit_integer=False supports_decimal_type=False supports_time_duration=False supports_nanoseconds_in_time=False supports_mixed_columns=False supported_replace_params=Nothing run_advanced_edge_case_tests_by_default=True
- supports_date_time_without_timezone: Specifies if the backend supports
date/time operations without a timezone (true for most Database backends).
Defaults to `.is_integer`.
Config supports_case_sensitive_columns=True order_by=True natural_ordering=False case_insensitive_ordering=True order_by_unicode_normalization_by_default=False case_insensitive_ascii_only=False allows_mixed_type_comparisons=True supports_unicode_normalization=False is_nan_and_nothing_distinct=True distinct_returns_first_row_from_group_if_ordered=True date_time=True fixed_length_text_columns=False length_restricted_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=False different_size_integer_types=True supports_8bit_integer=False supports_decimal_type=False supports_time_duration=False supports_nanoseconds_in_time=False supports_mixed_columns=False supported_replace_params=Nothing run_advanced_edge_case_tests_by_default=True supports_date_time_without_timezone=False

## Specifies if the advanced edge case tests shall be run.

@ -3,6 +3,8 @@ import Standard.Base.Errors.Common.Assertion_Error
import Standard.Base.Errors.Empty_Error.Empty_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument

from Standard.Table import Value_Type

from Standard.Database.DB_Table import MAX_LITERAL_ELEMENT_COUNT

from Standard.Test import all

@ -12,41 +14,32 @@ from project.Common_Table_Operations.Util import run_default_backend
main = run_default_backend add_specs

type Data
Value ~data

connection self = self.data.at 0
dummy_table self = self.data.at 1

setup test_setup create_connection_fn =
connection = (create_connection_fn Nothing)
dummy_table = test_setup.table_builder [['x', [1]]] connection=connection
Data.Value [connection, dummy_table]

teardown self = self.connection.close
Value ~dummy_table

setup table_builder =
Data.Value (table_builder [['x', [1]]])

add_specs suite_builder setup =
prefix = setup.prefix
create_connection_fn = setup.create_connection_func
suite_builder.group prefix+"Table.make_table_from_dictionary/vectors" group_builder->
data = Data.setup setup create_connection_fn

group_builder.teardown <|
data.teardown

group_builder.specify "should be able to create a literal table with one column" <|
vecs = [[0], [3], [6]]
data = Data.setup setup.table_builder
group_builder.specify "should be able to create a literal table with one row" <|
vecs = [[0], [3.5], ['ABC']]
t = data.dummy_table.make_table_from_vectors vecs ['x', 'y', 'z']
t.at 'x' . to_vector . should_equal [0]
t.at 'y' . to_vector . should_equal [3]
t.at 'z' . to_vector . should_equal [6]
t.at 'y' . to_vector . should_equal [3.5]
t.at 'z' . to_vector . should_equal ['ABC']

group_builder.specify "should be able to create a literal table with three columns" <|
vecs = [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
t = data.dummy_table.make_table_from_vectors vecs ['x', 'y', 'z']
setup.expect_integer_type <| t.at 'x'
t.at 'y' . value_type . should_be_a (Value_Type.Float ...)
t.at 'z' . value_type . should_be_a (Value_Type.Char ...)

group_builder.specify "should be able to create a literal table with three rows" <|
vecs = [[0, 1, 2], [3, 4, 5], ['a', 'b', 'c']]
t = data.dummy_table.make_table_from_vectors vecs ['x', 'y', 'z'] . read . order_by 'x'
t.at 'x' . to_vector . should_equal [0, 1, 2]
t.at 'y' . to_vector . should_equal [3, 4, 5]
t.at 'z' . to_vector . should_equal [6, 7, 8]
t.at 'z' . to_vector . should_equal ['a', 'b', 'c']

if setup.is_database then
group_builder.specify "should not be able to create a literal table with empty columns (db backend)" <|

@ -66,6 +59,39 @@ add_specs suite_builder setup =
vecs2 = [[], [3, 4, 5], [6, 7, 8]]
data.dummy_table.make_table_from_vectors vecs2 ['x', 'y', 'z'] . read . should_fail_with Illegal_Argument

if setup.test_selection.date_time then
group_builder.specify "should be able to create a table of date-time values" <|
vecs = [[Date.new 2021 01 02], [Time_Of_Day.new 12 35 4], [Date_Time.new 2020 04 02 18 55], ['2001-01-01'], ['01:01:01']]
t = data.dummy_table.make_table_from_vectors vecs ['date', 'time', 'datetime', 'date-like-text', 'time-like-text']
m = t.read
m.at 'date' . to_vector . should_equal [Date.new 2021 01 02]
m.at 'time' . to_vector . should_equal [Time_Of_Day.new 12 35 4]
m.at 'date-like-text' . to_vector . should_equal ['2001-01-01']
m.at 'time-like-text' . to_vector . should_equal ['01:01:01']

t.at 'date' . value_type . should_equal Value_Type.Date
t.at 'time' . value_type . should_equal Value_Type.Time
t.at 'datetime' . value_type . should_be_a (Value_Type.Date_Time ...)
t.at 'date-like-text' . value_type . should_be_a (Value_Type.Char ...)
t.at 'time-like-text' . value_type . should_be_a (Value_Type.Char ...)

# The types should be preserved after an 'identity' operation:
new_cols = ['date', 'time', 'datetime', 'date-like-text', 'time-like-text'].map name->
c = t.at name
(c == c).iif c c . rename name
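# Note: `(c == c).iif c c` is a value-level no-op, but it rebuilds each column
# as a derived expression, so the assertions below exercise the type inference
# of the expression engine rather than just echoing the stored column type.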
t2 = new_cols.fold t acc-> c-> acc.set c set_mode=..Update
t2.at 'date' . value_type . should_equal Value_Type.Date
t2.at 'time' . value_type . should_equal Value_Type.Time
t2.at 'datetime' . value_type . should_be_a (Value_Type.Date_Time ...)
t2.at 'date-like-text' . value_type . should_be_a (Value_Type.Char ...)
t2.at 'time-like-text' . value_type . should_be_a (Value_Type.Char ...)

m2 = t2.read
m2.at 'date' . to_vector . should_equal [Date.new 2021 01 02]
m2.at 'time' . to_vector . should_equal [Time_Of_Day.new 12 35 4]
m2.at 'date-like-text' . to_vector . should_equal ['2001-01-01']
m2.at 'time-like-text' . to_vector . should_equal ['01:01:01']

group_builder.specify "should be able to create a literal table from a dictionary" <|
map = Dictionary.from_vector [['x', 1], ['y', 2], ['z', 3]]
t = data.dummy_table.make_table_from_dictionary map 'k' 'v' . sort 'v'

@ -7,17 +7,16 @@ from Standard.Table.Errors import all
|
||||
from Standard.Test import all
|
||||
|
||||
from project.Common_Table_Operations.Util import expect_column_names, run_default_backend
|
||||
import project.Common_Table_Operations.Util
|
||||
|
||||
main filter=Nothing = run_default_backend add_specs filter
|
||||
|
||||
type Select_Columns_Data
|
||||
Value ~data
|
||||
|
||||
connection self = self.data.at 0
|
||||
table self = self.data.at 1
|
||||
table self = self.data.at 0
|
||||
|
||||
setup create_connection_fn table_builder = Select_Columns_Data.Value <|
|
||||
connection = create_connection_fn Nothing
|
||||
setup table_builder = Select_Columns_Data.Value <|
|
||||
table =
|
||||
col1 = ["foo", [1,2,3]]
|
||||
col2 = ["bar", [4,5,6]]
|
||||
@ -26,39 +25,29 @@ type Select_Columns_Data
|
||||
col5 = ["foo 2", [13,14,15]]
|
||||
col6 = ["ab.+123", [16,17,18]]
|
||||
col7 = ["abcd123", [19,20,21]]
|
||||
table_builder [col1, col2, col3, col4, col5, col6, col7] connection=connection
|
||||
[connection, table]
|
||||
|
||||
teardown self =
|
||||
self.connection.close
|
||||
table_builder [col1, col2, col3, col4, col5, col6, col7]
|
||||
[table]
|
||||
|
||||
type Mixed_Columns_Data
|
||||
Value ~data
|
||||
|
||||
connection self = self.data.at 0
|
||||
table self = self.data.at 1
|
||||
table self = self.data.at 0
|
||||
|
||||
setup create_connection_fn table_builder = Mixed_Columns_Data.Value <|
|
||||
connection = create_connection_fn Nothing
|
||||
setup table_builder = Mixed_Columns_Data.Value <|
|
||||
table =
|
||||
col1 = ["int", [1, 2, 3]]
|
||||
col2 = ["float", [4.1, 5.2, 6.3]]
|
||||
col3 = ["text", ["A", "B", "C"]]
|
||||
col4 = ["bool", [True, False, True]]
|
||||
table_builder [col1, col2, col3, col4] connection=connection
|
||||
[connection, table]
|
||||
|
||||
teardown self =
|
||||
self.connection.close
|
||||
table_builder [col1, col2, col3, col4]
|
||||
[table]
|
||||
|
||||
type Sort_Columns_Data
|
||||
Value ~data
|
||||
|
||||
connection self = self.data.at 0
|
||||
table self = self.data.at 1
|
||||
table self = self.data.at 0
|
||||
|
||||
setup create_connection_fn table_builder = Sort_Columns_Data.Value <|
|
||||
connection = create_connection_fn Nothing
|
||||
setup table_builder = Sort_Columns_Data.Value <|
|
||||
table =
|
||||
col1 = ["foo 21", [1,2,3]]
|
||||
col2 = ["foo 100", [4,5,6]]
|
||||
@ -67,44 +56,30 @@ type Sort_Columns_Data
|
||||
col5 = ["foo 3", [13,14,15]]
|
||||
col6 = ["foo 001", [16,17,18]]
|
||||
col7 = ["bar", [19,20,21]]
|
||||
table_builder [col1, col2, col3, col4, col5, col6, col7] connection=connection
|
||||
[connection, table]
|
||||
|
||||
teardown self =
|
||||
self.connection.close
|
||||
|
||||
table_builder [col1, col2, col3, col4, col5, col6, col7]
|
||||
[table]
|
||||
|
||||
type Rename_Columns_Data
|
||||
Value ~data
|
||||
|
||||
connection self = self.data.at 0
|
||||
table self = self.data.at 1
|
||||
table self = self.data.at 0
|
||||
|
||||
setup create_connection_fn table_builder = Rename_Columns_Data.Value <|
|
||||
connection = create_connection_fn Nothing
|
||||
setup table_builder = Rename_Columns_Data.Value <|
|
||||
table =
|
||||
col1 = ["alpha", [1,2,3]]
|
||||
col2 = ["beta", [4,5,6]]
|
||||
col3 = ["gamma", [16,17,18]]
|
||||
col4 = ["delta", [19,20,21]]
|
||||
table_builder [col1, col2, col3, col4] connection=connection
|
||||
[connection, table]
|
||||
|
||||
teardown self =
|
||||
self.connection.close
|
||||
|
||||
table_builder [col1, col2, col3, col4]
|
||||
[table]
|
||||
|
||||
add_specs suite_builder setup =
    prefix = setup.prefix
    table_builder = setup.table_builder
    create_connection_fn = setup.create_connection_func
    table_builder = Util.build_sorted_table setup
    test_selection = setup.test_selection

    suite_builder.group prefix+"Table.select_columns" group_builder->
        data = Select_Columns_Data.setup create_connection_fn table_builder

        group_builder.teardown <|
            data.teardown
        data = Select_Columns_Data.setup table_builder

        group_builder.specify "should work as shown in the doc examples" <|
            expect_column_names ["foo", "bar"] <| data.table.select_columns ["bar", "foo"]
@ -222,11 +197,8 @@ add_specs suite_builder setup =
            r2.catch.cause . should_be_a Missing_Input_Columns
            r2.catch.to_display_text . should_equal "No columns in the result, because of another problem: The criteria 'hmmm' did not match any columns."

    suite_builder.group prefix+"Table.select_columns_by_type and Table.remove_columns_by_type" group_builder->
        data = Mixed_Columns_Data.setup create_connection_fn table_builder

        group_builder.teardown <|
            data.teardown
    suite_builder.group prefix+"Table.select_columns By_Type and Table.remove_columns By_Type" group_builder->
        data = Mixed_Columns_Data.setup table_builder

        group_builder.specify "should be able to select by type of columns" <|
            expect_column_names ["int"] <| data.table.select_columns [..By_Type ..Integer]
@ -235,13 +207,6 @@ add_specs suite_builder setup =
            expect_column_names ["text"] <| data.table.select_columns [..By_Type ..Char]
            expect_column_names ["bool"] <| data.table.select_columns [..By_Type ..Boolean]

        group_builder.specify "should be able to select by type of columns (deprecated)" <|
            expect_column_names ["int"] <| data.table.select_columns_by_type [Value_Type.Integer]
            expect_column_names ["float"] <| data.table.select_columns_by_type [Value_Type.Float]
            expect_column_names ["int", "float"] <| data.table.select_columns_by_type [Value_Type.Integer, Value_Type.Float]
            expect_column_names ["text"] <| data.table.select_columns_by_type [Value_Type.Char]
            expect_column_names ["bool"] <| data.table.select_columns_by_type [Value_Type.Boolean]

        group_builder.specify "should be able to drop by type of columns" <|
            expect_column_names ["float", "text", "bool"] <| data.table.remove_columns [..By_Type ..Integer]
            expect_column_names ["int", "text", "bool"] <| data.table.remove_columns [..By_Type ..Float]
@ -249,35 +214,8 @@ add_specs suite_builder setup =
            expect_column_names ["int", "float", "bool"] <| data.table.remove_columns [..By_Type ..Char]
            expect_column_names ["int", "float", "text"] <| data.table.remove_columns [..By_Type ..Boolean]

        group_builder.specify "should be able to drop by type of columns (deprecated)" <|
            expect_column_names ["float", "text", "bool"] <| data.table.remove_columns_by_type [Value_Type.Integer]
            expect_column_names ["int", "text", "bool"] <| data.table.remove_columns_by_type [Value_Type.Float]
            expect_column_names ["text", "bool"] <| data.table.remove_columns_by_type [Value_Type.Integer, Value_Type.Float]
            expect_column_names ["int", "float", "bool"] <| data.table.remove_columns_by_type [Value_Type.Char]
            expect_column_names ["int", "float", "text"] <| data.table.remove_columns_by_type [Value_Type.Boolean]

        group_builder.specify "should be able to select by strict type (deprecated)" <|
            expect_column_names ["int"] <| data.table.select_columns_by_type [Value_Type.Integer] strict=True
            data.table.select_columns_by_type [Value_Type.Integer Bits.Bits_16] strict=True . should_fail_with No_Output_Columns
            expect_column_names ["float"] <| data.table.select_columns_by_type [Value_Type.Float] strict=True
            data.table.select_columns_by_type [Value_Type.Float Bits.Bits_32] strict=True . should_fail_with No_Output_Columns
            expect_column_names ["text"] <| data.table.select_columns_by_type [Value_Type.Char] strict=True
            data.table.select_columns_by_type [Value_Type.Char 3 False] strict=True . should_fail_with No_Output_Columns

        group_builder.specify "should be able to remove by strict type (deprecated)" <|
            expect_column_names ["float", "text", "bool"] <| data.table.remove_columns_by_type [Value_Type.Integer] strict=True
            expect_column_names ["int", "float", "text", "bool"] <| data.table.remove_columns_by_type [Value_Type.Integer Bits.Bits_16] strict=True
            expect_column_names ["int", "text", "bool"] <| data.table.remove_columns_by_type [Value_Type.Float] strict=True
            expect_column_names ["int", "float", "text", "bool"] <| data.table.remove_columns_by_type [Value_Type.Float Bits.Bits_32] strict=True
            expect_column_names ["int", "float", "bool"] <| data.table.remove_columns_by_type [Value_Type.Char] strict=True
            expect_column_names ["int", "float", "text", "bool"] <| data.table.remove_columns_by_type [Value_Type.Char 3 False] strict=True


    suite_builder.group prefix+"Table.remove_columns" group_builder->
        data = Select_Columns_Data.setup create_connection_fn table_builder

        group_builder.teardown <|
            data.teardown
        data = Select_Columns_Data.setup table_builder

        group_builder.specify "should work as shown in the doc examples" <|
            expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] <| data.table.remove_columns ["bar", "foo"]
@ -362,10 +300,7 @@ add_specs suite_builder setup =
            t1.catch.cause . should_equal Nothing

    suite_builder.group prefix+"Table.reorder_columns" group_builder->
        data = Select_Columns_Data.setup create_connection_fn table_builder

        group_builder.teardown <|
            data.teardown
        data = Select_Columns_Data.setup table_builder

        group_builder.specify "should work as shown in the doc examples" <|
            expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] <| data.table.reorder_columns "foo" Position.After_Other_Columns
@ -434,20 +369,14 @@ add_specs suite_builder setup =
            err.should_fail_with Missing_Input_Columns

    suite_builder.group prefix+"Table.reorder_columns by type" group_builder->
        data = Mixed_Columns_Data.setup create_connection_fn table_builder

        group_builder.teardown <|
            data.teardown
        data = Mixed_Columns_Data.setup table_builder

        group_builder.specify "should correctly handle By_Type matching" <|
            expect_column_names ["float", "text", "bool", "int"] <| data.table.reorder_columns [..By_Type ..Integer] Position.After_Other_Columns
            expect_column_names ["float", "text", "int", "bool"] <| data.table.reorder_columns [..By_Type ..Integer, ..By_Type ..Boolean] Position.After_Other_Columns

    suite_builder.group prefix+"Table.sort_columns" group_builder->
        data = Sort_Columns_Data.setup create_connection_fn table_builder

        group_builder.teardown <|
            data.teardown
        data = Sort_Columns_Data.setup table_builder

        group_builder.specify "should work as shown in the doc examples" <|
            sorted = data.table.sort_columns
@ -467,10 +396,7 @@ add_specs suite_builder setup =
            expect_column_names ["foo 100", "foo 21", "foo 3", "Foo 2", "foo 1", "foo 001", "bar"] <| data.table.sort_columns Sort_Direction.Descending text_ordering=(Text_Ordering.Case_Insensitive sort_digits_as_numbers=True)

    suite_builder.group prefix+"Table.rename_columns" group_builder->
        data = Rename_Columns_Data.setup create_connection_fn table_builder

        group_builder.teardown <|
            data.teardown
        data = Rename_Columns_Data.setup table_builder

        group_builder.specify "should work as shown in the doc examples" <|
            expect_column_names ["FirstColumn", "beta", "gamma", "delta"] <|

@ -3,25 +3,13 @@ from Standard.Table import Table

from Standard.Test import all

import project.Common_Table_Operations.Main.Test_Setup
import project.Common_Table_Operations.Main.Test_Selection
import project.Common_Table_Operations.Aggregate_Spec
from project.In_Memory.Common_Spec import in_memory_setup

expect_column_names names table ignore_order=False =
    case ignore_order of
        False -> table.columns . map .name . should_equal names frames_to_skip=2
        True -> table.columns . map .name . sort . should_equal names.sort frames_to_skip=2
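
# A minimal usage sketch (the table below is a made-up example, not test data):
#
#     t = Table.new [["b", [1]], ["a", [2]]]
#     expect_column_names ["b", "a"] t
#     expect_column_names ["a", "b"] t ignore_order=True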

type Dummy_Connection
    Value

    close self = Nothing

    drop_table self table =
        _ = table
        Nothing
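
# The in-memory backend has no real database connection, so this stub only has
# to satisfy the two calls the shared specs make on a connection: `close` and
# `drop_table`, both of which are no-ops here.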

## These tests are parametrized by various backends and so they should be run in
   the context of a specific backend. However, for the purpose of testing we
   provide a shortcut that allows running these tests with the in-memory backend.
@ -32,27 +20,8 @@ type Dummy_Connection
   - filter: An optional Text filter for filtering out test specs and groups. See the
     docs of `Test.Suite.run_with_filter`.
run_default_backend add_specs filter=Nothing =
    selection = Test_Selection.Config supports_case_sensitive_columns=True natural_ordering=True case_insensitive_ordering=True order_by_unicode_normalization_by_default=True supports_unicode_normalization=True supports_time_duration=True supports_nanoseconds_in_time=True supports_mixed_columns=True fixed_length_text_columns=True supports_8bit_integer=True
    aggregate_selection = Aggregate_Spec.Test_Selection.Config

    table_fn _ = (enso_project.data / "data.csv") . read

    empty_table_fn _ =
        table = table_fn Nothing
        table.take 0

    materialize = x->x

    table_builder cols connection=Nothing =
        _ = connection
        Table.new cols

    create_connection_func _ =
        Dummy_Connection.Value

    setup = Test_Setup.Config "[In-Memory] " table_fn empty_table_fn table_builder materialize is_database=False test_selection=selection aggregate_test_selection=aggregate_selection create_connection_func table_builder
    suite = Test.build suite_builder->
        add_specs suite_builder setup
        add_specs suite_builder in_memory_setup
    suite.run_with_filter filter
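
# A hedged usage sketch - the spec module name below is assumed for illustration:
#
#     import project.Common_Table_Operations.Select_Columns_Spec
#
#     main = run_default_backend Select_Columns_Spec.add_specs filter="select_columns"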

@ -97,3 +66,12 @@ Error.should_equal_tz_agnostic self other =
    loc = Meta.get_source_location 1
    _ = other
    Test.fail "Expected a vector but got a dataflow error "+self.catch.to_display_text+" (at "+loc+")."
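# This is the `Error` counterpart of the vector comparison helper: when `self`
# is a dataflow error instead of a vector, the spec fails with the error's
# display text.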

## PRIVATE
   Builds a table ensuring that the rows are in the order as given.
build_sorted_table setup table_structure =
    # Workaround for https://github.com/enso-org/enso/issues/10321
    if setup.prefix.contains "Snowflake" . not then setup.table_builder table_structure else
        row_count = table_structure.first.second.length
        new_structure = table_structure+[["row_id", (0.up_to row_count) . to_vector]]
        setup.table_builder new_structure . sort "row_id" . remove_columns ["row_id"]
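
# An illustrative trace of the Snowflake branch (values made up): for
# `build_sorted_table setup [["x", [30, 10, 20]]]` the structure becomes
# [["x", [30, 10, 20]], ["row_id", [0, 1, 2]]], the table is sorted by "row_id"
# to pin the original row order, and "row_id" is then removed again.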

@ -690,7 +690,7 @@ add_postgres_specs suite_builder create_connection_fn db_name =

    Common_Spec.add_specs suite_builder prefix create_connection_fn

    common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by_unicode_normalization_by_default=True allows_mixed_type_comparisons=False fixed_length_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=True supports_decimal_type=True supported_replace_params=supported_replace_params run_advanced_edge_case_tests_by_default=True
    common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by_unicode_normalization_by_default=True allows_mixed_type_comparisons=False fixed_length_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=True supports_decimal_type=True supported_replace_params=supported_replace_params run_advanced_edge_case_tests_by_default=True supports_date_time_without_timezone=True
    aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last_row_order=False aggregation_problems=False
    agg_in_memory_table = (enso_project.data / "data.csv") . read

@ -704,6 +704,7 @@ add_postgres_specs suite_builder create_connection_fn db_name =

    postgres_specific_spec suite_builder create_connection_fn db_name setup
    Common_Table_Operations.Main.add_specs suite_builder setup
    Upload_Spec.add_specs suite_builder setup create_connection_fn

## PRIVATE
supported_replace_params : Hashset Replace_Params
@ -761,7 +762,6 @@ add_table_specs suite_builder =
    Postgres_Type_Mapping_Spec.add_specs suite_builder connection_builder

    Transaction_Spec.add_specs suite_builder connection_builder "[PostgreSQL] "
    Upload_Spec.add_specs suite_builder connection_builder "[PostgreSQL] "

    suite_builder.group "[PostgreSQL] Secrets in connection settings" group_builder->
        cloud_setup = Cloud_Tests_Setup.prepare

@ -310,7 +310,7 @@ sqlite_specific_spec suite_builder prefix create_connection_func setup =
type Lazy_Ref
    Value ~get
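
# The `~get` field is suspended, so the wrapped computation runs only on first
# access - this is what lets a single default connection be created lazily and
# shared by the tests below.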

sqlite_spec suite_builder prefix create_connection_func =
sqlite_spec suite_builder prefix create_connection_func persistent_connector =
    name_counter = Ref.new 0
    # We keep a default connection to avoid creating connections each time.
    default_connection = Lazy_Ref.Value (create_connection_func Nothing)
@ -353,6 +353,7 @@ sqlite_spec suite_builder prefix create_connection_func =
    setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_func light_table_builder=light_table_builder
    sqlite_specific_spec suite_builder prefix create_connection_func setup
    Common_Table_Operations.Main.add_specs suite_builder setup
    Upload_Spec.add_specs suite_builder setup create_connection_func persistent_connector=persistent_connector


## PRIVATE
@ -403,14 +404,12 @@ add_specs suite_builder =
    # https://github.com/enso-org/enso/issues/9437
    database_file = Database_File.create

    sqlite_spec suite_builder in_file_prefix (_ -> create_file_connection database_file.file)
    sqlite_spec suite_builder in_file_prefix (_ -> create_file_connection database_file.file) persistent_connector=True
    Transaction_Spec.add_specs suite_builder (_ -> create_file_connection database_file.file) in_file_prefix
    Upload_Spec.add_specs suite_builder (_ -> create_file_connection database_file.file) in_file_prefix

    in_memory_prefix = "[SQLite In-Memory] "
    sqlite_spec suite_builder in_memory_prefix (_ -> create_inmem_connection)
    sqlite_spec suite_builder in_memory_prefix (_ -> create_inmem_connection) persistent_connector=False
    Transaction_Spec.add_specs suite_builder (_ -> create_inmem_connection) in_memory_prefix
    Upload_Spec.add_specs suite_builder (_ -> create_inmem_connection) in_memory_prefix persistent_connector=False

    SQLite_Type_Mapping_Spec.add_specs suite_builder

@ -63,12 +63,12 @@ database_table_builder name_prefix args primary_key=[] connection =
   Arguments:
   - make_new_connection: a function that takes `Nothing` and returns a new
     connection.
   - prefix: a string that will be prepended to the test names.
   - persistent_connector: specifies if the database is persisted between
     connections. Should be `True` for all databases except SQLite in the
     `In_Memory` mode, in which every re-connect creates a separate in-memory
     database, so features relying on persistence cannot really be tested.
add_specs suite_builder make_new_connection prefix persistent_connector=True =
add_specs suite_builder setup make_new_connection persistent_connector=True =
    prefix = setup.prefix
    suite_builder.group prefix+"Creating an empty table" group_builder->
        data = Data.setup make_new_connection

@ -79,7 +79,7 @@ add_specs suite_builder make_new_connection prefix persistent_connector=True =
            t = data.connection.create_table (Name_Generator.random_name "creating-table") structure=[Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "Y" Value_Type.Char] temporary=True
            t.column_names . should_equal ["X", "Y"]
            t.at "X" . to_vector . should_equal []
            t.at "X" . value_type . is_integer . should_be_true
            setup.expect_integer_type <| t.at "X"
            t.at "Y" . to_vector . should_equal []
            t.at "Y" . value_type . is_text . should_be_true
            t.row_count . should_equal 0
@ -90,7 +90,7 @@ add_specs suite_builder make_new_connection prefix persistent_connector=True =
            db_table = data.connection.create_table (Name_Generator.random_name "creating-table") structure=t temporary=True
            db_table.column_names . should_equal ["X", "Y"]
            db_table.at "X" . to_vector . should_equal []
            db_table.at "X" . value_type . is_integer . should_be_true
            setup.expect_integer_type <| db_table.at "X"
            db_table.at "Y" . to_vector . should_equal []
            db_table.at "Y" . value_type . is_text . should_be_true
            db_table.row_count . should_equal 0
@ -103,7 +103,7 @@ add_specs suite_builder make_new_connection prefix persistent_connector=True =
            db_table = data.connection.create_table (Name_Generator.random_name "creating-table") structure=input_db_table temporary=True
            db_table.column_names . should_equal ["X", "Y"]
            db_table.at "X" . to_vector . should_equal []
            db_table.at "X" . value_type . is_integer . should_be_true
            setup.expect_integer_type <| db_table.at "X"
            db_table.at "Y" . to_vector . should_equal []
            db_table.at "Y" . value_type . is_text . should_be_true
            db_table.row_count . should_equal 0
@ -221,11 +221,11 @@ add_specs suite_builder make_new_connection prefix persistent_connector=True =
            Panic.with_finalizer (data.connection.drop_table name) <|
                t1 = tmp_connection.query (SQL_Query.Table_Name name)
                t1.column_names . should_equal ["X"]
                t1.at "X" . value_type . is_integer . should_be_true
                setup.expect_integer_type <| t1.at "X"
                tmp_connection.close
                t2 = data.connection.query (SQL_Query.Table_Name name)
                t2.column_names . should_equal ["X"]
                t2.at "X" . value_type . is_integer . should_be_true
                setup.expect_integer_type <| t2.at "X"

        group_builder.specify "should be able to specify a primary key" <|
            name = Name_Generator.random_name "primary_key 1"
@ -308,7 +308,7 @@ add_specs suite_builder make_new_connection prefix persistent_connector=True =
            db_table.column_names . should_equal ["X", "Y"]
            db_table.at "X" . to_vector . should_equal [1, 2, 3]
            db_table.at "Y" . to_vector . should_equal ['a', 'b', 'c']
            db_table.at "X" . value_type . is_integer . should_be_true
            setup.expect_integer_type <| db_table.at "X"
            db_table.at "Y" . value_type . is_text . should_be_true
            db_table.row_count . should_equal 3
            db_table.is_trivial_query . should_be_true
@ -472,7 +472,7 @@ add_specs suite_builder make_new_connection prefix persistent_connector=True =
            copied_table.is_trivial_query . should_be_true
            name = copied_table.name
            Panic.with_finalizer (data.connection.drop_table name) <|
                copied_table.at "X" . value_type . is_integer . should_be_true
                setup.expect_integer_type <| copied_table.at "X"
                copied_table.at "Y" . value_type . is_text . should_be_true
                copied_table.at "Z" . value_type . is_floating_point . should_be_true

@ -1473,5 +1473,3 @@ wait_until_temporary_table_is_deleted_after_closing_connection connection table_
        Thread.sleep retry_interval_ms
        @Tail_Call go (ix + 1)
    go 0


@ -7,33 +7,37 @@ from Standard.Test import Test
import project.Common_Table_Operations

type Dummy_Connection
    close = Nothing
    Value

add_specs suite_builder =
    selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True natural_ordering=True case_insensitive_ordering=True order_by_unicode_normalization_by_default=True supports_unicode_normalization=True supports_time_duration=True supports_nanoseconds_in_time=True supports_mixed_columns=True fixed_length_text_columns=True supports_8bit_integer=True run_advanced_edge_case_tests_by_default=True
    close self = Nothing

    drop_table self table =
        _ = table
        Nothing

in_memory_setup =
    selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True natural_ordering=True case_insensitive_ordering=True order_by_unicode_normalization_by_default=True supports_unicode_normalization=True supports_time_duration=True supports_nanoseconds_in_time=True supports_mixed_columns=True fixed_length_text_columns=True supports_8bit_integer=True
    aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config

    agg_table_fn = _ ->
        (enso_project.data / "data.csv") . read

    empty_table_fn = _ ->
        table = (enso_project.data / "data.csv") . read
    agg_table_fn _ = (enso_project.data / "data.csv") . read
    empty_table_fn _ =
        table = agg_table_fn Nothing
        table.take 0

    materialize = x->x

    table_builder columns connection=Nothing =
    table_builder cols connection=Nothing =
        _ = connection
        Table.new columns
        Table.new cols
    light_table_builder columns =
        Table.new columns
    create_connection_func _ =
        Dummy_Connection.Value

    create_connection_func = _-> Dummy_Connection

    setup = Common_Table_Operations.Main.Test_Setup.Config "[In-Memory] " agg_table_fn empty_table_fn table_builder materialize is_database=False test_selection=selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_func light_table_builder=light_table_builder

    Common_Table_Operations.Main.add_specs suite_builder setup
    Common_Table_Operations.Main.Test_Setup.Config "[In-Memory] " agg_table_fn empty_table_fn table_builder materialize is_database=False test_selection=selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_func light_table_builder=light_table_builder
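
# `in_memory_setup` is the value imported by `run_default_backend` (via
# `from project.In_Memory.Common_Spec import in_memory_setup`) to run the shared
# table-operation specs against the in-memory backend.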

add_specs suite_builder =
    Common_Table_Operations.Main.add_specs suite_builder in_memory_setup

main filter=Nothing =
    suite = Test.build suite_builder->