Complete migration to Dialect/Feature flags + SQLServer support for Datetime (#11527)

* Supports_Sort_Digits_As_Numbers

* case_insensitive_ordering

* Case_Insensitive_Ordering

* Case_Insensitive_Ordering

* Enable tests

* Case_Insensitive_Non_Ascii

* Order_By_Unicode_Normalization_By_Default

* Allows_Mixed_Type_Comparisons

* Supports_Unicode_Normalization

* NaN_Non_Comparable

* Distinct_Returns_First_Row_From_Group_If_Ordered

* Datetime support

* refactor

* refactor

* Refactor

* Move remaining flags

* Cleanup

* sbt javafmtAll

* Fix

* Fix tests

* Code review changes

* More date_diff month tests

* Fix snowflake

* Fix snowflake tests

Bypassing failing test.
This commit is contained in:
AdRiley 2024-11-20 10:41:49 +00:00 committed by somebody1234
parent 3a33e81990
commit 4676ff1950
34 changed files with 389 additions and 290 deletions

View File

@ -162,6 +162,9 @@ type Redshift_Dialect
prepare_fetch_types_query self expression context =
Base_Generator.default_fetch_types_query self expression context
## PRIVATE
generate_collate self collation_name:Text -> Text = Base_Generator.default_generate_collate collation_name
## PRIVATE
check_aggregate_support : Aggregate_Column -> Boolean ! Unsupported_Database_Operation
check_aggregate_support self aggregate =
@ -194,6 +197,26 @@ type Redshift_Dialect
Dialect_Flag.Supports_Case_Sensitive_Columns -> True
Dialect_Flag.Supports_Infinity -> True
Dialect_Flag.Case_Sensitive_Text_Comparison -> True
Dialect_Flag.Supports_Sort_Digits_As_Numbers -> False
Dialect_Flag.Case_Insensitive_Ordering -> False
Dialect_Flag.Order_By_Unicode_Normalization_By_Default -> True
Dialect_Flag.Allows_Mixed_Type_Comparisons -> False
Dialect_Flag.Supports_Unicode_Normalization -> False
Dialect_Flag.NaN_Non_Comparable -> True
Dialect_Flag.Distinct_Returns_First_Row_From_Group_If_Ordered -> True
Dialect_Flag.Date_Time -> True
Dialect_Flag.Text_Length_Limited_Columns -> False
Dialect_Flag.Fixed_Length_Text_Columns -> False
Dialect_Flag.Length_Restricted_Text_Columns -> True
Dialect_Flag.Removes_Trailing_Whitespace_Casting_From_Char_To_Varchar -> False
Dialect_Flag.Char_Max_Size_After_Substring_Kept -> True
Dialect_Flag.Different_Size_Integer_Types -> True
Dialect_Flag.Supports_8bit_Integer -> False
Dialect_Flag.Supports_Decimal_Type -> True
Dialect_Flag.Supports_Time_Duration -> False
Dialect_Flag.Supports_Nanoseconds_In_Time -> False
Dialect_Flag.Supports_Mixed_Columns -> False
Dialect_Flag.Supports_Date_Time_Without_Timezone -> False
## PRIVATE
The default table types to use when listing tables.

View File

@ -171,6 +171,12 @@ type Dialect
_ = [expression, context]
Unimplemented.throw "This is an interface only."
## PRIVATE
generate_collate self collation_name:Text -> Text =
_ = collation_name
Unimplemented.throw "This is an interface only."
## PRIVATE
Checks if the given aggregate is supported.

View File

@ -29,3 +29,87 @@ type Dialect_Flag
## PRIVATE
Specifies text comparisons are case sensitive by default.
Case_Sensitive_Text_Comparison
## PRIVATE
Specifies if the backend supports natural ordering operations.
Supports_Sort_Digits_As_Numbers
## PRIVATE
Specifies if the backend supports case insensitive ordering.
Case_Insensitive_Ordering
## PRIVATE
Specifies if the backend supports unicode normalization in its default ordering.
Order_By_Unicode_Normalization_By_Default
## PRIVATE
Specifies if comparisons between
   mixed types are allowed by a given backend. Some backends will allow
   such comparisons, when mixed type storage is allowed or by coercing to
   the target type; others will fail with a type error.
Allows_Mixed_Type_Comparisons
## PRIVATE
Specifies if the backend compares
strings taking Unicode Normalization into account, i.e. whether
's\u0301' is considered equal to 'ś'.
Supports_Unicode_Normalization
## PRIVATE
Specifies if NaN value is
treated as greater than all numbers. If `False`, `NaN` is expected to
yield False to both < and > comparisons.
NaN_Non_Comparable
## PRIVATE
If `order_by` was
applied before, the distinct operation will return the first row from
each group. Guaranteed in the in-memory backend, but may not be
supported by all databases.
Distinct_Returns_First_Row_From_Group_If_Ordered
## PRIVATE
Specifies if the backend supports date/time operations.
Date_Time
## PRIVATE
Specifies if the backend supports setting
a length limit on text columns.
Text_Length_Limited_Columns
## PRIVATE
Specifies if the backend supports fixed
length text columns.
Fixed_Length_Text_Columns
## PRIVATE
Specifies if the backend supports
length restrictions for text columns.
Length_Restricted_Text_Columns
## PRIVATE
If `SELECT concat('X', CAST(CAST(' ' AS CHAR(3)) AS VARCHAR(3)), 'X')`
   returns `XX`, then this should be set to True.
Removes_Trailing_Whitespace_Casting_From_Char_To_Varchar
## PRIVATE
Specifies how the max size of the char
type behaves after text_left/text_right.
If True the original size is kept, if False the size is reset.
Char_Max_Size_After_Substring_Kept
## PRIVATE
Specifies if the backend supports
integer types of various sizes, like 16-bit or 32-bit integers.
Different_Size_Integer_Types
## PRIVATE
Specifies if the backend supports 8-bit
integers.
Supports_8bit_Integer
## PRIVATE
Specifies if the backend supports the `Decimal`
high-precision type.
Supports_Decimal_Type
## PRIVATE
Specifies if the backend supports a
`Duration`/`Period` type.
Supports_Time_Duration
## PRIVATE
Specifies if the backend supports
nanosecond precision in time values.
Supports_Nanoseconds_In_Time
## PRIVATE
Specifies if the backend supports mixed-type
columns.
Supports_Mixed_Columns
## PRIVATE
Specifies if the backend supports
   date/time operations without a timezone (true for most Database backends).
Supports_Date_Time_Without_Timezone

View File

@ -166,7 +166,7 @@ type SQL_Generator
Nulls_Order.Last -> " NULLS LAST"
collation = case order_descriptor.collation of
Nothing -> ""
collation_name -> ' COLLATE "' + collation_name + '"'
collation_name -> dialect.generate_collate collation_name
base_expression = self.generate_expression dialect order_descriptor.expression
base_expression ++ collation ++ order_suffix ++ nulls_suffix
@ -725,6 +725,9 @@ default_fetch_types_query dialect expression context where_filter_always_false_l
empty_context = context.add_where_filters [SQL_Expression.Literal where_filter_always_false_literal]
dialect.generate_sql (Query.Select [["typed_column", expression]] empty_context)
## PRIVATE
default_generate_collate collation_name:Text -> Text = ' COLLATE "' + collation_name + '"'
## PRIVATE
Helper class for shortening the binder names generated for WITH clauses.

View File

@ -236,6 +236,9 @@ type Postgres_Dialect
prepare_fetch_types_query self expression context =
Base_Generator.default_fetch_types_query self expression context
## PRIVATE
generate_collate self collation_name:Text -> Text = Base_Generator.default_generate_collate collation_name
## PRIVATE
check_aggregate_support : Aggregate_Column -> Boolean ! Unsupported_Database_Operation
check_aggregate_support self aggregate =
@ -266,6 +269,26 @@ type Postgres_Dialect
Dialect_Flag.Supports_Case_Sensitive_Columns -> True
Dialect_Flag.Supports_Infinity -> True
Dialect_Flag.Case_Sensitive_Text_Comparison -> True
Dialect_Flag.Supports_Sort_Digits_As_Numbers -> False
Dialect_Flag.Case_Insensitive_Ordering -> True
Dialect_Flag.Order_By_Unicode_Normalization_By_Default -> True
Dialect_Flag.Allows_Mixed_Type_Comparisons -> False
Dialect_Flag.Supports_Unicode_Normalization -> False
Dialect_Flag.NaN_Non_Comparable -> False
Dialect_Flag.Distinct_Returns_First_Row_From_Group_If_Ordered -> True
Dialect_Flag.Date_Time -> True
Dialect_Flag.Text_Length_Limited_Columns -> True
Dialect_Flag.Fixed_Length_Text_Columns -> True
Dialect_Flag.Length_Restricted_Text_Columns -> True
Dialect_Flag.Removes_Trailing_Whitespace_Casting_From_Char_To_Varchar -> True
Dialect_Flag.Char_Max_Size_After_Substring_Kept -> False
Dialect_Flag.Different_Size_Integer_Types -> True
Dialect_Flag.Supports_8bit_Integer -> False
Dialect_Flag.Supports_Decimal_Type -> True
Dialect_Flag.Supports_Time_Duration -> False
Dialect_Flag.Supports_Nanoseconds_In_Time -> False
Dialect_Flag.Supports_Mixed_Columns -> False
Dialect_Flag.Supports_Date_Time_Without_Timezone -> True
## PRIVATE
The default table types to use when listing tables.
@ -555,7 +578,7 @@ make_order_descriptor internal_column sort_direction text_ordering =
Order_Descriptor.Value internal_column.expression sort_direction nulls_order=nulls collation=Nothing
_ ->
## In the future we can modify this error to suggest using a custom defined collation.
if text_ordering.sort_digits_as_numbers then Error.throw (Unsupported_Database_Operation.Error "Natural ordering") else
if text_ordering.sort_digits_as_numbers then Error.throw (Unsupported_Database_Operation.Error "sort_digits_as_numbers") else
case text_ordering.case_sensitivity of
Case_Sensitivity.Default ->
Order_Descriptor.Value internal_column.expression sort_direction nulls_order=nulls collation=Nothing

View File

@ -117,7 +117,7 @@ type SQLite_Dialect
Nothing ->
Order_Descriptor.Value internal_column.expression sort_direction collation=Nothing
_ ->
if text_ordering.sort_digits_as_numbers then Error.throw (Unsupported_Database_Operation.Error "Natural ordering") else
if text_ordering.sort_digits_as_numbers then Error.throw (Unsupported_Database_Operation.Error "sort_digits_as_numbers") else
case text_ordering.case_sensitivity of
Case_Sensitivity.Default ->
Order_Descriptor.Value internal_column.expression sort_direction collation=Nothing
@ -221,6 +221,9 @@ type SQLite_Dialect
_ = [expression, context]
Panic.throw (Illegal_State.Error "Type inference by asking the Database for the expected types is not supported in SQLite since it tended to give wrong results. This should have never been called - if it was - that is a bug in the Database library.")
## PRIVATE
generate_collate self collation_name:Text -> Text = Base_Generator.default_generate_collate collation_name
## PRIVATE
check_aggregate_support : Aggregate_Column -> Boolean ! Unsupported_Database_Operation
check_aggregate_support self aggregate = case aggregate of
@ -277,6 +280,26 @@ type SQLite_Dialect
Dialect_Flag.Supports_Case_Sensitive_Columns -> False
Dialect_Flag.Supports_Infinity -> True
Dialect_Flag.Case_Sensitive_Text_Comparison -> True
Dialect_Flag.Supports_Sort_Digits_As_Numbers -> False
Dialect_Flag.Case_Insensitive_Ordering -> False
Dialect_Flag.Order_By_Unicode_Normalization_By_Default -> False
Dialect_Flag.Allows_Mixed_Type_Comparisons -> True
Dialect_Flag.Supports_Unicode_Normalization -> False
Dialect_Flag.NaN_Non_Comparable -> True
Dialect_Flag.Distinct_Returns_First_Row_From_Group_If_Ordered -> True
Dialect_Flag.Date_Time -> False
Dialect_Flag.Text_Length_Limited_Columns -> False
Dialect_Flag.Fixed_Length_Text_Columns -> False
Dialect_Flag.Length_Restricted_Text_Columns -> False
Dialect_Flag.Removes_Trailing_Whitespace_Casting_From_Char_To_Varchar -> False
Dialect_Flag.Char_Max_Size_After_Substring_Kept -> False
Dialect_Flag.Different_Size_Integer_Types -> False
Dialect_Flag.Supports_8bit_Integer -> False
Dialect_Flag.Supports_Decimal_Type -> False
Dialect_Flag.Supports_Time_Duration -> False
Dialect_Flag.Supports_Nanoseconds_In_Time -> False
Dialect_Flag.Supports_Mixed_Columns -> False
Dialect_Flag.Supports_Date_Time_Without_Timezone -> False
## PRIVATE
The default table types to use when listing tables.

View File

@ -57,7 +57,8 @@ type SQLServer_Details
account = [Pair.new 'encrypt' 'false']
database = [Pair.new 'databaseName' self.database]
credentials = [Pair.new 'user' self.credentials.username, Pair.new 'password' self.credentials.password]
account + database + credentials
send_time_as_datetime = [Pair.new 'sendTimeAsDatetime' 'false']
account + database + credentials + send_time_as_datetime
## PRIVATE
private create_data_link_structure details:SQLServer_Details data_link_location:Enso_File -> JS_Object =

View File

@ -164,7 +164,6 @@ type SQLServer_Dialect
_ : Float ->
if value.is_nan || value.is_infinite then stmt.setNull i Java_Types.REAL else
stmt.setDouble i value
_ : Time_Of_Day -> JDBCUtils.setLocalTimeViaTimeStamp stmt i value
# Fallback to default logic for everything else
_ -> fill_hole_default stmt i type_hint value
Statement_Setter.Value custom_fill_hole
@ -221,6 +220,9 @@ type SQLServer_Dialect
prepare_fetch_types_query self expression context =
Base_Generator.default_fetch_types_query self expression context
## PRIVATE
generate_collate self collation_name:Text -> Text = ' COLLATE ' + collation_name
## PRIVATE
check_aggregate_support : Aggregate_Column -> Boolean ! Unsupported_Database_Operation
check_aggregate_support self aggregate =
@ -255,6 +257,26 @@ type SQLServer_Dialect
Dialect_Flag.Supports_Case_Sensitive_Columns -> False
Dialect_Flag.Supports_Infinity -> False
Dialect_Flag.Case_Sensitive_Text_Comparison -> False
Dialect_Flag.Supports_Sort_Digits_As_Numbers -> False
Dialect_Flag.Case_Insensitive_Ordering -> True
Dialect_Flag.Order_By_Unicode_Normalization_By_Default -> True
Dialect_Flag.Allows_Mixed_Type_Comparisons -> False
Dialect_Flag.Supports_Unicode_Normalization -> False
Dialect_Flag.NaN_Non_Comparable -> True
Dialect_Flag.Distinct_Returns_First_Row_From_Group_If_Ordered -> True
Dialect_Flag.Date_Time -> True
Dialect_Flag.Text_Length_Limited_Columns -> True
Dialect_Flag.Fixed_Length_Text_Columns -> True
Dialect_Flag.Length_Restricted_Text_Columns -> True
Dialect_Flag.Removes_Trailing_Whitespace_Casting_From_Char_To_Varchar -> True
Dialect_Flag.Char_Max_Size_After_Substring_Kept -> False
Dialect_Flag.Different_Size_Integer_Types -> True
Dialect_Flag.Supports_8bit_Integer -> False
Dialect_Flag.Supports_Decimal_Type -> True
Dialect_Flag.Supports_Time_Duration -> False
Dialect_Flag.Supports_Nanoseconds_In_Time -> False
Dialect_Flag.Supports_Mixed_Columns -> False
Dialect_Flag.Supports_Date_Time_Without_Timezone -> False
## PRIVATE
The default table types to use when listing tables.
@ -279,7 +301,8 @@ type SQLServer_Dialect
if the given period is supported.
prepare_metadata_for_period : Date_Period | Time_Period -> Value_Type -> Any
prepare_metadata_for_period self period operation_input_type =
Date_Period_Metadata.Value period operation_input_type
if period == Time_Period.Nanosecond then Error.throw (Unsupported_Database_Operation.Error "Nanosecond precision date/times") else
Date_Period_Metadata.Value period operation_input_type
## PRIVATE
Returns true if the `replace` parameters are supported by this backend.
@ -422,7 +445,7 @@ make_dialect_operations =
stddev_pop = ["STDDEV_POP", Base_Generator.make_function "stddev_pop"]
stddev_samp = ["STDDEV_SAMP", Base_Generator.make_function "stddev_samp"]
stats = [agg_median, agg_mode, agg_percentile, stddev_pop, stddev_samp]
date_ops = [make_extract_as_int "year", make_extract_as_int "quarter", make_extract_as_int "month", make_extract_as_int "week", make_extract_as_int "day", make_extract_as_int "hour", make_extract_as_int "minute", make_extract_fractional_as_int "second", make_extract_fractional_as_int "millisecond" modulus=1000, make_extract_fractional_as_int "microsecond" modulus=1000, ["date_add", make_date_add], ["date_diff", make_date_diff], ["date_trunc_to_day", make_date_trunc_to_day]]
date_ops = [["year", Base_Generator.make_function "year"], make_datepart "quarter", ["month", Base_Generator.make_function "month"], make_datepart "week" "iso_week", ["day", Base_Generator.make_function "day"], make_datepart "hour", make_datepart "minute", make_datepart "day_of_year" "dayofyear", make_day_of_week, make_datepart "second", make_datepart "millisecond", make_extract_microsecond, ["date_add", make_date_add], ["date_diff", make_date_diff], ["date_trunc_to_day", make_date_trunc_to_day]]
special_overrides = [is_empty]
other = [["RUNTIME_ERROR", make_runtime_error_op]]
my_mappings = text + counts + stats + first_last_aggregators + arith_extensions + bool + date_ops + special_overrides + other
@ -584,19 +607,17 @@ make_order_descriptor internal_column sort_direction text_ordering =
Order_Descriptor.Value (Internals_Access.column_expression internal_column) sort_direction nulls_order=nulls collation=Nothing
_ ->
## In the future we can modify this error to suggest using a custom defined collation.
if text_ordering.sort_digits_as_numbers then Error.throw (Unsupported_Database_Operation.Error "Natural ordering") else
if text_ordering.sort_digits_as_numbers then Error.throw (Unsupported_Database_Operation.Error "sort_digits_as_numbers") else
case text_ordering.case_sensitivity of
Case_Sensitivity.Default ->
Order_Descriptor.Value (Internals_Access.column_expression internal_column) sort_direction nulls_order=nulls collation=Nothing
Case_Sensitivity.Sensitive ->
Order_Descriptor.Value (Internals_Access.column_expression internal_column) sort_direction nulls_order=nulls collation="ucs_basic"
Order_Descriptor.Value (Internals_Access.column_expression internal_column) sort_direction nulls_order=nulls collation="Latin1_General_BIN2"
Case_Sensitivity.Insensitive locale -> case locale == Locale.default of
False ->
Error.throw (Unsupported_Database_Operation.Error "Case insensitive ordering with custom locale")
True ->
upper = SQL_Expression.Operation "UPPER" [Internals_Access.column_expression internal_column]
folded_expression = SQL_Expression.Operation "LOWER" [upper]
Order_Descriptor.Value folded_expression sort_direction nulls_order=nulls collation=Nothing
Order_Descriptor.Value (Internals_Access.column_expression internal_column) sort_direction nulls_order=nulls collation="Latin1_General_CI_AS"
## PRIVATE
bool_or = Base_Generator.lift_unary_op "BOOL_OR" arg->
@ -682,115 +703,60 @@ replace args metadata =
True -> Nothing
expression.if_nothing (replace_params.throw_unsupported sqlserver_dialect_name)
## PRIVATE
make_extract_as_int enso_name sql_name=enso_name =
make_datepart enso_name sql_name=enso_name =
Base_Generator.lift_unary_op enso_name arg->
as_int32 <| SQL_Builder.code "EXTRACT(" ++ sql_name ++ " FROM " ++ arg ++ ")"
SQL_Builder.code "DATEPART(" ++ sql_name ++ ", " ++ arg ++ ")"
## PRIVATE
make_extract_fractional_as_int enso_name sql_name=enso_name modulus=Nothing =
Base_Generator.lift_unary_op enso_name arg->
result = as_int32 <| SQL_Builder.code "TRUNC(EXTRACT(" ++ sql_name ++ " FROM " ++ arg ++ "))"
case modulus of
Nothing -> result
_ : Integer ->
(result ++ (" % "+modulus.to_text)).paren
make_day_of_week =
Base_Generator.lift_unary_op "day_of_week" arg->
SQL_Builder.code "((DATEPART(weekday, " ++ arg ++ ") + @@DATEFIRST - 2) % 7) + 1"
make_extract_microsecond =
Base_Generator.lift_unary_op "microsecond" arg->
SQL_Builder.code "DATEPART(microsecond, " ++ arg ++ ")%1000"
private _get_sqlserver_interval interval:Date_Period|Time_Period -> Text =
case interval of
Date_Period.Year -> "year"
Date_Period.Quarter -> "quarter"
Date_Period.Month -> "month"
Date_Period.Week _ -> "week"
Date_Period.Day -> "day"
Time_Period.Hour -> "hour"
Time_Period.Minute -> "minute"
Time_Period.Second -> "second"
Time_Period.Millisecond -> "millisecond"
Time_Period.Microsecond -> "microsecond"
Time_Period.Nanosecond -> "nanosecond"
## PRIVATE
make_date_add arguments (metadata : Date_Period_Metadata) =
if arguments.length != 2 then Error.throw (Illegal_State.Error "date_add expects exactly 2 sub expressions. This is a bug in Database library.") else
expr = arguments.at 0
amount = arguments.at 1
interval_arg = case metadata.period of
Date_Period.Year ->
"years=>1"
Date_Period.Quarter ->
"months=>3"
Date_Period.Month ->
"months=>1"
Date_Period.Week _ ->
"weeks=>1"
Date_Period.Day ->
"days=>1"
Time_Period.Hour ->
"hours=>1"
Time_Period.Minute ->
"mins=>1"
Time_Period.Second ->
"secs=>1"
Time_Period.Millisecond ->
"secs=>0.001"
Time_Period.Microsecond ->
"secs=>0.000001"
interval_expression = SQL_Builder.code "make_interval(" ++ interval_arg ++ ")"
shifted = SQL_Builder.code "(" ++ expr ++ " + (" ++ amount ++ " * " ++ interval_expression ++ "))"
case metadata.input_value_type of
Value_Type.Date ->
SQL_Builder.code "(" ++ shifted ++ "::date)"
_ -> shifted
interval = _get_sqlserver_interval metadata.period
SQL_Builder.code "DATEADD(" ++ interval ++ ", " ++ amount ++ ", " ++ expr ++ ")"
## PRIVATE
make_date_diff arguments (metadata : Date_Period_Metadata) =
if arguments.length != 2 then Error.throw (Illegal_State.Error "date_diff expects exactly 2 sub expressions. This is a bug in Database library.") else
start = arguments.at 0
end = arguments.at 1
interval = _get_sqlserver_interval metadata.period
truncate expr =
SQL_Builder.code "TRUNC(" ++ expr ++ ")"
# `age` computes a 'symbolic' difference expressed in years, months and days.
extract_years =
as_int32 <| SQL_Builder.code "EXTRACT(YEARS FROM age(" ++ end ++ ", " ++ start ++ "))"
# To get total months, we need to sum up with whole years.
extract_months =
months = as_int32 <|
SQL_Builder.code "EXTRACT(MONTHS FROM age(" ++ end ++ ", " ++ start ++ "))"
SQL_Builder.code "(" ++ extract_years ++ " * 12 + " ++ months ++ ")"
## To get total days, we cannot use `age`, because we cannot convert an
amount of months to days (month lengths vary). Instead we rely on `-`
returning an interval based in 'raw' days.
extract_days =
as_int32 <| case metadata.input_value_type of
## For pure 'date' datatype, the difference is a simple integer
count of days.
Value_Type.Date -> (end ++ " - " ++ start).paren
# For others, it is an interval, so we need to extract.
_ -> SQL_Builder.code "EXTRACT(DAYS FROM (" ++ end ++ " - " ++ start ++ "))"
## We round the amount of seconds towards zero, as we only count full
elapsed seconds in the interval.
Note that it is important the interval is computed using `-`. The
symbolic `age` has no clear mapping to the count of days, skewing the
result.
extract_seconds =
seconds_numeric = SQL_Builder.code "EXTRACT(EPOCH FROM (" ++ end ++ " - " ++ start ++ "))"
as_int64 (truncate seconds_numeric)
case metadata.period of
Date_Period.Year -> extract_years
Date_Period.Month -> extract_months
Date_Period.Quarter -> (extract_months ++ " / 3").paren
Date_Period.Week _ -> (extract_days ++ " / 7").paren
Date_Period.Day -> extract_days
## EXTRACT HOURS/MINUTES would yield only a date part, but we need
the total which is easiest achieved by EPOCH
Time_Period.Hour -> (extract_seconds ++ " / 3600").paren
Time_Period.Minute -> (extract_seconds ++ " / 60").paren
Time_Period.Second -> extract_seconds
## The EPOCH gives back just the integer amount of seconds, without
the fractional part. So we get the fractional part using
MILLISECONDS - but that does not give the _total_ just the
'seconds of minute' part, expressed in milliseconds. So we need
to merge both - but then seconds of minute appear twice, so we %
the milliseconds to get just the fractional part from it and sum
both.
Time_Period.Millisecond ->
millis = truncate <|
SQL_Builder.code "EXTRACT(MILLISECONDS FROM (" ++ end ++ " - " ++ start ++ "))"
as_int64 <|
((extract_seconds ++ " * 1000").paren ++ " + " ++ (millis ++ " % 1000").paren).paren
Time_Period.Microsecond ->
micros = SQL_Builder.code "EXTRACT(MICROSECONDS FROM (" ++ end ++ " - " ++ start ++ "))"
as_int64 <|
((extract_seconds ++ " * 1000000").paren ++ " + " ++ (micros ++ " % 1000000").paren).paren
## SQLServer's DATEDIFF does not take the day of the month into account when
calculating the month or quarter difference between two dates. To match the
in-memory version we have to do some extra calculations.
Date_Period.Month -> _make_month_datediff start end
Date_Period.Quarter -> ((_make_month_datediff start end) ++ " / 3").paren
_ -> SQL_Builder.code "DATEDIFF_BIG(" ++ interval ++ ", " ++ start ++ ", " ++ end ++ ")"
private _make_month_datediff start end -> SQL_Builder =
sqlserver_month_diff = SQL_Builder.code "DATEDIFF(month, " ++ start ++ ", " ++ end ++ ")"
day_of_month_diff = SQL_Builder.code "DAY(" ++ start ++ ") - DAY(" ++ end ++ ")"
adjustment_amount = SQL_Builder.code "IIF(SIGN(" ++ sqlserver_month_diff ++ ")!=SIGN(" ++ day_of_month_diff ++ "), 0, SIGN(" ++ day_of_month_diff ++ "))"
SQL_Builder.code "IIF(" ++ sqlserver_month_diff ++ "=0, 0," ++ sqlserver_month_diff ++ " - " ++ adjustment_amount ++ ")"
## PRIVATE
make_date_trunc_to_day arguments =
@ -798,18 +764,6 @@ make_date_trunc_to_day arguments =
expr = arguments.at 0
SQL_Builder.code "(DATE_TRUNC('day'," ++ expr ++ ") :: DATE)"
## PRIVATE
Alters the expression casting the value to a 64-bit integer.
TODO probably remove
as_int64 expr =
SQL_Builder.code "(" ++ expr ++ "::int8)"
## PRIVATE
Alters the expression casting the value to a 32-bit integer.
TODO probably remove
as_int32 expr =
SQL_Builder.code "(" ++ expr ++ "::int4)"
## PRIVATE
The RUNTIME_ERROR operation should allow the query to compile fine and it
will not prevent it from running if the branch including this operation is

View File

@ -224,6 +224,9 @@ type Snowflake_Dialect
prepare_fetch_types_query self expression context =
Base_Generator.default_fetch_types_query self expression context
## PRIVATE
generate_collate self collation_name:Text -> Text = Base_Generator.default_generate_collate collation_name
## PRIVATE
check_aggregate_support : Aggregate_Column -> Boolean ! Unsupported_Database_Operation
check_aggregate_support self aggregate =
@ -255,6 +258,26 @@ type Snowflake_Dialect
Dialect_Flag.Supports_Case_Sensitive_Columns -> True
Dialect_Flag.Supports_Infinity -> True
Dialect_Flag.Case_Sensitive_Text_Comparison -> True
Dialect_Flag.Supports_Sort_Digits_As_Numbers -> False
Dialect_Flag.Case_Insensitive_Ordering -> False
Dialect_Flag.Order_By_Unicode_Normalization_By_Default -> False
Dialect_Flag.Allows_Mixed_Type_Comparisons -> False
Dialect_Flag.Supports_Unicode_Normalization -> False
Dialect_Flag.NaN_Non_Comparable -> False
Dialect_Flag.Distinct_Returns_First_Row_From_Group_If_Ordered -> False
Dialect_Flag.Date_Time -> True
Dialect_Flag.Text_Length_Limited_Columns -> True
Dialect_Flag.Fixed_Length_Text_Columns -> False
Dialect_Flag.Length_Restricted_Text_Columns -> True
Dialect_Flag.Removes_Trailing_Whitespace_Casting_From_Char_To_Varchar -> False
Dialect_Flag.Char_Max_Size_After_Substring_Kept -> True
Dialect_Flag.Different_Size_Integer_Types -> False
Dialect_Flag.Supports_8bit_Integer -> False
Dialect_Flag.Supports_Decimal_Type -> True
Dialect_Flag.Supports_Time_Duration -> False
Dialect_Flag.Supports_Nanoseconds_In_Time -> True
Dialect_Flag.Supports_Mixed_Columns -> False
Dialect_Flag.Supports_Date_Time_Without_Timezone -> True
## PRIVATE
The default table types to use when listing tables.
@ -553,7 +576,7 @@ make_order_descriptor internal_column sort_direction text_ordering =
Order_Descriptor.Value (Internals_Access.column_expression internal_column) sort_direction nulls_order=nulls collation=Nothing
_ ->
## In the future we can modify this error to suggest using a custom defined collation.
if text_ordering.sort_digits_as_numbers then Error.throw (Unsupported_Database_Operation.Error "Natural ordering") else
if text_ordering.sort_digits_as_numbers then Error.throw (Unsupported_Database_Operation.Error "sort_digits_as_numbers") else
case text_ordering.case_sensitivity of
Case_Sensitivity.Default ->
Order_Descriptor.Value (Internals_Access.column_expression internal_column) sort_direction nulls_order=nulls collation=Nothing
@ -816,7 +839,7 @@ make_distinct_extension distinct_expressions =
## We could use the ORDER BY here to ensure any previous ordering is preserved by distinct.
But to do so we need to have a robust way of checking such ordering, even if subqueries were taken in the meantime.
This may be related to #10321
Once fixed, should re-enable the `distinct_returns_first_row_from_group_if_ordered` flag back to `True`.
Once fixed, should re-enable the `Distinct_Returns_First_Row_From_Group_If_Ordered` flag back to `True`.
SQL_Builder.code " QUALIFY ROW_NUMBER() OVER (PARTITION BY " ++ joined ++ " ORDER BY 1) = 1 "
Context_Extension.Value position=550 expressions=distinct_expressions run_generator=run_generator

View File

@ -3,7 +3,6 @@ package org.enso.database;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.sql.Types;
import java.time.LocalDate;
import java.time.LocalDateTime;
@ -72,18 +71,6 @@ public class JDBCUtils {
stmt.setObject(columnIndex, localTime, Types.TIME);
}
/**
* Sets a LocalTime in a PreparedStatement via TimeStamp.
*
* @param stmt
*/
public static void setLocalTimeViaTimeStamp(
PreparedStatement stmt, int columnIndex, LocalTime localTime) throws SQLException {
Timestamp timestamp = Timestamp.valueOf(localTime.atDate(LocalDate.of(1970, 1, 1)));
stmt.setTimestamp(columnIndex, timestamp);
}
/** Sets a LocalDate in a PreparedStatement. */
public static void setLocalDate(PreparedStatement stmt, int columnIndex, LocalDate localDate)
throws SQLException {

View File

@ -75,7 +75,7 @@ add_database_specs suite_builder create_connection_fn =
default_connection.get.base_connection.create_literal_table (Table.new columns) "literal_table"
materialize = .read
common_selection = Common_Table_Operations.Main.Test_Selection.Config order_by_unicode_normalization_by_default=True allows_mixed_type_comparisons=False supports_decimal_type=True run_advanced_edge_case_tests_by_default=False
common_selection = Common_Table_Operations.Main.Test_Selection.Config run_advanced_edge_case_tests_by_default=False
aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last_row_order=False aggregation_problems=False date_support=False
agg_in_memory_table = (enso_project.data / "data.csv") . read
agg_table_fn = _->

View File

@ -199,7 +199,7 @@ add_sqlserver_specs suite_builder create_connection_fn =
materialize = .read
common_selection = Common_Table_Operations.Main.Test_Selection.Config order_by_unicode_normalization_by_default=True allows_mixed_type_comparisons=False text_length_limited_columns=True fixed_length_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=True char_max_size_after_substring=..Reset supports_decimal_type=True supported_replace_params=supported_replace_params run_advanced_edge_case_tests_by_default=True supports_date_time_without_timezone=False date_time=False is_nan_comparable=False
common_selection = Common_Table_Operations.Main.Test_Selection.Config supported_replace_params=supported_replace_params run_advanced_edge_case_tests_by_default=True
aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last_row_order=False aggregation_problems=False
agg_in_memory_table = (enso_project.data / "data.csv") . read

View File

@ -535,7 +535,7 @@ add_snowflake_specs suite_builder create_connection_fn db_name =
default_connection.get.base_connection.create_literal_table (Table.new columns) "literal_table"
materialize = .read
common_selection = Common_Table_Operations.Main.Test_Selection.Config order_by_unicode_normalization_by_default=True allows_mixed_type_comparisons=False text_length_limited_columns=True fixed_length_text_columns=False different_size_integer_types=False removes_trailing_whitespace_casting_from_char_to_varchar=False supports_decimal_type=True supported_replace_params=supported_replace_params run_advanced_edge_case_tests_by_default=False supports_date_time_without_timezone=True supports_nanoseconds_in_time=True is_nan_comparable=True distinct_returns_first_row_from_group_if_ordered=False
common_selection = Common_Table_Operations.Main.Test_Selection.Config supported_replace_params=supported_replace_params run_advanced_edge_case_tests_by_default=False
aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last_row_order=False aggregation_problems=False text_concat=False
agg_in_memory_table = ((Project_Description.new enso_dev.Table_Tests).data / "data.csv") . read

View File

@ -1395,7 +1395,7 @@ add_aggregate_specs suite_builder setup =
m3.at "Average Sum Y" . to_vector . should_equal [7]
m3.at "Maximum Minimum Y" . to_vector . should_equal [6]
if setup.test_selection.supports_unicode_normalization then
if setup.flagged ..Supports_Unicode_Normalization then
group_builder.specify "should correctly handle Unicode normalization within grouping" <|
table = table_builder [["A", ['s', 's\u0301', 'ś', 's\u0301']], ["B", [1, 2, 4, 8]]]
grouped = table.aggregate ["A"] [Sum "B"]

View File

@ -20,7 +20,6 @@ from Standard.Test import all
import enso_dev.Base_Tests.Data.Round_Spec
import project.Common_Table_Operations.Main.Char_Max_Size_After_Substring_Behavior
from project.Common_Table_Operations.Util import all
main filter=Nothing = run_default_backend add_specs filter
@ -154,7 +153,7 @@ add_column_operation_specs suite_builder setup =
table_builder = setup.table_builder
create_connection_fn = setup.create_connection_func
light_table_builder = setup.light_table_builder
pending_datetime = if setup.test_selection.date_time.not then "Date/Time operations are not supported by this backend."
pending_datetime = if (setup.flagged ..Date_Time).not then "Date/Time operations are not supported by this backend."
suite_builder.group prefix+"(Column_Operations_Spec) Rounding numeric tests" group_builder->
round_one n dp=0 use_bankers=False =
@ -214,7 +213,7 @@ add_column_operation_specs suite_builder setup =
clue = table.columns.map (c-> c.name + "->" + c.value_type.to_display_text) . join ", " "{Table: " "} "
Test.with_clue clue <|
callback table
case setup.test_selection.supports_mixed_columns of
case setup.flagged ..Supports_Mixed_Columns of
False -> callback_with_clue (build_sorted_table table_structure)
True ->
all_combinations (Vector.fill table_structure.length [Nothing, Mixed_Type_Object]) . each combination->
@ -289,7 +288,7 @@ add_column_operation_specs suite_builder setup =
c2.to_vector . should_equal [10, "<NA>"]
False -> c2.should_fail_with No_Common_Type
group_builder.specify "iif should correctly unify text columns of various lengths" pending=(if setup.test_selection.fixed_length_text_columns.not then "Fixed-length Char columns are not supported by this backend.") <|
group_builder.specify "iif should correctly unify text columns of various lengths" pending=(if (setup.flagged ..Fixed_Length_Text_Columns).not then "Fixed-length Char columns are not supported by this backend.") <|
t0 = table_builder [["x", [False, True, False]], ["A", ["a", "b", "c"]], ["B", ["xyz", "abc", "def"]]]
t1 = t0 . cast "A" (Value_Type.Char size=1 variable_length=False) . cast "B" (Value_Type.Char size=3 variable_length=False)
@ -504,7 +503,7 @@ add_column_operation_specs suite_builder setup =
# SELECT SYSTEM$TYPEOF(COALESCE(CAST("literal_table"."x" AS CHAR(1)), 'N/A')) FROM (VALUES ('1'), (NULL), ('a')) AS "literal_table"("x");
# VARCHAR(3) - so it is correct
# TODO - investigate JDBC / our mapping bug
case setup.test_selection.length_restricted_text_columns of
case setup.flagged ..Length_Restricted_Text_Columns of
True -> actual.at "col0" . value_type . should_equal (Value_Type.Char size=5 variable_length=True)
False -> actual.at "col0" . value_type . should_equal (Value_Type.Char variable_length=True)
@ -518,7 +517,7 @@ add_column_operation_specs suite_builder setup =
t = table_builder [["col0", [Nothing, "200", Nothing, "400", "500", Nothing]]] . cast "col0" (Value_Type.Char size=3 variable_length=False)
actual = t.fill_nothing ["col0"] ""
actual.at "col0" . to_vector . should_equal ["", "200", "", "400", "500", ""]
case setup.test_selection.length_restricted_text_columns of
case setup.flagged ..Length_Restricted_Text_Columns of
True -> actual.at "col0" . value_type . should_equal (Value_Type.Char size=3 variable_length=True)
False -> actual.at "col0" . value_type . should_equal (Value_Type.Char variable_length=True)
@ -527,10 +526,10 @@ add_column_operation_specs suite_builder setup =
fillBlank = t.fill_nothing ["col0"] ""
fillOneSpace = t.fill_nothing ["col0"] " "
fillTwoSpaces = t.fill_nothing ["col0"] " "
case setup.test_selection.length_restricted_text_columns of
case setup.flagged ..Length_Restricted_Text_Columns of
True ->
fillBlank.at "col0" . value_type . should_equal (Value_Type.Char size=1 variable_length=True)
case setup.test_selection.fixed_length_text_columns of
case setup.flagged ..Fixed_Length_Text_Columns of
True -> fillOneSpace.at "col0" . value_type . should_equal (Value_Type.Char size=1 variable_length=False)
False -> fillOneSpace.at "col0" . value_type . should_equal (Value_Type.Char size=1 variable_length=True)
fillTwoSpaces.at "col0" . value_type . should_equal (Value_Type.Char size=2 variable_length=True)
@ -539,13 +538,13 @@ add_column_operation_specs suite_builder setup =
fillOneSpace.at "col0" . value_type . should_equal (Value_Type.Char variable_length=True)
fillTwoSpaces.at "col0" . value_type . should_equal (Value_Type.Char variable_length=True)
case setup.test_selection.removes_trailing_whitespace_casting_from_char_to_varchar of
case setup.flagged ..Removes_Trailing_Whitespace_Casting_From_Char_To_Varchar of
True -> fillBlank.at "col0" . to_vector . should_equal ["a", "", ""]
False -> fillBlank.at "col0" . to_vector . should_equal ["a", "", " "]
fillOneSpace.at "col0" . to_vector . should_equal ["a", " ", " "]
case setup.test_selection.removes_trailing_whitespace_casting_from_char_to_varchar of
case setup.flagged ..Removes_Trailing_Whitespace_Casting_From_Char_To_Varchar of
True -> fillTwoSpaces.at "col0" . to_vector . should_equal ["a", "", ""]
False -> fillTwoSpaces.at "col0" . to_vector . should_equal ["a", " ", " "]
@ -554,9 +553,9 @@ add_column_operation_specs suite_builder setup =
actual = t.fill_nothing ["col0"] " "
actual.at "col0" . to_vector . should_equal [" ", "200", " ", "400", "500", " "]
case setup.test_selection.length_restricted_text_columns of
case setup.flagged ..Length_Restricted_Text_Columns of
True ->
case setup.test_selection.fixed_length_text_columns of
case setup.flagged ..Fixed_Length_Text_Columns of
True -> actual.at "col0" . value_type . should_equal (Value_Type.Char size=3 variable_length=False)
False -> actual.at "col0" . value_type . should_equal (Value_Type.Char size=3 variable_length=True)
False -> actual.at "col0" . value_type . should_equal (Value_Type.Char variable_length=True)
@ -698,7 +697,7 @@ add_column_operation_specs suite_builder setup =
setup.is_integer_type (t.at "i").inferred_precise_value_type . should_be_true
(t.at "f").inferred_precise_value_type . should_be_a (Value_Type.Float ...)
case setup.test_selection.supports_decimal_type of
case setup.flagged ..Supports_Decimal_Type of
True ->
(t.at "d").inferred_precise_value_type . should_be_a (Value_Type.Decimal ...)
False ->
@ -792,7 +791,7 @@ add_column_operation_specs suite_builder setup =
op (t.at "Z") 32 . should_fail_with Invalid_Value_Type
op (t.at "Z") (t.at "X") . should_fail_with Invalid_Value_Type
if setup.test_selection.supports_mixed_columns then
if setup.flagged ..Supports_Mixed_Columns then
group_builder.specify "should allow comparing Mixed columns" <|
t1 = table_builder [["X", ["a", 23]], ["Y", ["b", 1]]]
((t1.at "X") == (t1.at "Y")) . to_vector . should_equal [False, False]
@ -886,7 +885,7 @@ add_column_operation_specs suite_builder setup =
(data.x ^ data.x).value_type . is_numeric . should_be_true
if setup.test_selection.supports_decimal_type then
if setup.flagged ..Supports_Decimal_Type then
# TODO in-memory specific tests may be added in In_Memory/Table_Spec as part of #10429
divide_in_memory_pending = if setup.is_database.not then "TODO: https://github.com/enso-org/enso/issues/10429"
group_builder.specify "should correctly infer the types (for decimal division)" pending=divide_in_memory_pending <|
@ -1123,7 +1122,7 @@ add_column_operation_specs suite_builder setup =
result.name . should_equal "round([x])"
test_floatlike Value_Type.Float
if setup.test_selection.supports_decimal_type then
if setup.flagged ..Supports_Decimal_Type then
test_floatlike Value_Type.Decimal
group_builder.specify "should allow round on an int column" <|
@ -1154,7 +1153,7 @@ add_column_operation_specs suite_builder setup =
table = table_builder [["x", [0, 3, -3, 1, -2]]]
table.at "x" . round 16 . should_fail_with Illegal_Argument
if setup.test_selection.supports_decimal_type then
if setup.flagged ..Supports_Decimal_Type then
group_builder.specify "ceil, floor and truncate should work correctly on Integers outside the java Long range" <|
positive_values = [9223372036854775806, 9223372036854775807, 9223372036854775808, 9223372036854775809, 9223372036854775807000000]
values = positive_values + positive_values.map .negate
@ -1175,7 +1174,7 @@ add_column_operation_specs suite_builder setup =
c.floor . to_vector . should_equal [x_int, -x_int]
c.truncate . to_vector . should_equal [x_int, -x_int]
if setup.test_selection.supports_decimal_type then
if setup.flagged ..Supports_Decimal_Type then
group_builder.specify "ceil, floor and truncate should work correctly on Decimals" <|
c = table_builder [["X", [Decimal.new "123492233720368547758075678.25", Decimal.new "179243023788662739454197523.625", Decimal.new "-123492233720368547758075678.25", Decimal.new "-179243023788662739454197523.625"]]] . at "X"
c.value_type.is_decimal . should_be_true
@ -1282,12 +1281,15 @@ add_column_operation_specs suite_builder setup =
resLeft . to_vector . should_equal ["a", "f", "", Nothing, "c", "I"]
resRight . to_vector . should_equal ["a", "r", "", Nothing, "é", "."]
expected_new_size = case setup.test_selection.char_max_size_after_substring of
Char_Max_Size_After_Substring_Behavior.Reset -> Nothing
Char_Max_Size_After_Substring_Behavior.Kept -> 286
Char_Max_Size_After_Substring_Behavior.Reduced -> 1
expected_new_size = case setup.flagged ..Char_Max_Size_After_Substring_Kept of
False -> Nothing
True -> 286
Test.with_clue "(Expected behaviour for substring size = "+setup.test_selection.char_max_size_after_substring.to_text+".) " <|
expected_behavior = case setup.flagged ..Char_Max_Size_After_Substring_Kept of
False -> "Reset"
True -> "Kept"
Test.with_clue "(Expected behaviour for substring size = "+expected_behavior+".) " <|
resLeft.value_type . should_equal (Value_Type.Char size=expected_new_size variable_length=True)
resRight.value_type . should_equal (Value_Type.Char size=expected_new_size variable_length=True)
@ -1662,7 +1664,7 @@ add_column_operation_specs suite_builder setup =
vt1.should_be_a (Value_Type.Char ...)
vt1.variable_length.should_be_true
group_builder.specify "should infer correct return type (2)" pending=(if setup.test_selection.fixed_length_text_columns.not then "Fixed-length Char columns are not supported by this backend.") <|
group_builder.specify "should infer correct return type (2)" pending=(if (setup.flagged ..Fixed_Length_Text_Columns).not then "Fixed-length Char columns are not supported by this backend.") <|
c = table_builder [["texts", ["foo", "bar"]]] . at "texts"
c2 = c.cast (Value_Type.Char size=2 variable_length=False)
c3 = c2.text_replace "a" "---"
@ -1851,7 +1853,7 @@ add_column_operation_specs suite_builder setup =
c4.to_vector . should_equal ["asdf", "asdf", "asdf"]
c4.value_type.should_be_a (Value_Type.Char ...)
if setup.test_selection.date_time then group_builder.specify "Should allow the creation of constant date/time columns" <|
if setup.flagged ..Date_Time then group_builder.specify "Should allow the creation of constant date/time columns" <|
t = table_builder [["x", ["1", "2", "3"]]]
d = Date.new 2024 07 05
c1 = t.make_constant_column d

View File

@ -31,7 +31,7 @@ add_specs suite_builder setup =
add_conversion_specs suite_builder setup =
prefix = setup.prefix
materialize = setup.materialize
supports_dates = setup.test_selection.date_time
supports_dates = setup.flagged ..Date_Time
supports_conversion_failure_reporting = setup.is_database.not
table_builder = build_sorted_table setup
suite_builder.group prefix+"(Conversion_Spec) Table/Column.cast - to text" group_builder->
@ -94,7 +94,7 @@ add_conversion_specs suite_builder setup =
c.value_type.is_decimal . should_be_true
c.to_vector . should_equal [1, 2, 3000]
if setup.test_selection.fixed_length_text_columns then
if setup.flagged ..Fixed_Length_Text_Columns then
group_builder.specify "should allow to cast a text column to fixed-length" <|
t = table.get
c = t.at "W" . cast (Value_Type.Char size=3 variable_length=False)
@ -133,7 +133,7 @@ add_conversion_specs suite_builder setup =
r1.should_fail_with Illegal_Argument
r1.catch.to_display_text . should_contain "positive"
if setup.test_selection.text_length_limited_columns then
if setup.flagged ..Length_Restricted_Text_Columns then
group_builder.specify "should allow to cast a text column to variable-length with a max size" <|
t = table_builder [["X", ["a", "DEF", "a slightly longer text"]]]
c = t.at "X" . cast (Value_Type.Char size=3 variable_length=True)
@ -161,7 +161,7 @@ add_conversion_specs suite_builder setup =
c.value_type.is_floating_point . should_be_true
c.to_vector . should_equal [1.0, 2.0, 3.0]
if setup.test_selection.different_size_integer_types then
if setup.flagged ..Different_Size_Integer_Types then
group_builder.specify "should allow to cast an integer column to a smaller bit-width and larger bit-width" <|
t = table_builder [["X", [1, 2, 3]]]
c = t.at "X" . cast (Value_Type.Integer Bits.Bits_16)
@ -187,7 +187,7 @@ add_conversion_specs suite_builder setup =
c3.value_type . should_equal (Value_Type.Integer Bits.Bits_32)
c3.to_vector . should_equal [1, 2, 3]
if setup.test_selection.supports_8bit_integer then
if setup.flagged ..Supports_8bit_Integer then
group_builder.specify "should allow to cast an integer column to a byte and back" <|
t = table_builder [["X", [1, 2, 3]]]
c1 = t.at "X" . cast Value_Type.Byte
@ -427,14 +427,14 @@ add_conversion_specs suite_builder setup =
t2.at "Z" . to_vector . should_equal ["7", "8", "9"]
t2.at "A" . to_vector . should_equal [True, False, True]
if setup.test_selection.different_size_integer_types then
if setup.flagged ..Different_Size_Integer_Types then
group_builder.specify "should preserve the overridden types when materialized (Integer)" <|
t = table_builder [["X", [1, 2, 100]]]
t2 = t . cast "X" (Value_Type.Integer Bits.Bits_16)
t3 = materialize t2
t3.at "X" . value_type . should_equal (Value_Type.Integer Bits.Bits_16)
if setup.test_selection.fixed_length_text_columns then
if setup.flagged ..Fixed_Length_Text_Columns then
group_builder.specify "should preserve the overridden types when materialized (Char)" <|
t = table_builder [["Y", ["a", "abcdef", "abc"]]]
t2 = t . cast "Y" (Value_Type.Char size=3 variable_length=False)
@ -513,7 +513,7 @@ add_conversion_specs suite_builder setup =
c3.to_vector . should_equal ["1", "2.25", "3", "-45.25", "2.0", "X", "2020-01-01", date_time_str, "12:30:00", "{{{MY Type [x=42] }}}", "True", Nothing, "True"]
Problems.assume_no_problems c3
if setup.test_selection.text_length_limited_columns then
if setup.flagged ..Text_Length_Limited_Columns then
c3_2 = t2.cast "super-mix" (Value_Type.Char size=2) . at "super-mix"
c3_2.value_type . should_equal (Value_Type.Char size=2)
c3_2.to_vector . should_equal ["1", "2.", "3", "-4", "2.", "X", "20", "20", "12", "{{", "Tr", Nothing, "Tr"]

View File

@ -40,7 +40,7 @@ add_specs suite_builder setup =
prefix = setup.prefix
table_builder = Util.build_sorted_table setup
data = Data.setup table_builder
pending_datetime = if setup.test_selection.date_time.not then "Date/Time operations are not supported by this backend."
pending_datetime = if (setup.flagged ..Date_Time).not then "Date/Time operations are not supported by this backend."
suite_builder.group prefix+"Date-Time support" pending=pending_datetime group_builder->
group_builder.specify "should allow to create Table with Date columns and round-trip them back to Enso" <|
@ -75,7 +75,7 @@ add_specs suite_builder setup =
# Currently we support Value_Type.Date_Time with_timezone=False in Database backends, not in-memory.
# In this test we create the table using the connection, so it only can run on Database.
# This is the only way to enforce uploading Date Times without TZ without relying on Casting (which works on another level so it would not be a good test of the upload mechanism).
if setup.is_database && setup.test_selection.supports_date_time_without_timezone then group_builder.specify "should allow to create Table with Date Time (without timezone) columns and round-trip them back to Enso, preserving the date/time parts, possibly losing the timezone/offset" <|
if setup.is_database && setup.flagged ..Supports_Date_Time_Without_Timezone then group_builder.specify "should allow to create Table with Date Time (without timezone) columns and round-trip them back to Enso, preserving the date/time parts, possibly losing the timezone/offset" <|
dt1 = Date_Time.new 2020 10 24 1 2 3 millisecond=4 microsecond=5
offset_zone = Time_Zone.new hours=(-11) minutes=(-30)
dt2 = Date_Time.new 2021 11 23 1 2 3 zone=offset_zone
@ -171,14 +171,14 @@ add_specs suite_builder setup =
a.date_part Time_Period.Second . to_vector . should_equal [59, 44, 0, Nothing]
a.date_part Time_Period.Millisecond . to_vector . should_equal [567, 1, 0, Nothing]
a.date_part Time_Period.Microsecond . to_vector . should_equal [0, 2, 0, Nothing]
case setup.test_selection.supports_nanoseconds_in_time of
case setup.flagged ..Supports_Nanoseconds_In_Time of
True ->
a.date_part Time_Period.Nanosecond . to_vector . should_equal [123, 0, 0, Nothing]
False ->
a.date_part Time_Period.Nanosecond . should_fail_with Unsupported_Database_Operation
testCases = [a.date_part Time_Period.Second, a.date_part Time_Period.Millisecond, a.date_part Time_Period.Microsecond]
if setup.test_selection.supports_nanoseconds_in_time then
if setup.flagged ..Supports_Nanoseconds_In_Time then
testCases = testCases + (a.date_part Time_Period.Nanosecond)
testCases.each c->
Test.with_clue "The column "+c.name+" value type ("+c.value_type.to_display_text+") should be an integer: " <|
@ -199,14 +199,14 @@ add_specs suite_builder setup =
a.date_part Date_Period.Week . to_vector . should_equal [53, 9, 1, Nothing, 21]
a.date_part Time_Period.Millisecond . to_vector . should_equal [567, 1, 0, Nothing, 0]
a.date_part Time_Period.Microsecond . to_vector . should_equal [0, 2, 0, Nothing, 0]
case setup.test_selection.supports_nanoseconds_in_time of
case setup.flagged ..Supports_Nanoseconds_In_Time of
True ->
a.date_part Time_Period.Nanosecond . to_vector . should_equal [123, 0, 0, Nothing, 0]
False ->
a.date_part Time_Period.Nanosecond . should_fail_with Unsupported_Database_Operation
testCases = [a.date_part Date_Period.Quarter, a.date_part Date_Period.Week, a.date_part Time_Period.Second, a.date_part Time_Period.Millisecond, a.date_part Time_Period.Microsecond]
if setup.test_selection.supports_nanoseconds_in_time then
if setup.flagged ..Supports_Nanoseconds_In_Time then
testCases = testCases + (a.date_part Time_Period.Nanosecond)
testCases.each c->
Test.with_clue "The column "+c.name+" value type ("+c.value_type.to_display_text+") should be an integer: " <|
@ -231,11 +231,10 @@ add_specs suite_builder setup =
group_builder.specify "should allow to compare time-of-day" <|
t = table_builder [["X", [Time_Of_Day.new 12 30 0]], ["Y", [Time_Of_Day.new 12 30 1]]]
[(<), (<=), (>), (>=), (==), (!=)].each op->
op (t.at "X") (t.at "Y") . value_type . should_equal Value_Type.Boolean
op (t.at "X") (t.at "Y") . to_vector . should_succeed
op (t.at "X") (Time_Of_Day.new 12 30 0) . to_vector . should_succeed
op (t.at "X") (Time_Of_Day.new 12 30 5) . to_vector . should_succeed
group_builder.specify "should not allow to mix types in ordering comparisons" <|
t = table_builder [["X", [Date.new 2021 12 3]], ["Y", [Date_Time.new 2021 12 5 12 30 0]], ["Z", [Time_Of_Day.new 12 30 0]]]
@ -244,7 +243,7 @@ add_specs suite_builder setup =
op (t.at "X") (t.at "Y") . should_fail_with Invalid_Value_Type
op (t.at "X") (t.at "Z") . should_fail_with Invalid_Value_Type
if setup.test_selection.supports_time_duration then
if setup.flagged ..Supports_Time_Duration then
group_builder.specify "should allow to subtract two Dates" <|
t = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date.new 2021 12 5]]]
@ -266,7 +265,7 @@ add_specs suite_builder setup =
((t.at "Y") - (t.at "X")) . to_vector . should_equal [Duration.new hours=2 minutes=15 seconds=20, Duration.new hours=(-1) minutes=0 seconds=0]
((t.at "Y") - (Time_Of_Day.new 0 0 0)) . to_vector . should_equal [Duration.new hours=12 minutes=30 seconds=20, Duration.zero]
if setup.test_selection.supports_time_duration.not then
if (setup.flagged ..Supports_Time_Duration).not then
group_builder.specify "should report unsupported operation for subtracting date/time" <|
t1 = table_builder [["X", [Date.new 2021 11 3]], ["Y", [Date.new 2021 12 5]]]
t2 = table_builder [["X", [Date_Time.new 2021 11 3 10 15 0]], ["Y", [Date_Time.new 2021 12 5 12 30 20]]]
@ -301,10 +300,6 @@ add_specs suite_builder setup =
(t1.at "Y").date_diff (t1.at "X") Date_Period.Day . to_vector . should_equal [-32]
(t1.at "X").date_diff (Date.new 2021 11 3) Date_Period.Day . to_vector . should_equal [0]
(t1.at "X").date_diff (t1.at "Y") Date_Period.Month . to_vector . should_equal [1]
(t1.at "X").date_diff (Date.new 2020 12 1) Date_Period.Month . to_vector . should_equal [-11]
(t1.at "X").date_diff (Date.new 2021 12 1) Date_Period.Month . to_vector . should_equal [0]
(t1.at "X").date_diff (t1.at "Y") Date_Period.Quarter . to_vector . should_equal [0]
(t1.at "X").date_diff (Date.new 2021 5 1) Date_Period.Quarter . to_vector . should_equal [-2]
(t1.at "X").date_diff (Date.new 2023 7 1) Date_Period.Quarter . to_vector . should_equal [6]
@ -348,7 +343,7 @@ add_specs suite_builder setup =
(t2.at "X").date_diff (t2.at "Y") Time_Period.Microsecond . to_vector . should_equal [2772920000000]
(t2.at "X").date_diff (Date_Time.new 2021 11 3 10 15 30 123 456 zone=zone) Time_Period.Microsecond . to_vector . should_equal [30123456]
case setup.test_selection.supports_nanoseconds_in_time of
case setup.flagged ..Supports_Nanoseconds_In_Time of
True ->
(t2.at "X").date_diff (t2.at "Y") Time_Period.Nanosecond . to_vector . should_equal [2772920000000000]
(t2.at "X").date_diff (Date_Time.new 2021 11 3 10 15 30 123 456 789 zone=zone) Time_Period.Nanosecond . to_vector . should_equal [30123456789]
@ -376,7 +371,7 @@ add_specs suite_builder setup =
(t3.at "X").date_diff (t3.at "Y") Time_Period.Microsecond . to_vector . should_equal [8120*1000*1000]
(t3.at "X").date_diff (Time_Of_Day.new 10 15 12 34 56) Time_Period.Microsecond . to_vector . should_equal [12034056]
case setup.test_selection.supports_nanoseconds_in_time of
case setup.flagged ..Supports_Nanoseconds_In_Time of
True ->
(t3.at "X").date_diff (t3.at "Y") Time_Period.Nanosecond . to_vector . should_equal [8120*1000*1000*1000]
(t3.at "X").date_diff (Time_Of_Day.new 10 15 12 34 56 78) Time_Period.Nanosecond . to_vector . should_equal [12034056078]
@ -384,6 +379,29 @@ add_specs suite_builder setup =
(t3.at "X").date_diff (t3.at "Y") Time_Period.Nanosecond . should_fail_with Unsupported_Database_Operation
(t3.at "X").date_diff (Time_Of_Day.new 10 15 12 34 56 78) Time_Period.Nanosecond . should_fail_with Unsupported_Database_Operation
## dates = [["d_X", [Date.new 2021 11 3]], ["d_Y", [Date.new 2021 12 5]], ["X1", [Date.new 2021 01 02]]]
group_builder.specify "should allow computing a SQL-like month difference (date_diff)" <|
t1 = dates1.get.select_columns ["d_X", "d_Y", "X1"] reorder=True . rename_columns ["X", "Y", "X1"]
(t1.at "X").date_diff (t1.at "Y") Date_Period.Month . to_vector . should_equal [1]
## same month always 0
(t1.at "X").date_diff (Date.new 2021 11 1) Date_Period.Month . to_vector . should_equal [0]
(t1.at "X").date_diff (Date.new 2021 11 3) Date_Period.Month . to_vector . should_equal [0]
(t1.at "X").date_diff (Date.new 2021 11 30) Date_Period.Month . to_vector . should_equal [0]
## next month 1 if date equal or later
(t1.at "X").date_diff (Date.new 2021 12 1) Date_Period.Month . to_vector . should_equal [0]
(t1.at "X").date_diff (Date.new 2021 12 3) Date_Period.Month . to_vector . should_equal [1]
(t1.at "X").date_diff (Date.new 2021 12 31) Date_Period.Month . to_vector . should_equal [1]
## previous month -1 if date equal or earlier
(t1.at "X").date_diff (Date.new 2021 10 1) Date_Period.Month . to_vector . should_equal [-1]
(t1.at "X").date_diff (Date.new 2021 10 3) Date_Period.Month . to_vector . should_equal [-1]
(t1.at "X").date_diff (Date.new 2021 10 31) Date_Period.Month . to_vector . should_equal [0]
(t1.at "X").date_diff (Date.new 2020 12 1) Date_Period.Month . to_vector . should_equal [-11]
(t1.at "X").date_diff (Date.new 2021 12 1) Date_Period.Month . to_vector . should_equal [0]
group_builder.specify "date_diff should return integers" <|
t = table_builder [["X", [Date.new 2021 01 31]], ["Y", [Time_Of_Day.new 12 30 20]], ["Z", [Date_Time.new 2021 12 5 12 30 20]]]
@ -443,7 +461,7 @@ add_specs suite_builder setup =
(t2.at "X").date_add 1 Time_Period.Second . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 01 31 12 30 1, Date_Time.new 2021 01 01 12 30 1, Date_Time.new 2021 12 31 12 30 1]
(t2.at "X").date_add 1 Time_Period.Millisecond . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 01 31 12 30 millisecond=1, Date_Time.new 2021 01 01 12 30 millisecond=1, Date_Time.new 2021 12 31 12 30 millisecond=1]
(t2.at "X").date_add 1 Time_Period.Microsecond . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 01 31 12 30 microsecond=1, Date_Time.new 2021 01 01 12 30 microsecond=1, Date_Time.new 2021 12 31 12 30 microsecond=1]
case setup.test_selection.supports_nanoseconds_in_time of
case setup.flagged ..Supports_Nanoseconds_In_Time of
True ->
(t2.at "X").date_add 1 Time_Period.Nanosecond . to_vector . should_equal_tz_agnostic [Date_Time.new 2021 01 31 12 30 nanosecond=1, Date_Time.new 2021 01 01 12 30 nanosecond=1, Date_Time.new 2021 12 31 12 30 nanosecond=1]
False ->
@ -459,7 +477,7 @@ add_specs suite_builder setup =
(t3.at "X").date_add 1 Time_Period.Second . to_vector . should_equal [Time_Of_Day.new 12 30 1, Time_Of_Day.new 23 45 1, Time_Of_Day.new 1 30 1]
(t3.at "X").date_add 1 Time_Period.Millisecond . to_vector . should_equal [Time_Of_Day.new 12 30 millisecond=1, Time_Of_Day.new 23 45 millisecond=1, Time_Of_Day.new 1 30 millisecond=1]
(t3.at "X").date_add 1 Time_Period.Microsecond . to_vector . should_equal [Time_Of_Day.new 12 30 microsecond=1, Time_Of_Day.new 23 45 microsecond=1, Time_Of_Day.new 1 30 microsecond=1]
case setup.test_selection.supports_nanoseconds_in_time of
case setup.flagged ..Supports_Nanoseconds_In_Time of
True ->
(t3.at "X").date_add 1 Time_Period.Nanosecond . to_vector . should_equal [Time_Of_Day.new 12 30 nanosecond=1, Time_Of_Day.new 23 45 nanosecond=1, Time_Of_Day.new 1 30 nanosecond=1]
False ->
@ -520,7 +538,7 @@ add_specs suite_builder setup =
y.date_diff dt4 Date_Period.Day . to_vector . should_equal [0]
y.date_diff dt4 Time_Period.Hour . to_vector . should_equal [23]
if setup.test_selection.date_time.not then
if (setup.flagged ..Date_Time).not then
suite_builder.group prefix+"partial Date-Time support" group_builder->
group_builder.specify "will fail when uploading a Table containing Dates" pending="TODO" <|
d = Date.new 2020 10 24

View File

@ -22,7 +22,7 @@ add_specs suite_builder setup =
add_derived_columns_specs suite_builder setup =
prefix = setup.prefix
table_builder = setup.light_table_builder
pending_datetime = if setup.test_selection.date_time.not then "Date/Time operations are not supported by this backend."
pending_datetime = if (setup.flagged ..Date_Time).not then "Date/Time operations are not supported by this backend."
suite_builder.group prefix+"(Derived_Columns_Spec) Table.set with Simple_Expression" group_builder->
group_builder.specify "arithmetics" pending=(if prefix.contains "Snowflake" then "TODO: re-enable these once https://github.com/enso-org/enso/pull/10583 is merged") <|
t = table_builder [["A", [1, 2]], ["B", [10, 40]]]

View File

@ -69,7 +69,7 @@ add_distinct_specs suite_builder setup =
[0.1, 0.2, 0.5].contains (cv.at 0) . should_be_true
[0.3, 0.4, 0.6].contains (cv.at 1) . should_be_true
if setup.test_selection.distinct_returns_first_row_from_group_if_ordered then
if setup.flagged ..Distinct_Returns_First_Row_From_Group_If_Ordered then
group_builder.specify "should allow to select distinct rows based on a subset of columns, returning first from each group if the table was ordered" <|
a = ["A", ["a", "a", "a", "a", "a", "a"]]
b = ["B", [1, 1, 2, 2, 1, 2]]

View File

@ -34,7 +34,7 @@ add_expression_specs suite_builder detailed setup =
column_c = ["C", ["Hello", "World", "Hello World!", "", Nothing]]
table_builder [column_a_description, column_b, column_c, column_odd_description]
pending_datetime = if setup.test_selection.date_time.not then "Date/Time operations are not supported by this backend."
pending_datetime = if (setup.flagged ..Date_Time).not then "Date/Time operations are not supported by this backend."
epsilon=0.0000000001

View File

@ -79,7 +79,7 @@ add_filter_specs suite_builder setup =
## We do not do typechecking at Enso level here, as it is
DB-dependent if such mixing is allowed, so we will rely on an SQL
error.
case test_selection.allows_mixed_type_comparisons of
case setup.flagged ..Allows_Mixed_Type_Comparisons of
True -> v.should_equal []
False -> v.should_fail_with SQL_Error
@ -108,7 +108,7 @@ add_filter_specs suite_builder setup =
False -> t.filter "X" (Filter_Condition.Less than=10.0) . at "X" . to_vector . should_equal [2.5]
# In some backends, NaN is greater than any other value, so it is > 10.0; in other implementations it is usually not greater nor smaller, so it gets filtered out.
nan_is_comparable = setup.test_selection.is_nan_comparable
nan_is_comparable = setup.flagged ..NaN_Non_Comparable . not
t.filter "X" (Filter_Condition.Greater than=10.0) . at "ix" . to_vector . should_equal <|
case setup.flagged ..Supports_Infinity of
True -> if nan_is_comparable then [1, 4, 5] else [1, 5]
@ -149,7 +149,7 @@ add_filter_specs suite_builder setup =
t.filter "X" (Filter_Condition.Between "b" "c") . at "X" . to_vector . should_equal ["baca", "b", "c"]
t.filter "X" (Filter_Condition.Equal to="baca") . at "X" . to_vector . should_equal ["baca"]
v = t.filter "X" (Filter_Condition.Equal to=52) . at "X" . to_vector
case test_selection.allows_mixed_type_comparisons of
case setup.flagged ..Allows_Mixed_Type_Comparisons of
True -> v.should_equal []
False -> v.should_fail_with SQL_Error
@ -240,7 +240,7 @@ add_filter_specs suite_builder setup =
t.filter "X" (Filter_Condition.Ends_With '\n') . at "X" . to_vector . should_equal ['a\n\n\n', 'a\n']
t.filter "X" (Filter_Condition.Starts_With 'c') . at "X" . to_vector . should_equal ['caa\nbb']
if test_selection.supports_unicode_normalization then
if setup.flagged ..Supports_Unicode_Normalization then
t = table_builder [["X", ['śnieg', 's\u0301nieg', 'X', Nothing, 'połać', 'połac\u0301']]]
group_builder.specify "text operations should support Unicode normalization" <|
t.filter "X" (Filter_Condition.Starts_With 'ś') . at "X" . to_vector . should_equal ['śnieg', 's\u0301nieg']
@ -314,7 +314,7 @@ add_filter_specs suite_builder setup =
t.filter "X" (Filter_Condition.Is_In ["ccc"]) . at "X" . to_vector . should_equal ["ccc"]
t.filter "X" (Filter_Condition.Is_In []) . at "X" . to_vector . should_equal []
if test_selection.allows_mixed_type_comparisons then
if setup.flagged ..Allows_Mixed_Type_Comparisons then
mixed = t.filter "X" (Filter_Condition.Is_In (t1.at "int")) . at "X" . to_vector
# Some backends (e.g. SQLite) allow to coerce integer and text types when doing mixed type comparisons.
((mixed == []) || (mixed == ["2"])).should_be_true
@ -325,11 +325,11 @@ add_filter_specs suite_builder setup =
t.filter "ix" (Filter_Condition.Is_In [Nothing]) . at "ix" . to_vector . should_equal []
v1 = t.filter "X" (Filter_Condition.Is_In ["c", "f", "b", "b", "b", 15, Nothing]) . at "X" . to_vector
case test_selection.allows_mixed_type_comparisons of
case setup.flagged ..Allows_Mixed_Type_Comparisons of
True -> v1.should_equal ["b", "f"]
False -> v1.should_fail_with SQL_Error
v2 = t.filter "ix" (Filter_Condition.Is_In ["c", 3, 2, "a"]) . at "ix" . to_vector
case test_selection.allows_mixed_type_comparisons of
case setup.flagged ..Allows_Mixed_Type_Comparisons of
True -> v2.should_equal [2, 3]
False -> v2.should_fail_with SQL_Error

View File

@ -118,7 +118,7 @@ add_integration_specs suite_builder setup =
t4 = t1.sort "Y" . distinct "X" . sort "Y" |> materialize
t4.row_count . should_equal 2
if setup.test_selection.distinct_returns_first_row_from_group_if_ordered then
if setup.flagged ..Distinct_Returns_First_Row_From_Group_If_Ordered then
group_builder.specify "distinct and ordering if first row is returned after ordering" <|
a = ["A", ["a", "a", "a", "a", "a", "a"]]
b = ["B", [1, 1, 2, 2, 1, 2]]
@ -194,7 +194,7 @@ add_integration_specs suite_builder setup =
r123.at "X" . to_vector . should_equal ["a", "b", "c", "ddd", "eee", "fff", "a", "b", "c"]
r123.at "Row" . to_vector . should_equal [1, 2, 3, 1, 2, 3, 1.5, 2.5, 3.5]
if setup.test_selection.fixed_length_text_columns then
if setup.flagged ..Fixed_Length_Text_Columns then
group_builder.specify "types of unioned fixed-length columns should be correctly inferred after passing through other operations that infer types from Database, like aggregate Shortest" <|
t1 = (table_builder [["X", ["a", "b", "c"]], ["Y", [1, 0, 2]]]) . cast "X" (Value_Type.Char 1 False)
t2 = (table_builder [["X", ["ddd", "eee", "fff"]], ["Y", [0, 1, 0]]]) . cast "X" (Value_Type.Char 3 False)

View File

@ -164,7 +164,7 @@ add_join_specs suite_builder setup =
r2 . at "Y" . to_vector . should_equal [1, 1, 2]
r2 . at "Z" . to_vector . should_equal [1, 2, 3]
if setup.test_selection.supports_unicode_normalization then
if setup.flagged ..Supports_Unicode_Normalization then
group_builder.specify "should correctly handle Unicode equality" <|
t1 = table_builder [["X", ['s\u0301', 'S\u0301']], ["Y", [1, 2]]]
t2 = table_builder [["X", ['s', 'S', 'ś']], ["Z", [1, 2, 3]]]
@ -291,7 +291,7 @@ add_join_specs suite_builder setup =
r2.column_names . should_equal ["X"]
r2.at "X" . to_vector . should_equal [10]
if setup.test_selection.supports_unicode_normalization then
if setup.flagged ..Supports_Unicode_Normalization then
group_builder.specify "should allow range-based joins (using Between) for text with Unicode normalization" <|
t1 = table_builder [["X", ['s\u0301', 's']], ["Y", [1, 2]]]
t2 = table_builder [["lower", ['s', 'ś']], ["upper", ['sa', 'ś']], ["Z", [10, 20]]]

View File

@ -237,7 +237,7 @@ add_merge_specs suite_builder setup =
t2.at "Z2" . value_type . should_equal Value_Type.Float
t2.at "W2" . value_type . should_equal Value_Type.Boolean
if setup.test_selection.fixed_length_text_columns then group_builder.specify "should correctly preserve types of original, merged and added columns (various Char types test case)" <|
if setup.flagged ..Fixed_Length_Text_Columns then group_builder.specify "should correctly preserve types of original, merged and added columns (various Char types test case)" <|
table2 = (table_builder [["key", ["0"]], ["X", ["a"]], ["A", ["bbbbb"]]]) . cast "key" (Value_Type.Char size=50) . cast "X" (Value_Type.Char size=1) . cast "A" (Value_Type.Char size=5 variable_length=False)
lookup2 = (table_builder [["key", ["0"]], ["X2", ["ccc"]], ["A", ["dddd"]]]) . cast "key" (Value_Type.Char size=100) . cast "X2" (Value_Type.Char size=3 variable_length=False) . cast "A" (Value_Type.Char size=4 variable_length=False)

View File

@ -180,7 +180,7 @@ add_replace_specs suite_builder setup =
table.replace lookup_table 'x' to_column=9 . should_fail_with Illegal_Argument
table.replace lookup_table 'x' from_column=8 to_column=9 . should_fail_with Illegal_Argument
if setup.test_selection.date_time then group_builder.specify "should correctly infer types of columns (Date/Time)" <|
if setup.flagged ..Date_Time then group_builder.specify "should correctly infer types of columns (Date/Time)" <|
my_table = setup.table_builder [["X", [1, 2, 3, 2]], ["rowid", [1, 2, 3, 4]]]
t1 = my_table.replace (Dictionary.from_vector [[1, Date.new 2021], [2, Date.new 2022], [3, Date.new 2023]]) "X" allow_unmatched_rows=False . sort "rowid"

View File

@ -329,7 +329,7 @@ run_union_tests group_builder setup call_union =
check_same <| call_union [t1] columns_to_keep=(..In_List ["A", "B"])
group_builder.specify "should correctly unify text columns of various lengths" pending=(if setup.test_selection.fixed_length_text_columns.not then "Fixed-length Char columns are not supported by this backend.") <|
group_builder.specify "should correctly unify text columns of various lengths" pending=(if (setup.flagged ..Fixed_Length_Text_Columns).not then "Fixed-length Char columns are not supported by this backend.") <|
t1 = (table_builder [["A", ["a", "b", "c"]]]) . cast "A" (Value_Type.Char size=1 variable_length=False)
t2 = (table_builder [["A", ["xyz", "abc", "def"]]]) . cast "A" (Value_Type.Char size=3 variable_length=False)
@ -413,7 +413,7 @@ run_union_tests group_builder setup call_union =
m12.at "X" . to_vector . should_equal [0, 1, 2, 3, 4, 5]
m12.at "Y" . to_vector . should_equal ['aa', 'bb', 'cc', 'x', 'y', 'z']
date_time_pending = if setup.test_selection.date_time.not then "Date/Time operations are not supported."
date_time_pending = if (setup.flagged ..Date_Time).not then "Date/Time operations are not supported."
group_builder.specify "should warn when converting a Date to Date_Time" pending=date_time_pending <|
t1 = build_table_with_maybe_rowid [["D", [Date_Time.new 2024 5 16 16 48 23]]]
t2 = build_table_with_maybe_rowid [["D", [Date.new 2019 10 23, Date.new 2020]]]

View File

@ -93,51 +93,6 @@ type Test_Selection
backend, driving what kind of test suites should be enabled.
Arguments:
- natural_ordering: Specifies if the backend supports natural ordering
operations.
- case_insensitive_ordering: Specifies if the backend supports case
insensitive ordering.
- order_by_unicode_normalization_by_default: Specifies if the backend
supports unicode normalization in its default ordering.
- case_insensitive_ascii_only:
- allows_mixed_type_comparisons: Specifies if mixed operations comparing
mixed types are allowed by a given backend. Some backends will allow
such comparisons, when mixed type storage is allowed or by coercing to
the target type; others will fail with a type error.
- supports_unicode_normalization: Specifies if the backend compares
strings taking Unicode Normalization into account, i.e. whether
's\u0301' is considered equal to 'ś'.
- is_nan_comparable: Specifies if the NaN value is
treated as greater than all numbers. If `False`, `NaN` is expected to
yield `False` for both < and > comparisons.
- distinct_returns_first_row_from_group_if_ordered: If `order_by` was
applied before, the distinct operation will return the first row from
each group. Guaranteed in the in-memory backend, but may not be
supported by all databases.
- date_time: Specifies if the backend supports date/time operations.
- text_length_limited_columns: Specifies if the backend supports setting
a length limit on text columns.
- fixed_length_text_columns: Specifies if the backend supports fixed
length text columns.
- length_restricted_text_columns: Specifies if the backend supports
length restrictions for text columns.
- removes_trailing_whitespace_casting_from_char_to_varchar: if
SELECT concat('X', CAST(CAST(' ' AS CHAR(3)) AS VARCHAR(3)), 'X')
returns XX then this should be set to True
- char_max_size_after_substring: Specifies how the max size of the char
type behaves after text_left/text_right.
- different_size_integer_types: Specifies if the backend supports
integer types of various sizes, like 16-bit or 32-bit integers.
- supports_8bit_integer: Specifies if the backend supports 8-bit
integers.
- supports_decimal_type: Specifies if the backend supports the `Decimal`
high-precision type.
- supports_time_duration: Specifies if the backend supports a
`Duration`/`Period` type.
- supports_nanoseconds_in_time: Specifies if the backend supports
nanosecond precision in time values.
- supports_mixed_columns: Specifies if the backend supports mixed-type
columns.
- supported_replace_params: Specifies the possible values of
Replace_Params that a backend supports.
- run_advanced_edge_case_tests_by_default: A flag that tells whether to
@ -150,11 +105,8 @@ type Test_Selection
local backends like SQLite or Postgres. Backends that speak to remote
databases may have this flag disabled to save time.
If `ENSO_ADVANCED_EDGE_CASE_TESTS` environment variable is set to `True`,
the advanced tests are ran regardless of this setting.
- supports_date_time_without_timezone: Specifies if the backend supports
date/time operations without a timezone (true for most Database backends).
Defaults to `.is_integer`.
Config natural_ordering=False case_insensitive_ordering=True order_by_unicode_normalization_by_default=False case_insensitive_ascii_only=False allows_mixed_type_comparisons=True supports_unicode_normalization=False is_nan_comparable=False distinct_returns_first_row_from_group_if_ordered=True date_time=True text_length_limited_columns=False fixed_length_text_columns=False length_restricted_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=False char_max_size_after_substring:Char_Max_Size_After_Substring_Behavior=..Kept different_size_integer_types=True supports_8bit_integer=False supports_decimal_type=False supports_time_duration=False supports_nanoseconds_in_time=False supports_mixed_columns=False supported_replace_params=Nothing run_advanced_edge_case_tests_by_default=True supports_date_time_without_timezone=False
the advanced tests are run regardless of this setting.
Config supported_replace_params=Nothing run_advanced_edge_case_tests_by_default=True
## Specifies if the advanced edge case tests shall be run.
@ -164,16 +116,6 @@ type Test_Selection
run_advanced_edge_case_tests self -> Boolean =
self.run_advanced_edge_case_tests_by_default || (Environment.get "ENSO_ADVANCED_EDGE_CASE_TESTS" . is_nothing . not)
type Char_Max_Size_After_Substring_Behavior
# The max size is reset to unbounded.
Reset
# The max size is kept unchanged.
Kept
# The max size is reduced to the length of the substring.
Reduced
add_specs suite_builder setup =
Core_Spec.add_specs suite_builder setup
Select_Columns_Spec.add_specs suite_builder setup

View File

@ -74,7 +74,7 @@ add_map_to_table_specs suite_builder setup =
vecs2 = [[], [3, 4, 5], [6, 7, 8]]
data.dummy_table.make_table_from_vectors vecs2 ['x', 'y', 'z'] . read . should_fail_with Illegal_Argument
if setup.test_selection.date_time then
if setup.flagged ..Date_Time then
group_builder.specify "should be able to create a table of date-time values" <|
vecs = [[Date.new 2021 01 02], [Time_Of_Day.new 12 35 4], [Date_Time.new 2020 04 02 18 55], ['2001-01-01'], ['01:01:01']]
t = data.dummy_table.make_table_from_vectors vecs ['date', 'time', 'datetime', 'date-like-text', 'time-like-text']

View File

@ -256,7 +256,7 @@ add_missing_value_specs suite_builder setup =
mix_filled.to_vector . should_equal [1, 0, 2, 0]
mix_filled.value_type . should_equal Value_Type.Mixed
group_builder.specify "should correctly unify text columns of various fixed lengths" pending=(if setup.test_selection.fixed_length_text_columns.not then "Fixed-length Char columns are not supported by this backend.") <|
group_builder.specify "should correctly unify text columns of various fixed lengths" pending=(if (setup.flagged ..Fixed_Length_Text_Columns).not then "Fixed-length Char columns are not supported by this backend.") <|
t0 = table_builder [["A", ["a", Nothing, "c"]], ["B", ["X", "Y", "Z"]], ["C", ["xyz", "abc", "def"]]]
t1 = t0 . cast "A" (Value_Type.Char size=1 variable_length=False) . cast "B" (Value_Type.Char size=1 variable_length=False) . cast "C" (Value_Type.Char size=3 variable_length=False)
@ -279,7 +279,7 @@ add_missing_value_specs suite_builder setup =
max_len = e.value_type.size
(max_len.is_nothing || (max_len.integer >= 3)).should_be_true
group_builder.specify "should correctly unify text columns of various varying lengths" pending=(if setup.test_selection.text_length_limited_columns.not then "Length-limited Char columns are not supported by this backend.") <|
group_builder.specify "should correctly unify text columns of various varying lengths" pending=(if (setup.flagged ..Text_Length_Limited_Columns).not then "Length-limited Char columns are not supported by this backend.") <|
t0 = table_builder [["A", ["a", Nothing, "c"]], ["B", ["X", "Y", "Z"]], ["C", ["xyz", "abc", "def"]]]
t1 = t0 . cast "A" (Value_Type.Char size=1 variable_length=True) . cast "B" (Value_Type.Char size=1 variable_length=True) . cast "C" (Value_Type.Char size=3 variable_length=True)

View File

@ -32,7 +32,7 @@ add_nothing_specs suite_builder setup =
mixed_values = if setup.is_database then [] else
[[My_Type.Value "1", My_Type.Value "2", Value_Type.Mixed]]
+ [[[1], [2], Value_Type.Mixed]]
date_time_values = if setup.test_selection.date_time.not then [] else
date_time_values = if (setup.flagged ..Date_Time).not then [] else
[[Date.new 2024 1 3, Date.new 2024 1 4, Value_Type.Date]]
+ [[Date_Time.new 2024 1 3 2 30 10 zone=zone, Date_Time.new 2024 1 3 2 30 11 zone=zone, Value_Type.Date_Time]]
+ [[Time_Of_Day.new 2 30 10, Time_Of_Day.new 2 30 11, Value_Type.Time]]

View File

@ -55,8 +55,6 @@ add_specs suite_builder setup =
add_order_specs suite_builder setup =
prefix = setup.prefix
create_connection_fn = setup.create_connection_func
test_selection = setup.test_selection
pending_collation = "Need more reliable testing for collation locale support"
suite_builder.group prefix+"Table.sort" group_builder->
data = Data.setup create_connection_fn setup.table_builder
@ -246,9 +244,9 @@ add_order_specs suite_builder setup =
t3 = data.table.sort [..Name "rho" ..Descending]
t3.at "rho" . to_vector . should_equal ["BB", "B", Nothing, Nothing]
group_builder.specify "should behave as expected with Unicode normalization, depending on the defaults settings" pending=pending_collation <|
group_builder.specify "should behave as expected with Unicode normalization, depending on the defaults settings" <|
t1 = data.table.sort [..Name "phi"]
case test_selection.order_by_unicode_normalization_by_default of
case setup.flagged ..Order_By_Unicode_Normalization_By_Default of
True ->
t1.at "phi" . to_vector . should_equal [Nothing, "śa", 's\u0301b', "śc"]
t1.at "alpha" . to_vector . should_equal [2, 0, 1, 3]
@ -256,20 +254,24 @@ add_order_specs suite_builder setup =
t1.at "phi" . to_vector . should_equal [Nothing, 's\u0301b', "śa", "śc"]
t1.at "alpha" . to_vector . should_equal [2, 1, 0, 3]
group_builder.specify "should support natural ordering" pending=(if test_selection.natural_ordering.not then "Natural ordering is not supported.") <|
group_builder.specify "should support natural ordering" <|
t1 = data.table.sort [..Name "delta"] text_ordering=(..Default sort_digits_as_numbers=True)
t1.at "delta" . to_vector . should_equal ["a1", "a2", "a03", "a10"]
t1.at "alpha" . to_vector . should_equal [2, 1, 0, 3]
case setup.flagged ..Supports_Sort_Digits_As_Numbers of
True ->
t1.at "delta" . to_vector . should_equal ["a1", "a2", "a03", "a10"]
t1.at "alpha" . to_vector . should_equal [2, 1, 0, 3]
False ->
t1.should_fail_with (Unsupported_Database_Operation.Error "sort_digits_as_numbers")
t2 = data.table.sort ["delta"] text_ordering=(..Default sort_digits_as_numbers=False)
t2.at "delta" . to_vector . should_equal ["a03", "a1", "a10", "a2"]
t2.at "alpha" . to_vector . should_equal [0, 2, 3, 1]
group_builder.specify "should support case insensitive ordering" pending=(if test_selection.case_insensitive_ordering.not then "Case insensitive ordering is not supported." else pending_collation) <|
if setup.flagged ..Case_Insensitive_Ordering then group_builder.specify "should support case insensitive ordering" <|
t1 = data.table.sort [..Name "eta"] text_ordering=(..Case_Insensitive)
expected = case test_selection.case_insensitive_ascii_only of
True -> ["Aleph", "alpha", "Beta", "bądź"]
False -> ["Aleph", "alpha", "bądź", "Beta"]
expected = case setup.flagged ..Order_By_Unicode_Normalization_By_Default of
False -> ["Aleph", "alpha", "Beta", "bądź"]
True -> ["Aleph", "alpha", "bądź", "Beta"]
t1.at "eta" . to_vector . should_equal expected
t2 = data.table.sort [..Name "eta"] text_ordering=(..Case_Sensitive)
@ -281,18 +283,24 @@ add_order_specs suite_builder setup =
t4 = data.table.sort [..Name "psi" ..Descending] text_ordering=(..Case_Sensitive)
t4.at "psi" . to_vector . should_equal ["c10", "c01", "C2", Nothing]
group_builder.specify "should support natural and case insensitive ordering at the same time" pending=(if (test_selection.natural_ordering.not || test_selection.case_insensitive_ordering.not) then "Natural ordering or case sensitive ordering is not supported.") <|
group_builder.specify "should support natural and case insensitive ordering at the same time" <|
t1 = data.table.sort [..Name "psi"] text_ordering=(..Case_Insensitive sort_digits_as_numbers=True)
t1.at "psi" . to_vector . should_equal [Nothing, "c01", "C2", "c10"]
case setup.flagged ..Supports_Sort_Digits_As_Numbers of
True -> t1.at "psi" . to_vector . should_equal [Nothing, "c01", "C2", "c10"]
False -> t1.should_fail_with (Unsupported_Database_Operation.Error "sort_digits_as_numbers")
t2 = data.table.sort [..Name "psi"] text_ordering=(..Default sort_digits_as_numbers=True)
t2.at "psi" . to_vector . should_equal [Nothing, "C2", "c01", "c10"]
case setup.flagged ..Supports_Sort_Digits_As_Numbers of
True -> t2.at "psi" . to_vector . should_equal [Nothing, "C2", "c01", "c10"]
False -> t2.should_fail_with (Unsupported_Database_Operation.Error "sort_digits_as_numbers")
t3 = data.table.sort [..Name "psi"] text_ordering=(..Case_Insensitive)
t3.at "psi" . to_vector . should_equal [Nothing, "c01", "c10", "C2"]
t4 = data.table.sort [..Name "psi"]
t4.at "psi" . to_vector . should_equal [Nothing, "C2", "c01", "c10"]
case setup.flagged ..Case_Insensitive_Ordering of
True -> t4.at "psi" . to_vector . should_equal [Nothing, "c01", "c10", "C2"]
False -> t4.at "psi" . to_vector . should_equal [Nothing, "C2", "c01", "c10"]
group_builder.specify "text ordering settings should not affect numeric columns" <|
ordering = ..Case_Insensitive sort_digits_as_numbers=True

View File

@ -725,7 +725,7 @@ add_postgres_specs suite_builder create_connection_fn db_name =
common_selection = Common_Table_Operations.Main.Test_Selection.Config order_by_unicode_normalization_by_default=True allows_mixed_type_comparisons=False text_length_limited_columns=True fixed_length_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=True char_max_size_after_substring=..Reset supports_decimal_type=True supported_replace_params=supported_replace_params run_advanced_edge_case_tests_by_default=True supports_date_time_without_timezone=True is_nan_comparable=True
common_selection = Common_Table_Operations.Main.Test_Selection.Config supported_replace_params=supported_replace_params run_advanced_edge_case_tests_by_default=True
aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last_row_order=False aggregation_problems=False
agg_in_memory_table = (enso_project.data / "data.csv") . read

View File

@ -333,7 +333,7 @@ sqlite_spec suite_builder prefix create_connection_func persistent_connector =
materialize = .read
common_selection = Common_Table_Operations.Main.Test_Selection.Config natural_ordering=False case_insensitive_ordering=True case_insensitive_ascii_only=True date_time=False supported_replace_params=supported_replace_params different_size_integer_types=False length_restricted_text_columns=False char_max_size_after_substring=..Reset run_advanced_edge_case_tests_by_default=True
common_selection = Common_Table_Operations.Main.Test_Selection.Config supported_replace_params=supported_replace_params run_advanced_edge_case_tests_by_default=True
## For now `advanced_stats`, `text_shortest_longest` and
`multi_distinct` remain disabled, because SQLite does not provide the

View File

@ -18,7 +18,7 @@ type Dummy_Connection
Nothing
in_memory_setup =
selection = Common_Table_Operations.Main.Test_Selection.Config natural_ordering=True case_insensitive_ordering=True order_by_unicode_normalization_by_default=True supports_unicode_normalization=True supports_decimal_type=True supports_time_duration=True supports_nanoseconds_in_time=True supports_mixed_columns=True text_length_limited_columns=True fixed_length_text_columns=True supports_8bit_integer=True
selection = Common_Table_Operations.Main.Test_Selection.Config
aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config
agg_table_fn _ = (enso_project.data / "data.csv") . read
@ -40,8 +40,10 @@ in_memory_setup =
is_operation_supported_fn _ =
True
flagged_fn flag:Dialect_Flag =
_ = flag
True
case flag of
Dialect_Flag.Case_Insensitive_Ordering -> False
Dialect_Flag.Removes_Trailing_Whitespace_Casting_From_Char_To_Varchar -> False
_ -> True
Common_Table_Operations.Main.Test_Setup.Config "[In-Memory] " agg_table_fn empty_table_fn table_builder materialize is_database=False test_selection=selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_func light_table_builder=light_table_builder is_feature_supported=is_feature_supported_fn flagged=flagged_fn is_operation_supported=is_operation_supported_fn