Mirror of https://github.com/enso-org/enso.git (synced 2024-12-22 14:41:31 +03:00)
SQLServer enable filter (#11471)
* checkpoint * Passing by.integer.comparisons * More passing tests * by empty text * Add flag * 2 more green * Back to 6 red * 5 red * 2 red * 1 red * Green * Refactor * refactor * Refactor * refactor * Refactor * clean up * Green * Refactor * Cleanup * Refactor * Clean up * Cleanup * Clean up * Fix tests * Fix * Fix * Code review * Refactor * Code review changes * Add literals * Code review changes * Code review changes * Checkpoint * checkpoint * checkpoint * Cleanup * Refactor * Refactor * refactor * Fix type name * Fix
This commit is contained in: parent a5ebdf4e79, commit 676a7d4256
@ -192,6 +192,8 @@ type Redshift_Dialect
|
||||
## TODO: Check if Redshift supports WITH clauses in nested queries
|
||||
Dialect_Flag.Supports_Nested_With_Clause -> True
|
||||
Dialect_Flag.Supports_Case_Sensitive_Columns -> True
|
||||
Dialect_Flag.Supports_Infinity -> True
|
||||
Dialect_Flag.Case_Sensitive_Text_Comparison -> True
|
||||
|
||||
## PRIVATE
|
||||
The default table types to use when listing tables.
|
||||
@ -225,8 +227,9 @@ type Redshift_Dialect
|
||||
False
|
||||
|
||||
## PRIVATE
|
||||
generate_column_for_select self base_gen expr:(SQL_Expression | Order_Descriptor | Query) name:Text -> SQL_Builder =
|
||||
base_gen.default_generate_column self expr name
|
||||
generate_expression self base_gen expr:(SQL_Expression | Order_Descriptor | Query) for_select:Boolean -> SQL_Builder =
|
||||
_ = for_select
|
||||
base_gen.generate_expression self expr
|
||||
|
||||
## PRIVATE
|
||||
ensure_query_has_no_holes : JDBC_Connection -> Text -> Nothing ! Illegal_Argument
|
||||
|
@ -1129,22 +1129,21 @@ type DB_Table
|
||||
Create a constant column from a value.
|
||||
make_constant_column : Any -> DB_Column ! Illegal_Argument
|
||||
make_constant_column self value =
|
||||
Feature.Column_Operations.if_supported_else_throw self.connection.dialect "make_constant_column" <|
|
||||
if Table_Helpers.is_column value then Error.throw (Illegal_Argument.Error "A constant value may only be created from a scalar, not a DB_Column") else
|
||||
type_mapping = self.connection.dialect.get_type_mapping
|
||||
argument_value_type = Value_Type_Helpers.find_argument_type value
|
||||
sql_type = case argument_value_type of
|
||||
Nothing -> SQL_Type.null
|
||||
_ -> type_mapping.value_type_to_sql argument_value_type Problem_Behavior.Ignore
|
||||
expr = SQL_Expression.Constant value
|
||||
new_type_ref = SQL_Type_Reference.from_constant sql_type
|
||||
base_column = Internal_Column.Value value.pretty new_type_ref expr
|
||||
needs_cast = argument_value_type.is_nothing.not && self.connection.dialect.needs_literal_table_cast argument_value_type
|
||||
result_internal_column = if needs_cast.not then base_column else
|
||||
infer_type_from_database new_expression =
|
||||
SQL_Type_Reference.new self.connection self.context new_expression
|
||||
self.connection.dialect.make_cast base_column sql_type infer_type_from_database
|
||||
self.make_column result_internal_column
|
||||
if Table_Helpers.is_column value then Error.throw (Illegal_Argument.Error "A constant value may only be created from a scalar, not a DB_Column") else
|
||||
type_mapping = self.connection.dialect.get_type_mapping
|
||||
argument_value_type = Value_Type_Helpers.find_argument_type value
|
||||
sql_type = case argument_value_type of
|
||||
Nothing -> SQL_Type.null
|
||||
_ -> type_mapping.value_type_to_sql argument_value_type Problem_Behavior.Ignore
|
||||
expr = SQL_Expression.Constant value
|
||||
new_type_ref = SQL_Type_Reference.from_constant sql_type
|
||||
base_column = Internal_Column.Value value.pretty new_type_ref expr
|
||||
needs_cast = argument_value_type.is_nothing.not && self.connection.dialect.needs_literal_table_cast argument_value_type
|
||||
result_internal_column = if needs_cast.not then base_column else
|
||||
infer_type_from_database new_expression =
|
||||
SQL_Type_Reference.new self.connection self.context new_expression
|
||||
self.connection.dialect.make_cast base_column sql_type infer_type_from_database
|
||||
self.make_column result_internal_column
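For intuition only, a heavily hedged sketch of how such a constant column might surface in the generated SQL when the dialect requires a literal cast (the placeholder syntax, alias and SQL type below are hypothetical; the exact form depends on the dialect's `make_cast`):

    -- the constant is passed as an interpolated parameter and cast to the SQL type inferred from the Enso value
    SELECT CAST(? AS INTEGER) AS "42" FROM ...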
|
||||
|
||||
## PRIVATE
|
||||
Create a unique temporary column name.
|
||||
|
@ -270,11 +270,11 @@ type Dialect
|
||||
Unimplemented.throw "This is an interface only."
|
||||
|
||||
## PRIVATE
|
||||
Generates a column for the given expression for use in the SELECT clause.
Generates an expression for use in the SELECT or WHERE clause.
Used for databases where the expression syntax in the SELECT clause
differs from the syntax in the WHERE clause.
|
||||
generate_column_for_select self base_gen expr:(SQL_Expression | Order_Descriptor | Query) name:Text -> SQL_Builder =
|
||||
_ = [base_gen, expr, name]
|
||||
generate_expression self base_gen expr:(SQL_Expression | Order_Descriptor | Query) for_select:Boolean -> SQL_Builder =
|
||||
_ = [base_gen, expr, for_select]
|
||||
Unimplemented.throw "This is an interface only."
|
||||
|
||||
## PRIVATE
|
||||
|
@ -23,3 +23,9 @@ type Dialect_Flag
|
||||
match column names in a case insensitive way, so that "Foo" and "foo" will refer to the
|
||||
same column.
|
||||
Supports_Case_Sensitive_Columns
|
||||
## PRIVATE
|
||||
Specifies if the backend supports the `Infinity` value for floating point columns.
|
||||
Supports_Infinity
|
||||
## PRIVATE
|
||||
Specifies whether text comparisons are case sensitive by default.
|
||||
Case_Sensitive_Text_Comparison
|
||||
|
@ -81,11 +81,7 @@ type SQL_Generator
|
||||
SQL_Expression.Operation kind arguments metadata ->
|
||||
op = dialect.dialect_operations.operations_dict.get kind (Error.throw <| Unsupported_Database_Operation.Error kind)
|
||||
parsed_args = arguments.map (self.generate_expression dialect)
|
||||
result = op parsed_args
|
||||
# If the function expects more arguments, we pass the metadata as the last argument.
|
||||
case result of
|
||||
_ : Function -> result metadata
|
||||
_ -> result
|
||||
self.apply_op_generator_with_metadata op parsed_args metadata
|
||||
SQL_Expression.Let _ binder bindee body ->
|
||||
wrapped_binder = dialect.wrap_identifier binder
|
||||
bindee_exp = SQL_Builder.code "SELECT " ++ (self.generate_expression dialect bindee).paren ++ " AS " ++ "x"
|
||||
@ -106,6 +102,14 @@ type SQL_Generator
|
||||
query : Query -> self.generate_sub_query dialect query
|
||||
descriptor : Order_Descriptor -> self.generate_order dialect descriptor
|
||||
|
||||
## PRIVATE
|
||||
apply_op_generator_with_metadata self op arguments metadata =
|
||||
result = op arguments
|
||||
# If the function expects more arguments, we pass the metadata as the last argument.
|
||||
case result of
|
||||
_ : Function -> result metadata
|
||||
_ -> result
|
||||
|
||||
## PRIVATE
|
||||
|
||||
Builds code for the FROM clause.
|
||||
@ -166,10 +170,6 @@ type SQL_Generator
|
||||
base_expression = self.generate_expression dialect order_descriptor.expression
|
||||
base_expression ++ collation ++ order_suffix ++ nulls_suffix
|
||||
|
||||
## PRIVATE
|
||||
default_generate_column self dialect expr:(SQL_Expression | Order_Descriptor | Query) name:Text -> SQL_Builder =
|
||||
self.generate_expression dialect expr ++ alias dialect name
|
||||
|
||||
## PRIVATE
|
||||
Generates SQL code corresponding to a SELECT statement.
|
||||
|
||||
@ -178,11 +178,12 @@ type SQL_Generator
|
||||
- ctx: A description of the SELECT clause.
|
||||
generate_select_query_sql : Dialect -> Vector (Pair Text SQL_Expression) -> Context -> SQL_Builder
|
||||
generate_select_query_sql self dialect columns ctx =
|
||||
gen_exprs exprs = exprs.map (self.generate_expression dialect)
|
||||
gen_exprs exprs = exprs.map (expr-> dialect.generate_expression self expr for_select=False)
|
||||
gen_column pair = (dialect.generate_expression self expr=pair.second for_select=True) ++ alias dialect pair.first
|
||||
|
||||
generated_columns = case columns of
|
||||
Nothing -> SQL_Builder.code "*"
|
||||
_ -> SQL_Builder.join ", " (columns.map (c-> dialect.generate_column_for_select self expr=c.second name=c.first))
|
||||
_ -> SQL_Builder.join ", " (columns.map gen_column)
|
||||
|
||||
from_part = self.generate_from_part dialect ctx.from_spec
|
||||
where_part = (SQL_Builder.join " AND " (gen_exprs ctx.where_filters)) . prefix_if_present " WHERE "
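To make the split concrete, a rough sketch of the statement shape this assembles (all fragments hypothetical): column expressions are generated for the SELECT clause with `for_select=True`, while filters are generated with `for_select=False` and joined into the WHERE clause:

    SELECT <col_expr_1> AS "a", <col_expr_2> AS "b"
    FROM <from_part>
    WHERE <filter_1> AND <filter_2>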
|
||||
|
@ -264,6 +264,8 @@ type Postgres_Dialect
|
||||
Dialect_Flag.Supports_Separate_NaN -> True
|
||||
Dialect_Flag.Supports_Nested_With_Clause -> True
|
||||
Dialect_Flag.Supports_Case_Sensitive_Columns -> True
|
||||
Dialect_Flag.Supports_Infinity -> True
|
||||
Dialect_Flag.Case_Sensitive_Text_Comparison -> True
|
||||
|
||||
## PRIVATE
|
||||
The default table types to use when listing tables.
|
||||
@ -326,8 +328,9 @@ type Postgres_Dialect
|
||||
False
|
||||
|
||||
## PRIVATE
|
||||
generate_column_for_select self base_gen expr:(SQL_Expression | Order_Descriptor | Query) name:Text -> SQL_Builder =
|
||||
base_gen.default_generate_column self expr name
|
||||
generate_expression self base_gen expr:(SQL_Expression | Order_Descriptor | Query) for_select:Boolean -> SQL_Builder =
|
||||
_ = for_select
|
||||
base_gen.generate_expression self expr
|
||||
|
||||
## PRIVATE
|
||||
ensure_query_has_no_holes : JDBC_Connection -> Text -> Nothing ! Illegal_Argument
|
||||
|
@ -275,6 +275,8 @@ type SQLite_Dialect
|
||||
Dialect_Flag.Supports_Separate_NaN -> False
|
||||
Dialect_Flag.Supports_Nested_With_Clause -> True
|
||||
Dialect_Flag.Supports_Case_Sensitive_Columns -> False
|
||||
Dialect_Flag.Supports_Infinity -> True
|
||||
Dialect_Flag.Case_Sensitive_Text_Comparison -> True
|
||||
|
||||
## PRIVATE
|
||||
The default table types to use when listing tables.
|
||||
@ -329,8 +331,9 @@ type SQLite_Dialect
|
||||
False
|
||||
|
||||
## PRIVATE
|
||||
generate_column_for_select self base_gen expr:(SQL_Expression | Order_Descriptor | Query) name:Text -> SQL_Builder =
|
||||
base_gen.default_generate_column self expr name
|
||||
generate_expression self base_gen expr:(SQL_Expression | Order_Descriptor | Query) for_select:Boolean -> SQL_Builder =
|
||||
_ = for_select
|
||||
base_gen.generate_expression self expr
|
||||
|
||||
## PRIVATE
|
||||
ensure_query_has_no_holes : JDBC_Connection -> Text -> Nothing ! Illegal_Argument
|
||||
|
@ -161,10 +161,14 @@ type SQLServer_Dialect
|
||||
sql_type = type_mapping.value_type_to_sql type_hint Problem_Behavior.Ignore
|
||||
sql_type.typeid
|
||||
stmt.setNull i java_type
|
||||
_ : Float ->
|
||||
if value.is_nan || value.is_infinite then stmt.setNull i Java_Types.REAL else
|
||||
stmt.setDouble i value
|
||||
_ : Time_Of_Day -> JDBCUtils.setLocalTimeViaTimeStamp stmt i value
|
||||
# Fallback to default logic for everything else
|
||||
_ -> fill_hole_default stmt i type_hint value
|
||||
Statement_Setter.Value custom_fill_hole
|
||||
|
||||
## PRIVATE
|
||||
make_cast : Internal_Column -> SQL_Type -> (SQL_Expression -> SQL_Type_Reference) -> Internal_Column
|
||||
make_cast self column target_type infer_result_type_from_database_callback =
|
||||
@ -215,7 +219,7 @@ type SQLServer_Dialect
|
||||
## PRIVATE
|
||||
prepare_fetch_types_query : SQL_Expression -> Context -> SQL_Statement
|
||||
prepare_fetch_types_query self expression context =
|
||||
Base_Generator.default_fetch_types_query self expression context where_filter_always_false_literal="1=0"
|
||||
Base_Generator.default_fetch_types_query self expression context
|
||||
|
||||
## PRIVATE
|
||||
check_aggregate_support : Aggregate_Column -> Boolean ! Unsupported_Database_Operation
|
||||
@ -234,6 +238,7 @@ type SQLServer_Dialect
|
||||
case feature of
|
||||
Feature.Select_Columns -> True
|
||||
Feature.Sort -> True
|
||||
Feature.Filter -> True
|
||||
_ -> False
|
||||
|
||||
## PRIVATE
|
||||
@ -247,6 +252,8 @@ type SQLServer_Dialect
|
||||
Dialect_Flag.Supports_Separate_NaN -> False
|
||||
Dialect_Flag.Supports_Nested_With_Clause -> True
|
||||
Dialect_Flag.Supports_Case_Sensitive_Columns -> False
|
||||
Dialect_Flag.Supports_Infinity -> False
|
||||
Dialect_Flag.Case_Sensitive_Text_Comparison -> False
|
||||
|
||||
## PRIVATE
|
||||
The default table types to use when listing tables.
|
||||
@ -310,97 +317,113 @@ type SQLServer_Dialect
|
||||
jdbc.ensure_query_has_no_holes raw_sql
|
||||
|
||||
## PRIVATE
|
||||
Returns a pair of a SQL_Builder for the given expression and a vector of columns
that have been used in the expression and need to be checked for nulls.
SQL Server needs special handling compared to other databases as it does not have a
boolean data type.
This means that you can write
SELECT * FROM MyTable WHERE [Column1] > [Column2]
but you cannot write
SELECT [Column1] > [Column2] FROM MyTable
To write the second query you need to write
SELECT CASE WHEN [Column1] IS NULL OR [Column2] IS NULL THEN NULL WHEN [Column1] > [Column2] THEN 1 ELSE 0 END FROM MyTable
The function below collects all of the fields that need to be checked for nulls, returning them in a vector
as the second element of the pair.
The first element of the pair is the SQL_Builder for the expression.
|
||||
generate_expression self base_gen expr = case expr of
|
||||
SQL_Expression.Column _ _ ->
|
||||
wrapped_name = base_gen.generate_expression self expr
|
||||
pair wrapped_name [wrapped_name]
|
||||
SQL_Expression.Constant value ->
|
||||
wrapped = case value of
|
||||
Nothing -> SQL_Builder.code "NULL"
|
||||
_ -> SQL_Builder.interpolation value
|
||||
pair wrapped [wrapped]
|
||||
SQL_Expression.Literal value ->
|
||||
wrapped = SQL_Builder.code value
|
||||
pair wrapped [wrapped]
|
||||
SQL_Expression.Text_Literal _ ->
|
||||
wrapped_literal = base_gen.generate_expression self expr
|
||||
pair wrapped_literal []
|
||||
SQL_Expression.Operation kind arguments metadata ->
|
||||
op = self.dialect_operations.operations_dict.get kind (Error.throw <| Unsupported_Database_Operation.Error kind)
|
||||
parsed_args_and_null_checks = arguments.map (c -> self.generate_expression base_gen c)
|
||||
parsed_args = parsed_args_and_null_checks.map .first
|
||||
null_checks = parsed_args_and_null_checks.map .second . flatten
|
||||
generate_expression self base_gen expr:(SQL_Expression | Order_Descriptor | Query) for_select:Boolean -> SQL_Builder =
|
||||
if for_select then _generate_expression self base_gen expr Expression_Kind.Value materialize_null_check=True . first else
|
||||
_generate_expression self base_gen expr Expression_Kind.Boolean_Condition . first
|
||||
|
||||
expr_result = case kind of
|
||||
## In the case that we actually want to check for null then we need to generate the null
|
||||
check sql for all the columns that have been used up to this point and that
|
||||
becomes the expression.
|
||||
"IS_NULL" -> _generate_null_check_sql_builder null_checks
|
||||
_ ->
|
||||
result = op parsed_args
|
||||
# If the function expects more arguments, we pass the metadata as the last argument.
|
||||
case result of
|
||||
_ : Function -> result metadata
|
||||
_ -> result
|
||||
null_checks_result = if kind == "IS_NULL" then [] else null_checks
|
||||
## Returns a pair of a SQL_Builder for the given expression and a vector of columns
that have been used in the expression and need to be checked for nulls.
SQL Server needs special handling compared to other databases as it does not have a
boolean data type.
This means that you can write
SELECT * FROM MyTable WHERE [Column1] > [Column2]
but you cannot write
SELECT [Column1] > [Column2] FROM MyTable
To write the second query you need to write
SELECT CASE WHEN [Column1] IS NULL OR [Column2] IS NULL THEN NULL WHEN [Column1] > [Column2] THEN 1 ELSE 0 END FROM MyTable
The function below collects all of the fields that need to be checked for nulls, returning them in a vector
as the second element of the pair.
The first element of the pair is the SQL_Builder for the expression.
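As a hedged illustration of the two directions of conversion used below (hypothetical column names, not verbatim generator output):

    -- a stored value used where a boolean condition is required:
    SELECT * FROM MyTable WHERE [IsActive] = 1
    -- a boolean predicate used where a value is required:
    SELECT IIF([Column1] > [Column2], CAST(1 AS BIT), CAST(0 AS BIT)) FROM MyTable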
|
||||
private _generate_expression dialect base_gen expr expression_kind:Expression_Kind materialize_null_check:Boolean=False = case expr of
|
||||
SQL_Expression.Column _ _ ->
|
||||
wrapped_name = base_gen.generate_expression dialect expr
|
||||
typed_result = _align_type wrapped_name Expression_Kind.Value expression_kind
|
||||
pair typed_result [wrapped_name]
|
||||
SQL_Expression.Constant value ->
|
||||
wrapped = case value of
|
||||
Nothing -> SQL_Builder.code "NULL"
|
||||
_ -> SQL_Builder.interpolation value
|
||||
pair wrapped [wrapped]
|
||||
SQL_Expression.Literal value ->
|
||||
modified_value = case value of
|
||||
"TRUE" -> "1"
|
||||
"FALSE" -> "0"
|
||||
_ -> value
|
||||
wrapped = _align_type (SQL_Builder.code modified_value) Expression_Kind.Value expression_kind
|
||||
pair wrapped [wrapped]
|
||||
SQL_Expression.Text_Literal _ ->
|
||||
wrapped_literal = base_gen.generate_expression dialect expr
|
||||
pair wrapped_literal []
|
||||
SQL_Expression.Operation op_kind arguments metadata ->
|
||||
op = dialect.dialect_operations.operations_dict.get op_kind (Error.throw <| Unsupported_Database_Operation.Error op_kind)
|
||||
expected_kind = _op_expected_kind op_kind
|
||||
parsed_args_and_null_checks = arguments.map (c -> _generate_expression dialect base_gen c expected_kind)
|
||||
parsed_args = parsed_args_and_null_checks.map .first
|
||||
null_checks = parsed_args_and_null_checks.map .second . flatten
|
||||
|
||||
|
||||
pair expr_result null_checks_result
|
||||
query : Query -> pair (base_gen.generate_sub_query self query) []
|
||||
## In the case that we actually want to check for null then we need to generate the null
|
||||
check sql for all the columns that have been used up to this point and that
|
||||
becomes the expression.
|
||||
expr_result = if op_kind == "IS_NULL" then _generate_is_null_expr null_checks else
|
||||
base_gen.apply_op_generator_with_metadata op parsed_args metadata
|
||||
null_checks_result = if op_kind == "IS_NULL" then [] else null_checks
|
||||
has_kind = _op_return_kind op_kind
|
||||
converted_expr = _align_type expr_result has_kind expression_kind
|
||||
final_expr = if materialize_null_check then _generate_null_check_sql_builder null_checks_result converted_expr else
|
||||
converted_expr
|
||||
|
||||
## PRIVATE
|
||||
generate_column_for_select self base_gen expr:(SQL_Expression | Order_Descriptor | Query) name:Text -> SQL_Builder =
|
||||
expr_null_checks_pair = self.generate_expression base_gen expr
|
||||
base_expr = expr_null_checks_pair.first
|
||||
built_expr = case expr of
|
||||
SQL_Expression.Operation _ _ _ ->
|
||||
null_check = if expr_null_checks_pair.second.length == 0 then "" else
|
||||
SQL_Builder.code "WHEN " ++ _generate_null_check_sql_builder expr_null_checks_pair.second ++ " THEN NULL "
|
||||
SQL_Builder.code "CASE " ++ null_check ++ "WHEN " ++ base_expr ++ " THEN 1 ELSE 0 END"
|
||||
_ -> base_expr
|
||||
built_expr ++ Base_Generator.alias self name
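A sketch of the projected form this builds for an operation column (all pieces hypothetical, spacing approximate):

    CASE WHEN ([A]) IS NULL OR ([B]) IS NULL THEN NULL WHEN <operation SQL> THEN 1 ELSE 0 END AS "name"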
|
||||
pair final_expr null_checks_result
|
||||
query : Query -> pair (base_gen.generate_sub_query dialect query) []
|
||||
|
||||
## PRIVATE
|
||||
private _generate_null_check_sql_builder null_checks:Vector -> SQL_Builder =
|
||||
type Expression_Kind
|
||||
Boolean_Condition
|
||||
Value
|
||||
|
||||
private _align_type expr (has_kind : Expression_Kind) (expected_kind : Expression_Kind) =
|
||||
if has_kind == expected_kind then expr else
|
||||
case expected_kind of
|
||||
Expression_Kind.Boolean_Condition -> _convert_value_to_boolean_expression expr
|
||||
Expression_Kind.Value -> _convert_boolean_to_value_expression expr
|
||||
|
||||
private _convert_value_to_boolean_expression expr =
|
||||
expr ++ " = 1"
|
||||
|
||||
private _convert_boolean_to_value_expression expr =
|
||||
SQL_Builder.code "IIF(" ++ expr ++ ", CAST(1 AS BIT), CAST(0 AS BIT))"
|
||||
|
||||
private _generate_null_check_sql_builder null_checks:Vector expr -> SQL_Builder =
|
||||
if null_checks.length == 0 then expr else
|
||||
SQL_Builder.code "IIF(" ++ _generate_is_null_expr null_checks ++ ", NULL, " ++ expr ++ ")"
|
||||
|
||||
private _generate_is_null_expr null_checks:Vector -> SQL_Builder =
|
||||
(null_checks.map it->(it.paren ++ " IS NULL ")) . reduce acc-> i-> acc ++ "OR " ++ i
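Roughly, for null checks on two hypothetical columns [A] and [B] and a value expression <expr>, these helpers produce:

    -- _generate_is_null_expr:
    ([A]) IS NULL OR ([B]) IS NULL
    -- _generate_null_check_sql_builder:
    IIF(([A]) IS NULL OR ([B]) IS NULL , NULL, <expr>)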
|
||||
|
||||
private _op_expected_kind op -> Expression_Kind =
|
||||
if ["NOT"].contains op then Expression_Kind.Boolean_Condition else Expression_Kind.Value
|
||||
|
||||
private _op_return_kind op -> Expression_Kind =
|
||||
return_bool_ops = ["NOT", "BETWEEN", ">=", ">", "<=", "<", "!=", "==", "IN", "IS_NULL", "LIKE", "STARTS_WITH", "ENDS_WITH", "CONTAINS", "EQUALS_IGNORE_CASE", "IS_EMPTY"]
|
||||
if return_bool_ops.contains op then Expression_Kind.Boolean_Condition else Expression_Kind.Value
|
||||
|
||||
## PRIVATE
|
||||
make_dialect_operations =
|
||||
cases = [["LOWER", Base_Generator.make_function "LOWER"], ["UPPER", Base_Generator.make_function "UPPER"]]
|
||||
text = [starts_with, contains, ends_with, agg_shortest, agg_longest, make_case_sensitive, ["REPLACE", replace], left, right]+concat_ops+cases+trim_ops
|
||||
text = [starts_with, contains, ends_with, like, agg_shortest, agg_longest, make_case_sensitive, ["REPLACE", replace], left, right]+concat_ops+cases+trim_ops
|
||||
counts = [agg_count_is_null, agg_count_empty, agg_count_not_empty, ["COUNT_DISTINCT", agg_count_distinct], ["COUNT_DISTINCT_INCLUDE_NULL", agg_count_distinct_include_null]]
|
||||
arith_extensions = [is_nan, is_inf, is_finite, floating_point_div, mod_op, decimal_div, decimal_mod, ["ROW_MIN", Base_Generator.make_function "LEAST"], ["ROW_MAX", Base_Generator.make_function "GREATEST"]]
|
||||
bool = [bool_or, bool_not]
|
||||
eq = lift_binary_op "==" make_equals
|
||||
compare = [eq]
|
||||
arith_extensions = [floating_point_div, mod_op, decimal_div, decimal_mod, ["ROW_MIN", Base_Generator.make_function "LEAST"], ["ROW_MAX", Base_Generator.make_function "GREATEST"]]
|
||||
bool = [bool_or]
|
||||
|
||||
stddev_pop = ["STDDEV_POP", Base_Generator.make_function "stddev_pop"]
|
||||
stddev_samp = ["STDDEV_SAMP", Base_Generator.make_function "stddev_samp"]
|
||||
stats = [agg_median, agg_mode, agg_percentile, stddev_pop, stddev_samp]
|
||||
date_ops = [make_extract_as_int "year", make_extract_as_int "quarter", make_extract_as_int "month", make_extract_as_int "week", make_extract_as_int "day", make_extract_as_int "hour", make_extract_as_int "minute", make_extract_fractional_as_int "second", make_extract_fractional_as_int "millisecond" modulus=1000, make_extract_fractional_as_int "microsecond" modulus=1000, ["date_add", make_date_add], ["date_diff", make_date_diff], ["date_trunc_to_day", make_date_trunc_to_day]]
|
||||
special_overrides = [is_null]
|
||||
special_overrides = [is_empty]
|
||||
other = [["RUNTIME_ERROR", make_runtime_error_op]]
|
||||
my_mappings = text + counts + stats + first_last_aggregators + arith_extensions + bool + compare + date_ops + special_overrides + other
|
||||
my_mappings = text + counts + stats + first_last_aggregators + arith_extensions + bool + date_ops + special_overrides + other
|
||||
base = Base_Generator.base_dialect_operations . extend_with my_mappings
|
||||
Base_Generator.Dialect_Operations.Value (base.operations_dict.remove "IS_IN")
|
||||
|
||||
## PRIVATE
|
||||
is_null = Base_Generator.lift_unary_op "IS_NULL" arg->
|
||||
arg.paren ++ " IS NULL"
|
||||
|
||||
## PRIVATE
|
||||
agg_count_is_null = Base_Generator.lift_unary_op "COUNT_IS_NULL" arg->
|
||||
SQL_Builder.code "SUM(CASE WHEN " ++ arg.paren ++ " IS NULL THEN 1 ELSE 0 END)"
|
||||
@ -441,14 +464,6 @@ first_last_aggregators =
|
||||
last_not_null = make_first_aggregator reverse=True ignore_null=True
|
||||
[["FIRST", first], ["FIRST_NOT_NULL", first_not_null], ["LAST", last], ["LAST_NOT_NULL", last_not_null]]
|
||||
|
||||
## PRIVATE
|
||||
make_equals a b =
|
||||
case a.build.prepare.second==[True] of
|
||||
True -> b.paren
|
||||
False -> case b.build.prepare.second==[True] of
|
||||
True -> a.paren
|
||||
False -> a.paren ++ " = " ++ b.paren
|
||||
|
||||
## PRIVATE
|
||||
make_first_aggregator reverse ignore_null args =
|
||||
if args.length < 2 then Error.throw (Illegal_State.Error "Insufficient number of arguments for the operation.") else
|
||||
@ -515,20 +530,38 @@ agg_count_distinct_include_null args = case args.length == 1 of
|
||||
False -> Error.throw (Illegal_Argument.Error "COUNT_DISTINCT supports only single arguments in SQLServer.")
|
||||
|
||||
## PRIVATE
|
||||
starts_with = Base_Generator.lift_binary_sql_function "STARTS_WITH" "STARTSWITH"
|
||||
is_empty = Base_Generator.lift_unary_op "IS_EMPTY" arg->
|
||||
is_null = (arg ++ " IS NULL").paren
|
||||
is_empty = (SQL_Builder.code 'DATALENGTH(' ++ arg ++ ') = 0').paren
|
||||
(is_null ++ " OR " ++ is_empty).paren
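A sketch of the predicate this yields for a hypothetical text column [X]:

    (([X] IS NULL) OR (DATALENGTH([X]) = 0))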
|
||||
|
||||
|
||||
## PRIVATE
|
||||
ends_with = Base_Generator.lift_binary_sql_function "ENDS_WITH" "ENDSWITH"
|
||||
starts_with = Base_Generator.lift_binary_op "STARTS_WITH" str-> sub->
|
||||
res = str ++ " LIKE (" ++ sub ++ "+'%')"
|
||||
res.paren
|
||||
|
||||
## PRIVATE
|
||||
contains = Base_Generator.lift_binary_sql_function "CONTAINS" "CONTAINS"
|
||||
like = Base_Generator.lift_binary_op "LIKE" str-> sub->
|
||||
res = str ++ " LIKE (REPLACE(REPLACE(" ++ sub ++ ",'[','¬['), ']', '¬]')) ESCAPE '¬'"
|
||||
res.paren
|
||||
|
||||
## PRIVATE
|
||||
ends_with = Base_Generator.lift_binary_op "ENDS_WITH" str-> sub->
|
||||
res = str ++ " LIKE ('%'+" ++ sub ++ ")"
|
||||
res.paren
|
||||
|
||||
## PRIVATE
|
||||
contains = Base_Generator.lift_binary_op "CONTAINS" str-> sub->
|
||||
res = SQL_Builder.code "CHARINDEX(" ++ sub ++ ", " ++ str ++ ") > 0"
|
||||
res.paren
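Rough sketches of the predicates these operations lift to, for hypothetical operands str and sub:

    -- STARTS_WITH: (str LIKE (sub+'%'))
    -- ENDS_WITH:   (str LIKE ('%'+sub))
    -- CONTAINS:    (CHARINDEX(sub, str) > 0)
    -- LIKE:        (str LIKE (REPLACE(REPLACE(sub,'[','¬['), ']', '¬]')) ESCAPE '¬')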
|
||||
|
||||
## PRIVATE
|
||||
make_contains_expr expr substring = contains [expr, substring]
|
||||
|
||||
## PRIVATE
|
||||
make_case_sensitive = Base_Generator.lift_unary_op "MAKE_CASE_SENSITIVE" arg->
|
||||
SQL_Builder.code "((" ++ arg ++ ') COLLATE "ucs_basic")'
|
||||
SQL_Builder.code "((" ++ arg ++ ') COLLATE Latin1_General_BIN2)'
|
||||
|
||||
## PRIVATE
|
||||
left = Base_Generator.lift_binary_op "LEFT" str-> n->
|
||||
@ -560,18 +593,6 @@ make_order_descriptor internal_column sort_direction text_ordering =
|
||||
folded_expression = SQL_Expression.Operation "LOWER" [upper]
|
||||
Order_Descriptor.Value folded_expression sort_direction nulls_order=nulls collation=Nothing
|
||||
|
||||
## PRIVATE
|
||||
is_nan = Base_Generator.lift_unary_op "IS_NAN" arg->
|
||||
(arg ++ " in (double precision 'NaN')").paren
|
||||
|
||||
## PRIVATE
|
||||
is_inf = Base_Generator.lift_unary_op "IS_INF" arg->
|
||||
(arg ++ " in (double precision 'Infinity', double precision '-Infinity')").paren
|
||||
|
||||
## PRIVATE
|
||||
is_finite = Base_Generator.lift_unary_op "IS_FINITE" arg->
|
||||
(arg ++ " not in (double precision 'Infinity', double precision '-Infinity', double precision 'NaN')").paren
|
||||
|
||||
## PRIVATE
|
||||
bool_or = Base_Generator.lift_unary_op "BOOL_OR" arg->
|
||||
SQL_Builder.code "bool_or(" ++ arg ++ ")"
|
||||
|
@ -253,6 +253,8 @@ type Snowflake_Dialect
|
||||
Dialect_Flag.Supports_Separate_NaN -> True
|
||||
Dialect_Flag.Supports_Nested_With_Clause -> True
|
||||
Dialect_Flag.Supports_Case_Sensitive_Columns -> True
|
||||
Dialect_Flag.Supports_Infinity -> True
|
||||
Dialect_Flag.Case_Sensitive_Text_Comparison -> True
|
||||
|
||||
## PRIVATE
|
||||
The default table types to use when listing tables.
|
||||
@ -309,8 +311,9 @@ type Snowflake_Dialect
|
||||
_ -> False
|
||||
|
||||
## PRIVATE
|
||||
generate_column_for_select self base_gen expr:(SQL_Expression | Order_Descriptor | Query) name:Text -> SQL_Builder =
|
||||
base_gen.default_generate_column self expr name
|
||||
generate_expression self base_gen expr:(SQL_Expression | Order_Descriptor | Query) for_select:Boolean -> SQL_Builder =
|
||||
_ = for_select
|
||||
base_gen.generate_expression self expr
|
||||
|
||||
## PRIVATE
|
||||
ensure_query_has_no_holes : JDBC_Connection -> Text -> Nothing ! Illegal_Argument
|
||||
|
@ -191,12 +191,15 @@ add_sqlserver_specs suite_builder create_connection_fn =
|
||||
|
||||
in_mem_table = Table.new columns
|
||||
in_mem_table.select_into_database_table (connection.if_nothing default_connection.get) name primary_key=Nothing temporary=True
|
||||
light_table_builder columns =
|
||||
default_connection.get.base_connection.create_literal_table (Table.new columns) "literal_table"
|
||||
light_table_builder columns = table_builder columns
|
||||
|
||||
## TODO need to be able to type_hint the nulls for this to work
|
||||
light_table_builder columns = default_connection.get.base_connection.create_literal_table (Table.new columns) "literal_table"
|
||||
https://github.com/enso-org/enso/issues/11487
|
||||
|
||||
materialize = .read
|
||||
|
||||
common_selection = Common_Table_Operations.Main.Test_Selection.Config order_by_unicode_normalization_by_default=True allows_mixed_type_comparisons=False text_length_limited_columns=True fixed_length_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=True char_max_size_after_substring=..Reset supports_decimal_type=True supported_replace_params=supported_replace_params run_advanced_edge_case_tests_by_default=True supports_date_time_without_timezone=False date_time=False is_nan_comparable=True
|
||||
common_selection = Common_Table_Operations.Main.Test_Selection.Config order_by_unicode_normalization_by_default=True allows_mixed_type_comparisons=False text_length_limited_columns=True fixed_length_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=True char_max_size_after_substring=..Reset supports_decimal_type=True supported_replace_params=supported_replace_params run_advanced_edge_case_tests_by_default=True supports_date_time_without_timezone=False date_time=False is_nan_comparable=False
|
||||
aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last_row_order=False aggregation_problems=False
|
||||
agg_in_memory_table = (enso_project.data / "data.csv") . read
|
||||
|
||||
|
@ -509,10 +509,9 @@ snowflake_specific_spec suite_builder default_connection db_name setup =
|
||||
do_round -1.2222222222222235 15 use_bankers=True . should_equal -1.222222222222224
|
||||
|
||||
group_builder.specify "Can handle NaN/Infinity" <|
|
||||
nan_result = if setup.test_selection.is_nan_and_nothing_distinct then Number.nan else Nothing
|
||||
ops = [.round, .truncate, .ceil, .floor]
|
||||
ops.each op->
|
||||
do_op Number.nan op . should_equal nan_result
|
||||
do_op Number.nan op . should_equal Number.nan
|
||||
do_op Number.positive_infinity op . should_equal Number.positive_infinity
|
||||
do_op Number.negative_infinity op . should_equal Number.negative_infinity
|
||||
|
||||
|
BIN test/Table_Tests/data/transient/sqlite_test.db-journal (Normal file)
Binary file not shown.
@ -918,7 +918,7 @@ add_column_operation_specs suite_builder setup =
|
||||
(y ^ "a").should_fail_with Invalid_Value_Type
|
||||
(y ^ 42).should_fail_with Invalid_Value_Type
|
||||
|
||||
case setup.test_selection.is_nan_and_nothing_distinct of
|
||||
case setup.flagged ..Supports_Separate_NaN of
|
||||
True ->
|
||||
table = Lazy_Ref.Value <|
|
||||
build_sorted_table [["X", [1.5, 3.0, Number.positive_infinity, Number.negative_infinity, Number.nan, Nothing]], ["Y", [1, 2, 3, 4, 5, Nothing]], ["Z", ["1", "2", "3", "4", "5", Nothing]]]
|
||||
|
@ -59,11 +59,20 @@ add_filter_specs suite_builder setup =
|
||||
t.filter "X" (..Equal to=100) . at "X" . to_vector . should_equal [100]
|
||||
t.filter "X" (Filter_Condition.Equal to=123) . at "X" . to_vector . should_equal []
|
||||
|
||||
t.filter "X" (Filter_Condition.Is_Finite) . at "ix" . to_vector . should_equal [1, 2, 4, 5]
|
||||
t.filter "X" (Filter_Condition.Is_Infinite) . at "ix" . to_vector . should_equal []
|
||||
if test_selection.is_nan_and_nothing_distinct then
|
||||
t.filter "X" (Filter_Condition.Is_Nan) . at "ix" . to_vector . should_equal []
|
||||
t.filter "X" (Filter_Condition.Not_Nan) . at "ix" . to_vector . should_equal [1, 2, 4, 5]
|
||||
case setup.flagged ..Supports_Infinity of
|
||||
True ->
|
||||
t.filter "X" (Filter_Condition.Is_Finite) . at "ix" . to_vector . should_equal [1, 2, 4, 5]
|
||||
t.filter "X" (Filter_Condition.Is_Infinite) . at "ix" . to_vector . should_equal []
|
||||
False ->
|
||||
t.filter "X" (Filter_Condition.Is_Finite) . should_fail_with Unsupported_Database_Operation
|
||||
t.filter "X" (Filter_Condition.Is_Infinite) . should_fail_with Unsupported_Database_Operation
|
||||
case setup.flagged ..Supports_Separate_NaN of
|
||||
True ->
|
||||
t.filter "X" (Filter_Condition.Is_Nan) . at "ix" . to_vector . should_equal []
|
||||
t.filter "X" (Filter_Condition.Not_Nan) . at "ix" . to_vector . should_equal [1, 2, 4, 5]
|
||||
False ->
|
||||
t.filter "X" (Filter_Condition.Is_Nan) . should_fail_with Unsupported_Database_Operation
|
||||
t.filter "X" (Filter_Condition.Not_Nan) . should_fail_with Unsupported_Database_Operation
|
||||
|
||||
v = t.filter "X" (Filter_Condition.Equal to="SOME TEXT :)") . at "X" . to_vector
|
||||
## We do not do typechecking at Enso level here, as it is
|
||||
@ -93,22 +102,29 @@ add_filter_specs suite_builder setup =
|
||||
group_builder.specify "by float operations" <|
|
||||
t = table_builder [["ix", [1, 2, 3, 4, 5, 6]], ["X", [100.0, 2.5, Nothing, Number.nan, Number.positive_infinity, Number.negative_infinity]]]
|
||||
|
||||
t.filter "X" (Filter_Condition.Less than=10.0) . at "X" . to_vector . should_equal [2.5, Number.negative_infinity]
|
||||
case setup.flagged ..Supports_Infinity of
|
||||
True -> t.filter "X" (Filter_Condition.Less than=10.0) . at "X" . to_vector . should_equal [2.5, Number.negative_infinity]
|
||||
False -> t.filter "X" (Filter_Condition.Less than=10.0) . at "X" . to_vector . should_equal [2.5]
|
||||
|
||||
# In some backends, NaN is greater than any other value, so it is > 10.0; in other implementations it is usually neither greater nor smaller, so it gets filtered out.
|
||||
nan_is_comparable = setup.test_selection.is_nan_comparable
|
||||
t.filter "X" (Filter_Condition.Greater than=10.0) . at "ix" . to_vector . should_equal <|
|
||||
if nan_is_comparable then [1, 4, 5] else [1, 5]
|
||||
case setup.flagged ..Supports_Infinity of
|
||||
True -> if nan_is_comparable then [1, 4, 5] else [1, 5]
|
||||
False -> if nan_is_comparable then [1, 4] else [1]
|
||||
|
||||
# Similarly, PostgreSQL and Snowflake treat NaN==NaN; we assume `nan_is_comparable` implies that.
|
||||
# If needed, this may become a separate flag in the future.
|
||||
t.filter "X" (Filter_Condition.Equal to=Number.nan) . at "ix" . to_vector . should_equal <|
|
||||
if nan_is_comparable then [4] else []
|
||||
t.filter "X" (Filter_Condition.Equal to=Number.positive_infinity) . at "ix" . to_vector . should_equal [5]
|
||||
|
||||
t.filter "X" Filter_Condition.Is_Infinite . at "ix" . to_vector . should_equal [5, 6]
|
||||
t.filter "X" Filter_Condition.Is_Finite . at "ix" . to_vector . should_equal [1, 2]
|
||||
if test_selection.is_nan_and_nothing_distinct then
|
||||
if setup.flagged ..Supports_Infinity then
|
||||
t.filter "X" (Filter_Condition.Equal to=Number.positive_infinity) . at "ix" . to_vector . should_equal [5]
|
||||
|
||||
t.filter "X" Filter_Condition.Is_Infinite . at "ix" . to_vector . should_equal [5, 6]
|
||||
t.filter "X" Filter_Condition.Is_Finite . at "ix" . to_vector . should_equal [1, 2]
|
||||
|
||||
if setup.flagged ..Supports_Separate_NaN then
|
||||
t.filter "X" Filter_Condition.Is_Nan . at "ix" . to_vector . should_equal [4]
|
||||
t.filter "X" Filter_Condition.Not_Nan . at "ix" . to_vector . should_equal [1, 2, 5, 6]
|
||||
|
||||
@ -151,7 +167,9 @@ add_filter_specs suite_builder setup =
|
||||
t.filter "X" (Filter_Condition.Between (Column_Ref.Name "Y") "bzzzz") . at "X" . to_vector . should_equal ["abb", "baca", "b"]
|
||||
|
||||
t2 = table_builder [["X", ["A", "a", "b"]], ["Y", ["a", "B", "b"]]]
|
||||
t2.filter "X" (Filter_Condition.Equal to="a") . at "X" . to_vector . should_equal ["a"]
|
||||
case setup.flagged ..Case_Sensitive_Text_Comparison of
|
||||
True -> t2.filter "X" (Filter_Condition.Equal to="a") . at "X" . to_vector . should_equal ["a"]
|
||||
False -> t2.filter "X" (Filter_Condition.Equal to="a") . at "X" . to_vector . should_equal ["A", "a"]
|
||||
t2.filter "X" (Filter_Condition.Equal_Ignore_Case to="a") . at "X" . to_vector . should_equal ["A", "a"]
|
||||
t2.filter "X" (Filter_Condition.Equal_Ignore_Case to=(Column_Ref.Name "Y")) . at "X" . to_vector . should_equal ["A", "b"]
|
||||
|
||||
@ -171,18 +189,31 @@ add_filter_specs suite_builder setup =
|
||||
t.filter "X" (Filter_Condition.Contains "AC" Case_Sensitivity.Sensitive) . at "X" . to_vector . should_equal []
|
||||
t.filter "X" (Filter_Condition.Contains "AC" Case_Sensitivity.Insensitive) . at "X" . to_vector . should_equal ["bacb"]
|
||||
|
||||
t.filter "X" (Filter_Condition.Starts_With (t.at "Y")) . at "X" . to_vector . should_equal ["abb"]
|
||||
t.filter "X" (Filter_Condition.Starts_With (t.at "Y") keep_or_remove=Filter_Action.Remove) . at "X" . to_vector . should_equal ["bacb", "banana", Nothing, "nana"]
|
||||
case setup.flagged ..Case_Sensitive_Text_Comparison of
|
||||
True ->
|
||||
t.filter "X" (Filter_Condition.Starts_With (t.at "Y")) . at "X" . to_vector . should_equal ["abb"]
|
||||
t.filter "X" (Filter_Condition.Starts_With (t.at "Y") keep_or_remove=Filter_Action.Remove) . at "X" . to_vector . should_equal ["bacb", "banana", Nothing, "nana"]
|
||||
False ->
|
||||
t.filter "X" (Filter_Condition.Starts_With (t.at "Y")) . at "X" . to_vector . should_equal ["abb", "bacb"]
|
||||
t.filter "X" (Filter_Condition.Starts_With (t.at "Y") keep_or_remove=Filter_Action.Remove) . at "X" . to_vector . should_equal ["banana", Nothing, "nana"]
|
||||
|
||||
t.filter "X" (Filter_Condition.Starts_With (t.at "Y") Case_Sensitivity.Sensitive) . at "X" . to_vector . should_equal ["abb"]
|
||||
t.filter "X" (Filter_Condition.Starts_With (t.at "Y") Case_Sensitivity.Insensitive) . at "X" . to_vector . should_equal ["abb", "bacb"]
|
||||
t.filter "X" (Filter_Condition.Ends_With (t.at "Y")) . at "X" . to_vector . should_equal ["nana"]
|
||||
|
||||
case setup.flagged ..Case_Sensitive_Text_Comparison of
|
||||
True -> t.filter "X" (Filter_Condition.Ends_With (t.at "Y")) . at "X" . to_vector . should_equal ["nana"]
|
||||
False -> t.filter "X" (Filter_Condition.Ends_With (t.at "Y")) . at "X" . to_vector . should_equal ["bacb", "nana"]
|
||||
t.filter "X" (Filter_Condition.Ends_With (t.at "Y") Case_Sensitivity.Sensitive) . at "X" . to_vector . should_equal ["nana"]
|
||||
t.filter "X" (Filter_Condition.Ends_With (t.at "Y") Case_Sensitivity.Insensitive) . at "X" . to_vector . should_equal ["bacb", "nana"]
|
||||
t.filter "X" (Filter_Condition.Contains (t.at "Y")) . at "X" . to_vector . should_equal ["abb", "nana"]
|
||||
case setup.flagged ..Case_Sensitive_Text_Comparison of
|
||||
True -> t.filter "X" (Filter_Condition.Contains (t.at "Y")) . at "X" . to_vector . should_equal ["abb", "nana"]
|
||||
False -> t.filter "X" (Filter_Condition.Contains (t.at "Y")) . at "X" . to_vector . should_equal ["abb", "bacb", "nana"]
|
||||
t.filter "X" (Filter_Condition.Contains (t.at "Y") Case_Sensitivity.Sensitive) . at "X" . to_vector . should_equal ["abb", "nana"]
|
||||
t.filter "X" (Filter_Condition.Contains (t.at "Y") Case_Sensitivity.Insensitive) . at "X" . to_vector . should_equal ["abb", "bacb", "nana"]
|
||||
|
||||
t.filter "X" (Filter_Condition.Starts_With (Column_Ref.Name "Y")) . at "X" . to_vector . should_equal ["abb"]
|
||||
case setup.flagged ..Case_Sensitive_Text_Comparison of
|
||||
True -> t.filter "X" (Filter_Condition.Starts_With (Column_Ref.Name "Y")) . at "X" . to_vector . should_equal ["abb"]
|
||||
False -> t.filter "X" (Filter_Condition.Starts_With (Column_Ref.Name "Y")) . at "X" . to_vector . should_equal ["abb", "bacb"]
|
||||
t.filter "X" (Filter_Condition.Ends_With (Column_Ref.Name "Y") Case_Sensitivity.Insensitive) . at "X" . to_vector . should_equal ["bacb", "nana"]
|
||||
t.filter "X" (Filter_Condition.Contains (Column_Ref.Name "Y") Case_Sensitivity.Insensitive) . at "X" . to_vector . should_equal ["abb", "bacb", "nana"]
|
||||
|
||||
@ -273,7 +304,7 @@ add_filter_specs suite_builder setup =
|
||||
t2.at "ix" . to_vector . should_equal [2, 4]
|
||||
t2.at "X" . to_vector . should_equal [1, 4]
|
||||
|
||||
group_builder.specify "by an Is_In check" <|
|
||||
if setup.is_operation_supported "IS_IN" then group_builder.specify "by an Is_In check" <|
|
||||
t = table_builder [["ix", [1, 2, 3, Nothing, 5, 6]], ["X", ["a", "b", "ccc", "X", "f", "2"]]]
|
||||
t1 = table_builder [["txt", ["X", "a", "c", Nothing]], ["int", [Nothing, 2, 5, 4]], ["bool", [True, Nothing, Nothing, True]]]
|
||||
|
||||
@ -310,7 +341,7 @@ add_filter_specs suite_builder setup =
|
||||
t3.filter "B" (Filter_Condition.Is_In [False]) . at "B" . to_vector . should_equal [False, False, False]
|
||||
t3.filter "C" (Filter_Condition.Is_In [False, False]) . at "C" . to_vector . should_equal [False]
|
||||
|
||||
group_builder.specify "does not allow Column_Ref in Is_In because that would be confusing" <|
|
||||
if setup.is_operation_supported "IS_IN" then group_builder.specify "does not allow Column_Ref in Is_In because that would be confusing" <|
|
||||
## Is In and Not In check if a value is contained anywhere in a provided collection (e.g. column),
|
||||
NOT on a row-by-row basis like all other operations. Column_Ref is used with row-by-row ops,
|
||||
so this would only cause confusion. Very rarely someone wants to filter a column by Is_In
|
||||
@ -354,7 +385,7 @@ add_filter_specs suite_builder setup =
|
||||
|
||||
check_problem <| t.filter "a" Filter_Condition.Is_Finite
|
||||
check_problem <| t.filter "a" Filter_Condition.Is_Infinite
|
||||
if test_selection.is_nan_and_nothing_distinct then
|
||||
if setup.flagged ..Supports_Separate_NaN then
|
||||
check_problem <| t.filter "a" Filter_Condition.Is_Nan
|
||||
check_problem <| t.filter "a" Filter_Condition.Not_Nan
|
||||
|
||||
@ -435,13 +466,16 @@ add_filter_specs suite_builder setup =
|
||||
err2 = t2.filter "Y" (Filter_Condition.Not_Equal 5) on_problems=..Report_Error
|
||||
err2.should_fail_with Floating_Point_Equality
|
||||
|
||||
group_builder.specify "should attach a warning when Nothing is used as a value in a comparison or `is_in` `Filter_Condition`" <|
|
||||
group_builder.specify "should attach a warning when Nothing is used as a value in a comparison `Filter_Condition`" <|
|
||||
t = table_builder [["x", [1, 2, 3]]]
|
||||
fcs = [Filter_Condition.Equal Nothing, Filter_Condition.Not_Equal Nothing]
|
||||
+ [Filter_Condition.Less Nothing, Filter_Condition.Equal_Or_Less Nothing]
|
||||
+ [Filter_Condition.Equal_Or_Greater Nothing, Filter_Condition.Greater Nothing]
|
||||
+ [Filter_Condition.Between Nothing Nothing , Filter_Condition.Is_In [Nothing]]
|
||||
+ [Filter_Condition.Is_In [1, Nothing, 2]]
|
||||
fcs = [Filter_Condition.Equal Nothing, Filter_Condition.Not_Equal Nothing, Filter_Condition.Less Nothing, Filter_Condition.Equal_Or_Less Nothing, Filter_Condition.Equal_Or_Greater Nothing, Filter_Condition.Greater Nothing]
|
||||
fcs.map fc->
|
||||
Test.with_clue fc.to_text <|
|
||||
Problems.expect_warning Nothing_Value_In_Filter_Condition (t.filter "x" fc . at "x")
|
||||
|
||||
if setup.is_operation_supported "IS_IN" then group_builder.specify "should attach a warning when Nothing is used as a value in a `is_in` `Filter_Condition`" <|
|
||||
t = table_builder [["x", [1, 2, 3]]]
|
||||
fcs = [Filter_Condition.Between Nothing Nothing , Filter_Condition.Is_In [Nothing] , Filter_Condition.Is_In [1, Nothing, 2]]
|
||||
fcs.map fc->
|
||||
Test.with_clue fc.to_text <|
|
||||
Problems.expect_warning Nothing_Value_In_Filter_Condition (t.filter "x" fc . at "x")
|
||||
@ -454,7 +488,8 @@ add_filter_specs suite_builder setup =
|
||||
t = table_builder [["x", [1, 2, 3]], ["y", [1, Nothing, 2]]]
|
||||
Problems.assume_no_problems (t.filter "x" (Filter_Condition.Equal 12))
|
||||
Problems.assume_no_problems (t.filter "x" (Filter_Condition.Equal [Nothing, Nothing]))
|
||||
Problems.assume_no_problems (t.filter "x" (Filter_Condition.Is_In [[Nothing, Nothing]]))
|
||||
if setup.is_operation_supported "IS_IN" then
|
||||
Problems.assume_no_problems (t.filter "x" (Filter_Condition.Is_In [[Nothing, Nothing]]))
|
||||
|
||||
suite_builder.group prefix+"Table.filter by an expression" group_builder->
|
||||
group_builder.specify "by a boolean column" <|
|
||||
|
@ -107,9 +107,6 @@ type Test_Selection
|
||||
- supports_unicode_normalization: Specifies if the backend compares
|
||||
strings taking Unicode Normalization into account, i.e. whether
|
||||
's\u0301' is considered equal to 'ś'.
|
||||
- is_nan_and_nothing_distinct: Specifies if the backend is able to
|
||||
distinguish between a decimal NaN value and a missing value (Enso's
|
||||
Nothing, or SQL's NULL). If `False`, NaN is treated as a NULL.
|
||||
- is_nan_comparable: Specifies if the NaN value is
treated as greater than all numbers. If `False`, `NaN` is expected to
yield `False` for both < and > comparisons.
|
||||
@ -157,7 +154,7 @@ type Test_Selection
|
||||
- supports_date_time_without_timezone: Specifies if the backend supports
|
||||
date/time operations without a timezone (true for most Database backends).
|
||||
Defaults to `.is_integer`.
|
||||
Config natural_ordering=False case_insensitive_ordering=True order_by_unicode_normalization_by_default=False case_insensitive_ascii_only=False allows_mixed_type_comparisons=True supports_unicode_normalization=False is_nan_and_nothing_distinct=True is_nan_comparable=False distinct_returns_first_row_from_group_if_ordered=True date_time=True text_length_limited_columns=False fixed_length_text_columns=False length_restricted_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=False char_max_size_after_substring:Char_Max_Size_After_Substring_Behavior=..Kept different_size_integer_types=True supports_8bit_integer=False supports_decimal_type=False supports_time_duration=False supports_nanoseconds_in_time=False supports_mixed_columns=False supported_replace_params=Nothing run_advanced_edge_case_tests_by_default=True supports_date_time_without_timezone=False
|
||||
Config natural_ordering=False case_insensitive_ordering=True order_by_unicode_normalization_by_default=False case_insensitive_ascii_only=False allows_mixed_type_comparisons=True supports_unicode_normalization=False is_nan_comparable=False distinct_returns_first_row_from_group_if_ordered=True date_time=True text_length_limited_columns=False fixed_length_text_columns=False length_restricted_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=False char_max_size_after_substring:Char_Max_Size_After_Substring_Behavior=..Kept different_size_integer_types=True supports_8bit_integer=False supports_decimal_type=False supports_time_duration=False supports_nanoseconds_in_time=False supports_mixed_columns=False supported_replace_params=Nothing run_advanced_edge_case_tests_by_default=True supports_date_time_without_timezone=False
|
||||
|
||||
## Specifies if the advanced edge case tests shall be run.
|
||||
|
||||
|
@ -21,7 +21,6 @@ add_specs suite_builder setup =
|
||||
|
||||
add_missing_value_specs suite_builder setup =
|
||||
prefix = setup.prefix
|
||||
test_selection = setup.test_selection
|
||||
table_builder = build_sorted_table setup
|
||||
|
||||
t0 = Lazy_Ref.Value <|
|
||||
@ -100,7 +99,7 @@ add_missing_value_specs suite_builder setup =
|
||||
r2.columns.map .name . should_equal ["c"]
|
||||
r2.at "c" . to_vector . should_equal [10, 20, 30, 40]
|
||||
|
||||
if test_selection.is_nan_and_nothing_distinct then
|
||||
if setup.flagged ..Supports_Separate_NaN then
|
||||
group_builder.specify "should not treat NaNs as blank by default" <|
|
||||
r1 = t3.get.filter_blank_rows when=Blank_Selector.Any_Cell
|
||||
# We cannot use `Vector.==` because `NaN != NaN`.
|
||||
@ -149,7 +148,7 @@ add_missing_value_specs suite_builder setup =
|
||||
r6.columns.map .name . should_equal ["g", "h"]
|
||||
r6.at "h" . to_vector . to_text . should_equal "[NaN, Nothing, NaN, Nothing]"
|
||||
|
||||
if test_selection.is_nan_and_nothing_distinct.not then
|
||||
if setup.flagged ..Supports_Separate_NaN . not then
|
||||
group_builder.specify "this backend treats NaN as Nothing" <|
|
||||
t3.get.at "X" . to_vector . should_equal [2.0, 1.5, Nothing, Nothing]
|
||||
t3.get.at "X" . is_nan . to_vector . should_fail_with Unsupported_Database_Operation
|
||||
|
@ -682,10 +682,9 @@ postgres_specific_spec suite_builder create_connection_fn db_name setup =
|
||||
|
||||
## input/output
|
||||
group_builder.specify "Can handle NaN/Infinity" <|
|
||||
nan_result = if setup.test_selection.is_nan_and_nothing_distinct then Number.nan else Nothing
|
||||
ops = [.round, .truncate, .ceil, .floor]
|
||||
ops.each op->
|
||||
do_op data Number.nan op . should_equal nan_result
|
||||
do_op data Number.nan op . should_equal Number.nan
|
||||
do_op data Number.positive_infinity op . should_equal Number.positive_infinity
|
||||
do_op data Number.negative_infinity op . should_equal Number.negative_infinity
|
||||
|
||||
|
@ -281,7 +281,8 @@ sqlite_specific_spec suite_builder prefix create_connection_func setup =
|
||||
do_round data -1.2222222222222235 15 use_bankers=True . should_equal -1.222222222222224
|
||||
|
||||
group_builder.specify "Can handle NaN/Infinity" <|
|
||||
nan_result = if setup.test_selection.is_nan_and_nothing_distinct then Number.nan else Nothing
|
||||
## SQLite does not support NaN
|
||||
nan_result = Nothing
|
||||
ops = [.round, .truncate, .ceil, .floor]
|
||||
ops.each op->
|
||||
do_op data Number.nan op . should_equal nan_result
|
||||
@ -332,7 +333,7 @@ sqlite_spec suite_builder prefix create_connection_func persistent_connector =
|
||||
|
||||
materialize = .read
|
||||
|
||||
common_selection = Common_Table_Operations.Main.Test_Selection.Config natural_ordering=False case_insensitive_ordering=True case_insensitive_ascii_only=True is_nan_and_nothing_distinct=False date_time=False supported_replace_params=supported_replace_params different_size_integer_types=False length_restricted_text_columns=False char_max_size_after_substring=..Reset run_advanced_edge_case_tests_by_default=True
|
||||
common_selection = Common_Table_Operations.Main.Test_Selection.Config natural_ordering=False case_insensitive_ordering=True case_insensitive_ascii_only=True date_time=False supported_replace_params=supported_replace_params different_size_integer_types=False length_restricted_text_columns=False char_max_size_after_substring=..Reset run_advanced_edge_case_tests_by_default=True
|
||||
|
||||
## For now `advanced_stats`, `text_shortest_longest` and
|
||||
`multi_distinct` remain disabled, because SQLite does not provide the
|
||||
|
@ -40,8 +40,8 @@ in_memory_setup =
|
||||
is_operation_supported_fn _ =
|
||||
True
|
||||
flagged_fn flag:Dialect_Flag =
|
||||
case flag of
|
||||
Dialect_Flag.Supports_Case_Sensitive_Columns -> True
|
||||
_ = flag
|
||||
True
|
||||
|
||||
Common_Table_Operations.Main.Test_Setup.Config "[In-Memory] " agg_table_fn empty_table_fn table_builder materialize is_database=False test_selection=selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_func light_table_builder=light_table_builder is_feature_supported=is_feature_supported_fn flagged=flagged_fn is_operation_supported=is_operation_supported_fn
|
||||
|
||||
|