Mirror of https://github.com/enso-org/enso.git (synced 2024-12-22 17:41:53 +03:00)
Initial separation of Snowflake_Dialect from Postgres_Dialect (#10266)

- Part of #9486 - building on top of initial work by @jdunkerley and finishing it.
- Reverted the changes to the Postgres_Dialect from the last Snowflake work and split the Snowflake_Dialect into a separate module.
- Moved from `rounding_decimal_places_not_allowed_for_floats` to `supports_float_round_decimal_places` (the old name was too confusing).
- Added the Snowflake_Dialect type.
- Extracted `Snowflake_Spec` into a separate `Snowflake_Tests` project; it imports the common tests from `Table_Tests`.
- Made some initial adaptations so that the Snowflake dialect does not crash.
- Added an `Internals_Access` proxy to allow external implementations to access our internal data structures without exposing them directly to users. Users should not use these.
- Added profiling of SQL to check performance.
Commit 3a4784c226 (parent 1096642fae)
@@ -87,8 +87,7 @@ type Redshift_Dialect
     A heuristic used by `Connection.query` to determine if a given text looks
     like a SQL query for the given dialect or is rather a table name.
  is_probably_a_query : Text -> Boolean
- is_probably_a_query self text =
-     (text.contains "SELECT ") || (text.contains "EXEC ")
+ is_probably_a_query self text = Base_Generator.is_probably_a_query text

  ## PRIVATE
     Returns the mapping between SQL types of this dialect and Enso
@@ -98,7 +97,7 @@ type Redshift_Dialect

  ## PRIVATE
  get_statement_setter : Statement_Setter
- get_statement_setter self = Postgres_Dialect.postgres_statement_setter
+ get_statement_setter self = Statement_Setter.default

  ## PRIVATE
  make_cast : Internal_Column -> SQL_Type -> (SQL_Expression -> SQL_Type_Reference) -> Internal_Column
@@ -122,8 +121,8 @@ type Redshift_Dialect
  supports_negative_round_decimal_places self = True

  ## PRIVATE
- rounding_decimal_places_not_allowed_for_floats : Boolean
- rounding_decimal_places_not_allowed_for_floats self = True
+ supports_float_round_decimal_places : Boolean
+ supports_float_round_decimal_places self = False

  ## PRIVATE
  adapt_unified_column : Internal_Column -> Value_Type -> (SQL_Expression -> SQL_Type_Reference) -> Internal_Column
@@ -151,8 +151,9 @@ type Filter_Condition
  resolve_auto_scoped filter =
      resolve filter:Filter_Condition = filter
      case filter of
-         _ : Function -> Panic.catch Any (resolve filter) _->filter
-         _ -> filter
+         _ : Function -> Panic.catch Any (resolve filter) _->filter
+         _ : Filter_Condition -> filter
+         _ -> Panic.throw (Illegal_Argument.Error "The filter condition can either be a Function or a Filter_Condition, but got: "+filter.to_display_text)

  ## ICON convert
     Converts a `Filter_Condition` condition into a predicate taking an
@@ -320,7 +320,7 @@ type DB_Column
  Value_Type.expect_text other <|
      Helpers.assume_default_locale locale <|
          new_name = self.naming_helper.function_name "equals_ignore_case" [self, other]
-         self.make_binary_op "equals_ignore_case" other new_name
+         self.make_binary_op "EQUALS_IGNORE_CASE" other new_name

  ## ALIAS not equals
     GROUP Standard.Base.Operators
@@ -604,7 +604,7 @@ type DB_Column
  Value_Type_Helpers.check_binary_numeric_op self other <|
      other_type = Value_Type_Helpers.find_argument_type other
      # Different implementation may be used for integer types.
-     op = if self.value_type.is_integer && (other_type.is_nothing || other_type.is_integer) then "%" else "mod"
+     op = if self.value_type.is_integer && (other_type.is_nothing || other_type.is_integer) then "%" else "MOD"
      new_name = self.naming_helper.binary_operation_name "%" self other
      self.make_binary_op op other new_name
@@ -798,7 +798,7 @@ type DB_Column
  # Don't use for negative decimal places, if the backend doesn't support it
  negative_dp_ok = decimal_places >= 0 || self.connection.dialect.supports_negative_round_decimal_places
  # Don't use for floating-point inputs if decimal_places != 0, for Postgres
- dp_param_ok = (self.connection.dialect.rounding_decimal_places_not_allowed_for_floats && self.value_type.is_floating_point && decimal_places != 0).not
+ dp_param_ok = decimal_places == 0 || self.value_type.is_floating_point.not || self.connection.dialect.supports_float_round_decimal_places
  not_bankers && negative_dp_ok && dp_param_ok

  ## PRIVATE
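For illustration (not part of the diff; it just traces the new condition above):

    # Backend with supports_float_round_decimal_places = False (Postgres):
    #   decimal_places == 0            -> dp_param_ok = True
    #   decimal_places == 2, integers  -> dp_param_ok = True   (is_floating_point.not)
    #   decimal_places == 2, floats    -> dp_param_ok = False  (generic ROUND not used)
    # Backend with supports_float_round_decimal_places = True (SQLite): always True.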
@@ -1237,7 +1237,7 @@ type DB_Column
  starts_with : DB_Column | Text -> Case_Sensitivity -> DB_Column
  starts_with self other case_sensitivity=Case_Sensitivity.Default =
      new_name = self.naming_helper.function_name "starts_with" [self, other]
-     make_text_case_op self "starts_with" other case_sensitivity new_name
+     make_text_case_op self "STARTS_WITH" other case_sensitivity new_name

  ## GROUP Standard.Base.Text
     ICON preparation
@@ -1255,7 +1255,7 @@ type DB_Column
  ends_with : DB_Column | Text -> Case_Sensitivity -> DB_Column
  ends_with self other case_sensitivity=Case_Sensitivity.Default =
      new_name = self.naming_helper.function_name "ends_with" [self, other]
-     make_text_case_op self "ends_with" other case_sensitivity new_name
+     make_text_case_op self "ENDS_WITH" other case_sensitivity new_name

  ## GROUP Standard.Base.Text
     ICON preparation
@@ -1335,7 +1335,7 @@ type DB_Column
  contains : DB_Column | Text -> Case_Sensitivity -> DB_Column
  contains self other case_sensitivity=Case_Sensitivity.Default =
      new_name = self.naming_helper.function_name "contains" [self, other]
-     make_text_case_op self "contains" other case_sensitivity new_name
+     make_text_case_op self "CONTAINS" other case_sensitivity new_name

  ## GROUP Standard.Base.Logical
     ICON preparation
@@ -140,8 +140,8 @@ type Dialect

  ## PRIVATE
     Specifies whether round() can take a decimal_places argument for floating point values.
- rounding_decimal_places_not_allowed_for_floats : Boolean
- rounding_decimal_places_not_allowed_for_floats self =
+ supports_float_round_decimal_places : Boolean
+ supports_float_round_decimal_places self =
      Unimplemented.throw "This is an interface only."

  ## PRIVATE
@@ -116,6 +116,20 @@ lift_binary_op name function =
  True -> function (arguments.at 0) (arguments.at 1)
  [name, generator]

+ ## PRIVATE
+
+    A helper function to create a binary operation from a SQL function name.
+
+    Arguments:
+    - name: Name of the operation, used for error reporting.
+    - sql_function: SQL function taking two arguments.
+ lift_binary_sql_function : Text -> Text -> Pair Text (Vector SQL_Builder -> SQL_Builder)
+ lift_binary_sql_function name:Text sql_function:Text =
+     function first second =
+         res = SQL_Builder.code (sql_function + "(") ++ first ++ "," ++ second ++ ")"
+         res.paren
+     lift_binary_op name function
+
  ## PRIVATE

     A helper function to create a unary operator which is added to the right of
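A minimal usage sketch (not part of the diff; it mirrors how Postgres_Dialect uses this helper later in this commit):

    ## Registers a binary "STARTS_WITH" operation backed by the SQL function
       starts_with(a, b); the generated fragment is parenthesised.
    starts_with = Base_Generator.lift_binary_sql_function "STARTS_WITH" "starts_with"
    # For argument builders a and b this renders roughly as: (starts_with(a,b))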
@@ -181,7 +195,7 @@ base_dialect =
  unary = name -> [name, make_unary_op name]
  fun = name -> [name, make_function name]

- arith = [["ADD_NUMBER", make_binary_op "+"], ["ADD_TEXT", make_binary_op "||"], bin "-", bin "*", bin "/", bin "%", ["mod", make_function "MOD"], ["^", make_function "POWER"], ["ROUND", make_function "ROUND"], ["TRUNCATE", make_function "TRUNC"], ["CEIL", make_function "CEIL"], ["FLOOR", make_function "FLOOR"]]
+ arith = [["ADD_NUMBER", make_binary_op "+"], ["ADD_TEXT", make_binary_op "||"], bin "-", bin "*", bin "/", bin "%", ["MOD", make_function "MOD"], ["^", make_function "POWER"], ["ROUND", make_function "ROUND"], ["TRUNCATE", make_function "TRUNC"], ["CEIL", make_function "CEIL"], ["FLOOR", make_function "FLOOR"]]
  logic = [bin "AND", bin "OR", unary "NOT", ["IIF", make_iif], ["CASE", case_when]]
  eq = lift_binary_op "==" make_equals
  neq = lift_binary_op "!=" make_not_equals
@@ -375,7 +389,7 @@ make_case_sensitive = lift_unary_op "MAKE_CASE_SENSITIVE" _->
  Error.throw <| Unsupported_Database_Operation.Error ("Case sensitive operations are not currently supported by this connection.")

  ## PRIVATE
- simple_equals_ignore_case = Base_Generator.lift_binary_op "equals_ignore_case" a-> b->
+ simple_equals_ignore_case = Base_Generator.lift_binary_op "EQUALS_IGNORE_CASE" a-> b->
      SQL_Builder.code "LOWER(UPPER(" ++ a ++ ")) = LOWER(UPPER(" ++ b ++ "))"

  ## PRIVATE
@@ -464,7 +478,8 @@ generate_query dialect query = case query of
  prefix = case ctx.distinct_on of
      Nothing -> SQL_Builder.code ""
      expressions : Vector ->
-         # TODO I just realised this does not make sense in other backends than Postgres, so we should probably fail in such cases; probably rewrite into a generic modifier? or a transform?
+         ## TODO I just realised this does not make sense in other backends than Postgres,
+            so we should probably fail in such cases; probably rewrite into a generic modifier? or a transform?
          generated = SQL_Builder.join ", " (expressions.map (generate_expression dialect))
          SQL_Builder.code "DISTINCT ON (" ++ generated ++ ") "
  SQL_Builder.code "SELECT " ++ prefix ++ cols ++ generate_select_context dialect ctx
@@ -583,3 +598,9 @@ generate_column_description dialect descriptor =
  suffix = if modifiers.is_empty then SQL_Builder.empty else
      SQL_Builder.code " " ++ (SQL_Builder.join " " modifiers)
  (dialect.wrap_identifier descriptor.name) ++ " " ++ descriptor.sql_type ++ suffix
+
+ ## PRIVATE
+    A heuristic used by `Connection.query` to determine if a given text looks
+    like a SQL query for the given dialect or is rather a table name.
+ is_probably_a_query : Text -> Boolean
+ is_probably_a_query text = (text.contains "SELECT ") || (text.contains "EXEC ")
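For illustration (example inputs made up, behaviour follows from the heuristic above):

    # Base_Generator.is_probably_a_query "SELECT a FROM t" ==> True
    # Base_Generator.is_probably_a_query "EXEC my_proc"    ==> True
    # Base_Generator.is_probably_a_query "my_table"        ==> False (treated as a table name)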
@@ -34,7 +34,7 @@ make_join_helpers left_table right_table left_column_mapping right_column_mapping =
  SQL_Expression.Operation "==" [resolve_left left, resolve_right right]
  make_equals_ignore_case _ left right locale =
      Helpers.assume_default_locale locale <|
-         SQL_Expression.Operation "equals_ignore_case" [resolve_left left, resolve_right right]
+         SQL_Expression.Operation "EQUALS_IGNORE_CASE" [resolve_left left, resolve_right right]
  make_between _ left right_lower right_upper =
      args = [resolve_left left, resolve_right right_lower, resolve_right right_upper]
      SQL_Expression.Operation "BETWEEN" args
@@ -0,0 +1,29 @@ (new file: Internals_Access)
+ ## We do not want to expose column internals to users, but we need to allow
+    external libraries implementing additional database connectors to be able to
+    access them.
+
+    To alleviate that, we provide the `Internals_Access` module that exposes
+    these internals.
+
+    This module should never be imported by user code. It should only be used by
+    providers of new database dialects.
+ from Standard.Base import all
+
+ import project.DB_Column.DB_Column
+ import project.DB_Table.DB_Table
+ import project.Internal.IR.Context.Context
+ import project.Internal.IR.Internal_Column.Internal_Column
+ import project.Internal.IR.SQL_Expression.SQL_Expression
+
+ ## PRIVATE
+ column_expression (column : DB_Column | Internal_Column) -> SQL_Expression = column.expression
+
+ ## PRIVATE
+ get_connection (thing : DB_Column | DB_Table) = thing.connection
+
+ ## PRIVATE
+ get_context (thing : DB_Column | DB_Table) -> Context = thing.context
+
+ ## PRIVATE
+ internal_columns (table : DB_Table) -> Vector Internal_Column = table.internal_columns
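A sketch of how an external dialect implementation is expected to use this module (the helper below is hypothetical and not part of the commit; the calls mirror the Snowflake_Dialect code later in this diff):

    import Standard.Database.Internal.Internals_Access
    import Standard.Database.Internal.IR.SQL_Expression.SQL_Expression

    ## PRIVATE
       Hypothetical helper in a third-party dialect: wraps a column in UPPER
       without reaching into DB_Column internals directly.
    make_upper column =
        SQL_Expression.Operation "UPPER" [Internals_Access.column_expression column]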
@@ -102,7 +102,7 @@ type JDBC_Connection
  (e.g. `ensure_query_has_no_holes` which prepares a statement but does
  not execute it).
  with_prepared_statement : Text | SQL_Statement -> Statement_Setter -> (PreparedStatement -> Any) -> Boolean -> Any
- with_prepared_statement self query statement_setter action skip_log=False = self.synchronized <|
+ with_prepared_statement self query statement_setter action skip_log=False = self.synchronized <| profile_sql_if_enabled self query.to_text <|
      prepare template values = self.with_connection java_connection->
          stmt = java_connection.prepareStatement template
          handle_illegal_state caught_panic =
@@ -159,7 +159,6 @@ type JDBC_Connection
  raw_read_statement : Text -> Table
  raw_read_statement self raw_sql =
      self.with_prepared_statement raw_sql Statement_Setter.null stmt->
-         log_sql_if_enabled self stmt.toString
          rs = stmt.executeQuery
          make_fallback_fetcher _ =
              Column_Fetcher_Module.fallback_fetcher
@@ -226,7 +225,7 @@ type JDBC_Connection
  It is the caller's responsibility to call this method from within a
  transaction to ensure consistency.
  batch_insert : Text -> Statement_Setter -> Table -> Integer -> Vector Value_Type | Nothing -> Integer | Nothing -> Nothing
- batch_insert self insert_template statement_setter table batch_size expected_type_hints=Nothing row_limit=Nothing =
+ batch_insert self insert_template statement_setter table batch_size expected_type_hints=Nothing row_limit=Nothing = profile_sql_if_enabled self insert_template <|
      In_Transaction.ensure_in_transaction <| self.with_connection java_connection-> handle_sql_errors related_query=insert_template <|
          Managed_Resource.bracket (java_connection.prepareStatement insert_template) .close stmt->
              log_sql_if_enabled self insert_template
@@ -321,6 +320,31 @@ log_sql_if_enabled jdbc_connection ~query_text =
  Context.Output.with_enabled <|
      log_line.write path on_existing_file=Existing_File_Behavior.Append

+ ## PRIVATE
+    A helper that logs performed SQL queries/statements run-time to a profiling
+    file, if an environment variable is set.
+ profile_sql_if_enabled (jdbc_connection : JDBC_Connection) (~query_text : Text) ~action =
+     case Environment.get "ENSO_SQL_PROFILING_PATH" of
+         Nothing -> action
+         path -> if path.is_empty then action else
+             db_id = jdbc_connection.with_metadata .getDatabaseProductName
+             duration_and_result = Duration.time_execution action
+             duration = duration_and_result.first
+             result = duration_and_result.second
+             ## If the action returns a dataflow error, `time_execution` returns the error, no duration.
+                We could work around this. But we don't want to - the failing queries likely did not run anyway,
+                so their time info would most likely be an outlier. For current purposes it makes sense to ignore such cases.
+             result.if_not_error <|
+                 padded_query = query_text.to_display_text.pad length=80
+                 millis = duration.total_milliseconds
+                 millis_text = (millis.to_text.pad length=5 with_pad=' ' at=Location.Start) + ' ms'
+
+                 # E.g. [SQLite] SELECT * FROM "test" ... --> 12 ms
+                 log_line = "[" + db_id + "] " + padded_query + " --> " + millis_text + '\n'
+                 Context.Output.with_enabled <|
+                     log_line.write path on_existing_file=Existing_File_Behavior.Append
+                 result
+
  ## PRIVATE
  get_pragma_value : JDBC_Connection -> Text -> Any
  get_pragma_value jdbc_connection sql =
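For reference (the line format follows from the code above; the statements and timings are made up): when `ENSO_SQL_PROFILING_PATH` points to a writable file, each profiled statement appends a line such as:

    [PostgreSQL] SELECT "test"."a" AS "a" FROM "test" AS "test"  -->    12 ms
    [Snowflake] INSERT INTO "target" ("a", "b") VALUES (?, ?)    -->     7 ms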
@@ -47,22 +47,20 @@ postgres =
      Postgres_Dialect.Value make_internal_generator_dialect

  ## PRIVATE

     The dialect of PostgreSQL databases.
  type Postgres_Dialect
      ## PRIVATE

         The dialect of PostgreSQL databases.
-     Value internal_generator_dialect dialect_name:Text="PostgreSQL" dialect_to_text:Text="Postgres_Dialect" type_mapping=Postgres_Type_Mapping
+     Value internal_generator_dialect

      ## PRIVATE
         Name of the dialect.
      name : Text
-     name self = self.dialect_name
+     name self = postgres_dialect_name

      ## PRIVATE
      to_text : Text
-     to_text self = self.dialect_to_text
+     to_text self = "Postgres_Dialect"

      ## PRIVATE
         A function which generates SQL code from the internal representation
@@ -125,18 +123,17 @@ type Postgres_Dialect
     A heuristic used by `Connection.query` to determine if a given text looks
     like a SQL query for the given dialect or is rather a table name.
  is_probably_a_query : Text -> Boolean
- is_probably_a_query self text =
-     (text.contains "SELECT ") || (text.contains "EXEC ")
+ is_probably_a_query self text = Base_Generator.is_probably_a_query text

  ## PRIVATE
     Returns the mapping between SQL types of this dialect and Enso
     `Value_Type`.
  get_type_mapping : SQL_Type_Mapping
- get_type_mapping self = self.type_mapping
+ get_type_mapping self = Postgres_Type_Mapping

  ## PRIVATE
  get_statement_setter : Statement_Setter
- get_statement_setter self = postgres_statement_setter
+ get_statement_setter self = Statement_Setter.default

  ## PRIVATE
  make_cast : Internal_Column -> SQL_Type -> (SQL_Expression -> SQL_Type_Reference) -> Internal_Column
@@ -167,8 +164,8 @@ type Postgres_Dialect
  supports_negative_round_decimal_places self = True

  ## PRIVATE
- rounding_decimal_places_not_allowed_for_floats : Boolean
- rounding_decimal_places_not_allowed_for_floats self = True
+ supports_float_round_decimal_places : Boolean
+ supports_float_round_decimal_places self = False

  ## PRIVATE
     There is a bug in Postgres type inference, where if we unify two
@@ -238,14 +235,14 @@ type Postgres_Dialect
     if the given period is supported.
  prepare_metadata_for_period : Date_Period | Time_Period -> Value_Type -> Any
  prepare_metadata_for_period self period operation_input_type =
-     if period == Time_Period.Nanosecond && self.name == "PostgreSQL" then Error.throw (Unsupported_Database_Operation.Error "Postgres backend does not support nanosecond precision in date/time operations.") else
+     if period == Time_Period.Nanosecond then Error.throw (Unsupported_Database_Operation.Error "Postgres does not support nanosecond precision date/times.") else
          Date_Period_Metadata.Value period operation_input_type

  ## PRIVATE
     Returns true if the `replace` parameters are supported by this backend.
  if_replace_params_supports : Replace_Params -> Any -> Any
  if_replace_params_supports self replace_params ~action =
-     if supported_replace_params.contains replace_params then action else replace_params.throw_unsupported self.name
+     if supported_replace_params.contains replace_params then action else replace_params.throw_unsupported postgres_dialect_name

  ## PRIVATE
  value_type_for_upload_of_existing_column : DB_Column -> Value_Type
@@ -451,12 +448,10 @@ agg_count_distinct_include_null args =
  SQL_Builder.code "COUNT(DISTINCT (" ++ SQL_Builder.join ", " args ++ ", 0))"

  ## PRIVATE
- starts_with = Base_Generator.lift_binary_op "starts_with" str-> sub->
-     res = SQL_Builder.code "starts_with(" ++ str ++ "," ++ sub ++ ")"
-     res.paren
+ starts_with = Base_Generator.lift_binary_sql_function "STARTS_WITH" "starts_with"

  ## PRIVATE
- ends_with = Base_Generator.lift_binary_op "ends_with" str-> sub->
+ ends_with = Base_Generator.lift_binary_op "ENDS_WITH" str-> sub->
      res = str ++ " LIKE CONCAT('%', " ++ sub ++ ")"
      res.paren
@@ -469,7 +464,7 @@ make_contains_expr expr substring =
  SQL_Builder.code "position(" ++ substring ++ " in " ++ expr ++ ") > 0"

  ## PRIVATE
- contains = Base_Generator.lift_binary_op "contains" make_contains_expr
+ contains = Base_Generator.lift_binary_op "CONTAINS" make_contains_expr

  ## PRIVATE
  left = Base_Generator.lift_binary_op "LEFT" str-> n->
@@ -520,7 +515,7 @@ floating_point_div = Base_Generator.lift_binary_op "/" x-> y->
  SQL_Builder.code "CAST(" ++ x ++ " AS double precision) / CAST(" ++ y ++ " AS double precision)"

  ## PRIVATE
- mod_op = Base_Generator.lift_binary_op "mod" x-> y->
+ mod_op = Base_Generator.lift_binary_op "MOD" x-> y->
      x ++ " - FLOOR(CAST(" ++ x ++ " AS double precision) / CAST(" ++ y ++ " AS double precision)) * " ++ y

  ## PRIVATE
@@ -589,9 +584,7 @@ replace args metadata =
          _ ->
              SQL_Builder.code "REPLACE(" ++ input ++ ", " ++ pattern ++ ", " ++ replacement ++ ")"
      True -> Nothing
- case expression of
-     Nothing -> replace_params.throw_unsupported "PostgreSQL"
-     _ -> expression
+ expression.if_nothing (replace_params.throw_unsupported postgres_dialect_name)

  ## PRIVATE
  make_extract_as_int enso_name sql_name=enso_name =
@@ -725,9 +718,6 @@ as_int64 expr =
  as_int32 expr =
      SQL_Builder.code "(" ++ expr ++ "::int4)"

- ## PRIVATE
- postgres_statement_setter = Statement_Setter.default
-
  ## PRIVATE
     The RUNTIME_ERROR operation should allow the query to compile fine and it
     will not prevent it from running if the branch including this operation is
@@ -750,3 +740,6 @@ make_runtime_error_op arguments =
  variable_to_defer = arguments.at 1

  SQL_Builder.code "CAST('[ENSO INVARIANT VIOLATED: '||" ++ error_message ++ "||'] '||COALESCE(" ++ variable_to_defer ++ "::TEXT,'NULL') AS BOOLEAN)"
+
+ ## PRIVATE
+ postgres_dialect_name = "PostgreSQL"
@@ -123,8 +123,7 @@ type SQLite_Dialect
     A heuristic used by `Connection.query` to determine if a given text looks
     like a SQL query for the given dialect or is rather a table name.
  is_probably_a_query : Text -> Boolean
- is_probably_a_query self text =
-     (text.contains "SELECT ") || (text.contains "EXEC ")
+ is_probably_a_query self text = Base_Generator.is_probably_a_query text

  ## PRIVATE
     Returns the mapping between SQL types of this dialect and Enso
@@ -167,8 +166,8 @@ type SQLite_Dialect
  supports_negative_round_decimal_places self = False

  ## PRIVATE
- rounding_decimal_places_not_allowed_for_floats : Boolean
- rounding_decimal_places_not_allowed_for_floats self = False
+ supports_float_round_decimal_places : Boolean
+ supports_float_round_decimal_places self = True

  ## PRIVATE
     SQLite allows mixed type columns, but we want our columns to be uniform.
@@ -378,12 +377,12 @@ agg_count_distinct_include_null args = case args.length == 1 of
  False -> Error.throw (Illegal_Argument.Error "COUNT_DISTINCT supports only single arguments in SQLite.")

  ## PRIVATE
- starts_with = Base_Generator.lift_binary_op "starts_with" str-> sub->
+ starts_with = Base_Generator.lift_binary_op "STARTS_WITH" str-> sub->
      res = str ++ " GLOB (" ++ sub ++ " || '*')"
      res.paren

  ## PRIVATE
- ends_with = Base_Generator.lift_binary_op "ends_with" str-> sub->
+ ends_with = Base_Generator.lift_binary_op "ENDS_WITH" str-> sub->
      res = str ++ " GLOB ('*' || " ++ sub ++ ")"
      res.paren
@@ -396,7 +395,7 @@ make_contains_expr expr substring =
  SQL_Builder.code "instr(" ++ expr ++ ", " ++ substring ++ ") > 0"

  ## PRIVATE
- contains = Base_Generator.lift_binary_op "contains" make_contains_expr
+ contains = Base_Generator.lift_binary_op "CONTAINS" make_contains_expr

  ## PRIVATE
  left = Base_Generator.lift_binary_op "LEFT" str-> n->
@@ -415,7 +414,7 @@ floating_point_div = Base_Generator.lift_binary_op "/" x-> y->
  SQL_Builder.code "CAST(" ++ x ++ " AS REAL) / CAST(" ++ y ++ " AS REAL)"

  ## PRIVATE
- mod_op = Base_Generator.lift_binary_op "mod" x-> y->
+ mod_op = Base_Generator.lift_binary_op "MOD" x-> y->
      x ++ " - FLOOR(CAST(" ++ x ++ " AS REAL) / CAST(" ++ y ++ " AS REAL)) * " ++ y

  ## PRIVATE
@@ -190,8 +190,8 @@ operations_map =
  handle_runtime_error _ =
      Panic.throw (Illegal_State.Error "RUNTIME_ERROR should not be part of direct type inference, so this code should never be reached. This is a bug in the Database library.")

- always_boolean_ops = ["==", "!=", "equals_ignore_case", ">=", "<=", "<", ">", "BETWEEN", "AND", "OR", "NOT", "IS_NULL", "IS_EMPTY", "LIKE", "IS_IN", "IS_IN_COLUMN", "starts_with", "ends_with", "contains", "BOOL_OR", "IS_INF"]
- always_floating_ops = ["/", "mod", "AVG", "STDDEV_POP", "STDDEV_SAMP", "ROUND"]
+ always_boolean_ops = ["==", "!=", "EQUALS_IGNORE_CASE", ">=", "<=", "<", ">", "BETWEEN", "AND", "OR", "NOT", "IS_NULL", "IS_EMPTY", "LIKE", "IS_IN", "IS_IN_COLUMN", "STARTS_WITH", "ENDS_WITH", "CONTAINS", "BOOL_OR", "IS_INF"]
+ always_floating_ops = ["/", "MOD", "AVG", "STDDEV_POP", "STDDEV_SAMP", "ROUND"]
  always_text_ops = ["ADD_TEXT", "CONCAT", "CONCAT_QUOTE_IF_NEEDED", "MAKE_CASE_SENSITIVE", "FOLD_CASE", "TRIM", "LTRIM", "RTRIM", "REPLACE", "LEFT", "RIGHT"]
  always_integer_ops = ["COUNT", "COUNT_IS_NULL", "COUNT_DISTINCT", "COUNT_DISTINCT_INCLUDE_NULL", "COUNT_EMPTY", "COUNT_NOT_EMPTY", "COUNT_ROWS", "ROW_NUMBER", "ROW_NUMBER_IN_GROUP", "LENGTH"]
  same_as_first = ["TRUNCATE", "CEIL", "FLOOR"]
@@ -77,3 +77,8 @@ type SQL_Statement
  JS_Object.from_pairs [["sql_interpolation", inner]]
  fragments = self.internal_fragments.map jsonify
  JS_Object.from_pairs [["query", fragments]]
+
+ ## PRIVATE
+ to_text self -> Text =
+     prepared = self.prepare
+     "SQL_Statement " + prepared.first + " with values " + prepared.second.to_text
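For example (illustrative values, not from the diff), a prepared statement with one interpolation would render roughly as:

    SQL_Statement SELECT * FROM "t" WHERE "a" = ? with values [42]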
@@ -95,7 +95,7 @@ type Snowflake_Connection
  @database (self-> Single_Choice display=Display.Always values=(self.databases . map d-> Option d d.pretty))
  set_database : Text -> Connection ! SQL_Error
  set_database self database =
-     if database == self.database then self else
+     if database . equals_ignore_case self.database then self else
          self.make_new database Nothing Nothing

  ## ICON metadata
@@ -117,7 +117,7 @@ type Snowflake_Connection
  @schema make_schema_selector
  set_schema : Text -> Connection ! SQL_Error
  set_schema self schema =
-     if schema == self.schema then self else
+     if schema . equals_ignore_case self.schema then self else
          self.make_new Nothing schema Nothing

  ## GROUP Standard.Base.Metadata
@@ -1,14 +1,663 @@ (Snowflake_Dialect)
  private

- from Standard.Base import all
+ from Standard.Base import all hiding First, Last
+ import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
+ import Standard.Base.Errors.Illegal_State.Illegal_State
+ import Standard.Base.Errors.Unimplemented.Unimplemented

- from Standard.Database.Internal.Postgres.Postgres_Dialect import make_internal_generator_dialect, Postgres_Dialect
+ import Standard.Table.Internal.Problem_Builder.Problem_Builder
+ import Standard.Table.Internal.Vector_Builder.Vector_Builder
+ from Standard.Table import Aggregate_Column, Column, Value_Type
+ from Standard.Table.Aggregate_Column.Aggregate_Column import all
+ from Standard.Table.Errors import Inexact_Type_Coercion
+ from Standard.Table.Internal.Storage import get_storage_for_column
+
+ import Standard.Database.Connection.Connection.Connection
+ import Standard.Database.DB_Column.DB_Column
+ import Standard.Database.DB_Table.DB_Table
+ import Standard.Database.Dialect
+ import Standard.Database.Internal.Base_Generator
+ import Standard.Database.Internal.Common.Database_Distinct_Helper
+ import Standard.Database.Internal.Common.Database_Join_Helper
+ import Standard.Database.Internal.Error_Mapper.Error_Mapper
+ import Standard.Database.Internal.Internals_Access
+ import Standard.Database.Internal.IR.Context.Context
+ import Standard.Database.Internal.IR.From_Spec.From_Spec
+ import Standard.Database.Internal.IR.Internal_Column.Internal_Column
+ import Standard.Database.Internal.IR.Nulls_Order.Nulls_Order
+ import Standard.Database.Internal.IR.Order_Descriptor.Order_Descriptor
+ import Standard.Database.Internal.IR.Query.Query
+ import Standard.Database.Internal.IR.SQL_Expression.SQL_Expression
+ import Standard.Database.Internal.IR.SQL_Join_Kind.SQL_Join_Kind
+ import Standard.Database.Internal.Replace_Params.Replace_Params
+ import Standard.Database.Internal.SQL_Type_Mapping.SQL_Type_Mapping
+ import Standard.Database.Internal.SQL_Type_Reference.SQL_Type_Reference
+ import Standard.Database.Internal.Statement_Setter.Statement_Setter
+ import Standard.Database.SQL.SQL_Builder
+ import Standard.Database.SQL.SQL_Fragment
+ import Standard.Database.SQL_Statement.SQL_Statement
+ import Standard.Database.SQL_Type.SQL_Type
+ from Standard.Database.Errors import SQL_Error, Unsupported_Database_Operation
+ from Standard.Database.Internal.IR.Operation_Metadata import Date_Period_Metadata
+ from Standard.Database.Internal.Statement_Setter import fill_hole_default
+
+ # TODO replace with custom one
+ import Standard.Database.Internal.Postgres.Postgres_Error_Mapper.Postgres_Error_Mapper

  import project.Internal.Snowflake_Type_Mapping.Snowflake_Type_Mapping

  ## PRIVATE
     The dialect of Snowflake databases.
-    Based off Postgres for now.
- snowflake : Postgres_Dialect
+ snowflake : Snowflake_Dialect
  snowflake =
-     Postgres_Dialect.Value make_internal_generator_dialect "Snowflake" "Snowflake_Dialect" Snowflake_Type_Mapping
+     Snowflake_Dialect.Value make_internal_generator_dialect

The remainder of the hunk is the new Snowflake_Dialect module body, listed below.
## PRIVATE
   The dialect of Snowflake databases.
type Snowflake_Dialect
    ## PRIVATE
       The dialect of Snowflake databases.
    Value internal_generator_dialect

    ## PRIVATE
       Name of the dialect.
    name : Text
    name self = snowflake_dialect_name

    ## PRIVATE
    to_text : Text
    to_text self = "Snowflake_Dialect"

    ## PRIVATE
       A function which generates SQL code from the internal representation
       according to the specific dialect.
    generate_sql : Query -> SQL_Statement
    generate_sql self query = case query of
        Query.Truncate_Table name ->
            # Special handling to use TRUNCATE TABLE instead of the default DELETE FROM.
            wrapped_name = self.internal_generator_dialect.wrap_identifier name
            (SQL_Builder.code "TRUNCATE TABLE " ++ wrapped_name) . build
        _ ->
            # Rely on the base generator for most queries.
            Base_Generator.generate_query self.internal_generator_dialect query . build

    ## PRIVATE
       Wraps and possibly escapes the identifier so that it can be used in a
       generated query regardless of what characters it contains.
       The quotes used will depend on the dialect.
    wrap_identifier : Text -> Text
    wrap_identifier self identifier =
        self.internal_generator_dialect.wrap_identifier_raw identifier

    ## PRIVATE
       Prepares an ordering descriptor.

       One of the purposes of this method is to verify if the expected ordering
       settings are supported by the given database backend.

       Arguments:
       - internal_column: the column to order by.
       - sort_direction: the direction of the ordering.
       - text_ordering: If provided, specifies that the column should be treated
         as text values according to the provided ordering. For non-text types,
         it should be set to `Nothing`.
    prepare_order_descriptor : Internal_Column -> Sort_Direction -> Nothing | Text_Ordering -> Order_Descriptor
    prepare_order_descriptor self internal_column sort_direction text_ordering =
        make_order_descriptor internal_column sort_direction text_ordering

    ## PRIVATE
       Prepares a distinct operation.
    prepare_distinct : DB_Table -> Vector -> Case_Sensitivity -> Problem_Builder -> DB_Table
    prepare_distinct self table key_columns case_sensitivity problem_builder =
        table_name_deduplicator = (Internals_Access.get_connection table).base_connection.table_naming_helper.create_unique_name_strategy
        table_name_deduplicator.mark_used table.name
        inner_table_alias = table_name_deduplicator.make_unique table.name+"_inner"
        setup = (Internals_Access.get_context table).as_subquery inner_table_alias [Internals_Access.internal_columns table]
        new_columns = setup.new_columns.first
        column_mapping = Map.from_vector <| new_columns.map c-> [c.name, c]
        new_key_columns = key_columns.map c-> column_mapping.at c.name
        type_mapping = self.get_type_mapping
        distinct_expressions = new_key_columns.map column->
            value_type = type_mapping.sql_type_to_value_type column.sql_type_reference.get
            Database_Distinct_Helper.make_distinct_expression case_sensitivity problem_builder column value_type
        new_context = Context.for_subquery setup.subquery . set_distinct_on distinct_expressions
        table.updated_context_and_columns new_context new_columns subquery=True

    ## PRIVATE
       A heuristic used by `Connection.query` to determine if a given text looks
       like a SQL query for the given dialect or is rather a table name.
    is_probably_a_query : Text -> Boolean
    is_probably_a_query self text = Base_Generator.is_probably_a_query text

    ## PRIVATE
       Returns the mapping between SQL types of this dialect and Enso
       `Value_Type`.
    get_type_mapping : SQL_Type_Mapping
    get_type_mapping self = Snowflake_Type_Mapping

    ## PRIVATE
    get_statement_setter : Statement_Setter
    get_statement_setter self =
        custom_fill_hole stmt i type_hint value = case value of
            # TODO some Special handling for Date/Time
            _ : Date -> stmt.setObject i value
            _ : Date_Time -> stmt.setObject i value
            _ : Time_Of_Day -> stmt.setObject i value
            # Fallback to default logic for everything else
            _ -> fill_hole_default stmt i type_hint value
        Statement_Setter.Value custom_fill_hole

    ## PRIVATE
    make_cast : Internal_Column -> SQL_Type -> (SQL_Expression -> SQL_Type_Reference) -> Internal_Column
    make_cast self column target_type infer_result_type_from_database_callback =
        mapping = self.get_type_mapping
        source_type = mapping.sql_type_to_value_type column.sql_type_reference.get
        target_value_type = mapping.sql_type_to_value_type target_type
        # Boolean to Numeric casts need special handling:
        transformed_expression = case source_type.is_boolean && target_value_type.is_numeric of
            True ->
                SQL_Expression.Operation "IIF" [Internals_Access.column_expression column, SQL_Expression.Literal "1", SQL_Expression.Literal "0"]
            False -> Internals_Access.column_expression column
        target_type_sql_text = mapping.sql_type_to_text target_type
        new_expression = SQL_Expression.Operation "CAST" [transformed_expression, SQL_Expression.Literal target_type_sql_text]
        new_sql_type_reference = infer_result_type_from_database_callback new_expression
        Internal_Column.Value column.name new_sql_type_reference new_expression

    ## PRIVATE
    needs_execute_query_for_type_inference : Boolean
    needs_execute_query_for_type_inference self = False

    ## PRIVATE
    supports_separate_nan : Boolean
    supports_separate_nan self = True

    ## PRIVATE
    supports_negative_round_decimal_places : Boolean
    supports_negative_round_decimal_places self = True

    ## PRIVATE
    supports_float_round_decimal_places : Boolean
    supports_float_round_decimal_places self = True

    ## PRIVATE
    adapt_unified_column : Internal_Column -> Value_Type -> (SQL_Expression -> SQL_Type_Reference) -> Internal_Column
    adapt_unified_column self column approximate_result_type infer_result_type_from_database_callback =
        _ = [approximate_result_type, infer_result_type_from_database_callback]
        column

    ## PRIVATE
    prepare_fetch_types_query : SQL_Expression -> Context -> SQL_Statement
    prepare_fetch_types_query self expression context =
        Dialect.default_fetch_types_query self expression context

    ## PRIVATE
    check_aggregate_support : Aggregate_Column -> Boolean ! Unsupported_Database_Operation
    check_aggregate_support self aggregate =
        _ = aggregate
        True

    ## PRIVATE
       Checks if an operation is supported by the dialect.
    is_supported : Text -> Boolean
    is_supported self operation =
        self.internal_generator_dialect.is_supported operation

    ## PRIVATE
       The default table types to use when listing tables.
    default_table_types : Vector Text
    default_table_types self =
        ["TABLE", "VIEW", "TEMPORARY TABLE", "TEMPORARY VIEW", "MATERIALIZED VIEW"]

    ## PRIVATE
    get_error_mapper : Error_Mapper
    get_error_mapper self = Postgres_Error_Mapper

    ## PRIVATE
       The dialect-dependent strategy to get the Primary Key for a given table.

       Returns `Nothing` if the key is not defined.
    fetch_primary_key : Connection -> Text -> Vector Text ! Nothing
    fetch_primary_key self connection table_name =
        Dialect.default_fetch_primary_key connection table_name

    ## PRIVATE
       Prepares metadata for an operation taking a date/time period and checks
       if the given period is supported.
    prepare_metadata_for_period : Date_Period | Time_Period -> Value_Type -> Any
    prepare_metadata_for_period self period operation_input_type =
        Date_Period_Metadata.Value period operation_input_type

    ## PRIVATE
       Returns true if the `replace` parameters are supported by this backend.
    if_replace_params_supports : Replace_Params -> Any -> Any
    if_replace_params_supports self replace_params ~action =
        if supported_replace_params.contains replace_params then action else replace_params.throw_unsupported snowflake_dialect_name

    ## PRIVATE
    value_type_for_upload_of_existing_column : DB_Column -> Value_Type
    value_type_for_upload_of_existing_column self column = case column of
        # Return the type as-is for database columns.
        _ : DB_Column -> column.value_type
        _ : Column ->
            base_type = column.value_type
            case base_type of
                Value_Type.Decimal precision scale ->
                    used_scale = scale.if_nothing 12
                    used_precision = Math.min 38 precision.if_nothing 38
                    new_type = Value_Type.Decimal used_precision used_scale
                    if used_scale==scale && used_precision==precision then new_type else
                        Warning.attach (Inexact_Type_Coercion.Warning base_type new_type unavailable=False) new_type
                _ -> base_type

## PRIVATE
make_internal_generator_dialect =
    cases = [["LOWER", Base_Generator.make_function "LOWER"], ["UPPER", Base_Generator.make_function "UPPER"]]
    text = [starts_with, contains, ends_with, agg_shortest, agg_longest, make_case_sensitive, ["REPLACE", replace], left, right]+concat_ops+cases+trim_ops
    counts = [agg_count_is_null, agg_count_empty, agg_count_not_empty, ["COUNT_DISTINCT", agg_count_distinct], ["COUNT_DISTINCT_INCLUDE_NULL", agg_count_distinct_include_null]]
    arith_extensions = [is_nan, is_inf, floating_point_div, mod_op, decimal_div, decimal_mod, ["ROW_MIN", Base_Generator.make_function "LEAST"], ["ROW_MAX", Base_Generator.make_function "GREATEST"]]
    bool = [bool_or]

    stddev_pop = ["STDDEV_POP", Base_Generator.make_function "stddev_pop"]
    stddev_samp = ["STDDEV_SAMP", Base_Generator.make_function "stddev_samp"]
    stats = [agg_median, agg_mode, agg_percentile, stddev_pop, stddev_samp]
    date_ops = [make_extract_as_int "year", make_extract_as_int "quarter", make_extract_as_int "month", make_extract_as_int "week", make_extract_as_int "day", make_extract_as_int "hour", make_extract_as_int "minute", make_extract_fractional_as_int "second", make_extract_fractional_as_int "millisecond" modulus=1000, make_extract_fractional_as_int "microsecond" modulus=1000, ["date_add", make_date_add], ["date_diff", make_date_diff], ["date_trunc_to_day", make_date_trunc_to_day]]
    special_overrides = []
    other = [["RUNTIME_ERROR", make_runtime_error_op]]
    my_mappings = text + counts + stats + first_last_aggregators + arith_extensions + bool + date_ops + special_overrides + other
    Base_Generator.base_dialect . extend_with my_mappings

## PRIVATE
agg_count_is_null = Base_Generator.lift_unary_op "COUNT_IS_NULL" arg->
    SQL_Builder.code "COUNT_IF(" ++ arg.paren ++ " IS NULL)"

## PRIVATE
agg_count_empty = Base_Generator.lift_unary_op "COUNT_EMPTY" arg->
    SQL_Builder.code "COUNT_IF("++ arg.paren ++ " IS NULL OR " ++ arg.paren ++ " == '')"

## PRIVATE
agg_count_not_empty = Base_Generator.lift_unary_op "COUNT_NOT_EMPTY" arg->
    SQL_Builder.code "COUNT_IF(" ++ arg.paren ++ " IS NOT NULL AND " ++ arg.paren ++ " != '')"

## PRIVATE
agg_median = Base_Generator.lift_unary_op "MEDIAN" arg->
    median = SQL_Builder.code "MEDIAN(" ++ arg ++ ")"
    has_nan = SQL_Builder.code "BOOLOR_AGG(" ++ arg ++ " = 'NaN'::Double)"
    SQL_Builder.code "CASE WHEN " ++ has_nan ++ " THEN 'NaN'::Double ELSE " ++ median ++ " END"

## PRIVATE
agg_mode = Base_Generator.lift_unary_op "MODE" arg->
    SQL_Builder.code "MODE(" ++ arg ++ ")"

## PRIVATE
agg_percentile = Base_Generator.lift_binary_op "PERCENTILE" p-> expr->
    percentile = SQL_Builder.code "percentile_cont(" ++ p ++ ") WITHIN GROUP (ORDER BY " ++ expr ++ ")"
    has_nan = SQL_Builder.code "BOOLOR_AGG(" ++ expr ++ " = 'NaN'::Double)"
    SQL_Builder.code "CASE WHEN " ++ has_nan ++ " THEN 'NaN' ELSE " ++ percentile ++ " END"

## PRIVATE
   These are written in a not most-efficient way, but a way that makes them
   compatible with other group-by aggregations out-of-the-box. In the future, we
   may want to consider some alternative solutions.
first_last_aggregators =
    first = make_first_aggregator reverse=False ignore_null=False
    first_not_null = make_first_aggregator reverse=False ignore_null=True
    last = make_first_aggregator reverse=True ignore_null=False
    last_not_null = make_first_aggregator reverse=True ignore_null=True
    [["FIRST", first], ["FIRST_NOT_NULL", first_not_null], ["LAST", last], ["LAST_NOT_NULL", last_not_null]]

## PRIVATE
make_first_aggregator reverse ignore_null args =
    if args.length < 2 then Error.throw (Illegal_State.Error "Insufficient number of arguments for the operation.") else
        result_expr = args.first
        order_bys = args.drop 1

        method_name = if reverse then "LAST_VALUE" else "FIRST_VALUE"
        filter_clause = if ignore_null then ") IGNORE NULLS OVER" else ") OVER"
        order_clause = SQL_Builder.code " ORDER BY " ++ SQL_Builder.join "," order_bys
        SQL_Builder.code (method_name + "(") ++ result_expr ++ filter_clause ++ order_clause

## PRIVATE
agg_shortest = Base_Generator.lift_unary_op "SHORTEST" arg->
    SQL_Builder.code "FIRST_VALUE(" ++ arg ++ ") IGNORE NULLS OVER (ORDER BY LENGTH(" ++ arg ++ "))"

## PRIVATE
agg_longest = Base_Generator.lift_unary_op "LONGEST" arg->
    SQL_Builder.code "FIRST_VALUE(" ++ arg ++ ") IGNORE NULLS OVER (ORDER BY LENGTH(" ++ arg ++ ") DESC)"

## PRIVATE
concat_ops =
    make_raw_concat_expr expr separator =
        SQL_Builder.code "string_agg(" ++ expr ++ ", " ++ separator ++ ")"
    concat = Base_Generator.make_concat make_raw_concat_expr make_contains_expr
    [["CONCAT", concat (has_quote=False)], ["CONCAT_QUOTE_IF_NEEDED", concat (has_quote=True)]]

## PRIVATE
trim_ops =
    whitespace = "' ' || CHR(9) || CHR(10) || CHR(13)"
    make_fn fn_name = Base_Generator.lift_binary_op fn_name input-> chars-> case chars of
        Nothing -> SQL_Builder.code fn_name+"(" ++ input ++ ", " ++ whitespace ++ ")"
        _ ->
            case chars.is_constant of
                True ->
                    const = chars.fragments.vec.first.object
                    if const.is_nothing || const.is_empty then SQL_Builder.code fn_name+"(" ++ input ++ ", " ++ whitespace ++ ")" else
                        SQL_Builder.code fn_name+"(" ++ input ++ ", " ++ chars ++ ")"
                False ->
                    SQL_Builder.code "CASE WHEN " ++ chars ++ " IS NULL OR " ++ chars ++ " = '' THEN " ++ fn_name ++ "(" ++ input ++ ") ELSE " ++ fn_name ++ "(" ++ input ++ ", " ++ chars ++ ") END"
    [make_fn "TRIM", make_fn "LTRIM", make_fn "RTRIM"]

## PRIVATE
agg_count_distinct args = if args.is_empty then (Error.throw (Illegal_Argument.Error "COUNT_DISTINCT requires at least one argument.")) else
    case args.length == 1 of
        True ->
            ## A single null value will be skipped.
            SQL_Builder.code "COUNT(DISTINCT " ++ args.first ++ ")"
        False ->
            ## A tuple of nulls is not a null, so it will not be skipped - but
               we want to ignore all-null columns. So we manually filter them
               out.
            count = SQL_Builder.code "COUNT(DISTINCT (" ++ SQL_Builder.join ", " args ++ "))"
            are_nulls = args.map arg-> arg.paren ++ " IS NULL"
            all_nulls_filter = SQL_Builder.code " FILTER (WHERE NOT (" ++ SQL_Builder.join " AND " are_nulls ++ "))"
            (count ++ all_nulls_filter).paren

## PRIVATE
agg_count_distinct_include_null args =
    ## If we always count as tuples, then even null fields are counted.
    SQL_Builder.code "COUNT(DISTINCT (" ++ SQL_Builder.join ", " args ++ ", 0))"

## PRIVATE
starts_with = Base_Generator.lift_binary_sql_function "STARTS_WITH" "STARTSWITH"

## PRIVATE
ends_with = Base_Generator.lift_binary_sql_function "ENDS_WITH" "ENDSWITH"

## PRIVATE
contains = Base_Generator.lift_binary_sql_function "CONTAINS" "CONTAINS"

## PRIVATE
make_contains_expr expr substring = contains [expr, substring]

## PRIVATE
make_case_sensitive = Base_Generator.lift_unary_op "MAKE_CASE_SENSITIVE" arg->
    SQL_Builder.code "((" ++ arg ++ ') COLLATE "ucs_basic")'

## PRIVATE
left = Base_Generator.lift_binary_op "LEFT" str-> n->
    SQL_Builder.code "left(" ++ str ++ ", CAST(" ++ n ++ " AS INT))"

## PRIVATE
right = Base_Generator.lift_binary_op "RIGHT" str-> n->
    SQL_Builder.code "right(" ++ str ++ ", CAST(" ++ n ++ " AS INT))"

## PRIVATE
make_order_descriptor internal_column sort_direction text_ordering =
    nulls = case sort_direction of
        Sort_Direction.Ascending -> Nulls_Order.First
        Sort_Direction.Descending -> Nulls_Order.Last
    case text_ordering of
        Nothing ->
            Order_Descriptor.Value (Internals_Access.column_expression internal_column) sort_direction nulls_order=nulls collation=Nothing
        _ ->
            ## In the future we can modify this error to suggest using a custom defined collation.
            if text_ordering.sort_digits_as_numbers then Error.throw (Unsupported_Database_Operation.Error "Natural ordering is currently not supported. You may need to materialize the Table to perform this operation.") else
                case text_ordering.case_sensitivity of
                    Case_Sensitivity.Default ->
                        Order_Descriptor.Value (Internals_Access.column_expression internal_column) sort_direction nulls_order=nulls collation=Nothing
                    Case_Sensitivity.Sensitive ->
                        Order_Descriptor.Value (Internals_Access.column_expression internal_column) sort_direction nulls_order=nulls collation="ucs_basic"
                    Case_Sensitivity.Insensitive locale -> case locale == Locale.default of
                        False ->
                            Error.throw (Unsupported_Database_Operation.Error "Case insensitive ordering with custom locale is currently not supported. You may need to materialize the Table to perform this operation.")
                        True ->
                            upper = SQL_Expression.Operation "UPPER" [Internals_Access.column_expression internal_column]
                            folded_expression = SQL_Expression.Operation "LOWER" [upper]
                            Order_Descriptor.Value folded_expression sort_direction nulls_order=nulls collation=Nothing

## PRIVATE
is_nan = Base_Generator.lift_unary_op "IS_NAN" arg->
    (arg ++ " in (double precision 'NaN')").paren

## PRIVATE
is_inf = Base_Generator.lift_unary_op "IS_INF" arg->
    (arg ++ " in (double precision 'Infinity', double precision '-Infinity')").paren

## PRIVATE
bool_or = Base_Generator.lift_unary_op "BOOL_OR" arg->
    SQL_Builder.code "bool_or(" ++ arg ++ ")"

## PRIVATE
floating_point_div = Base_Generator.lift_binary_op "/" x-> y->
    SQL_Builder.code "CAST(" ++ x ++ " AS double precision) / CAST(" ++ y ++ " AS double precision)"

## PRIVATE
mod_op = Base_Generator.lift_binary_op "MOD" x-> y->
    x ++ " - FLOOR(CAST(" ++ x ++ " AS double precision) / CAST(" ++ y ++ " AS double precision)) * " ++ y

## PRIVATE
decimal_div = Base_Generator.lift_binary_op "DECIMAL_DIV" x-> y->
    SQL_Builder.code "CAST(" ++ x ++ " AS decimal) / CAST(" ++ y ++ " AS decimal)"

## PRIVATE
decimal_mod = Base_Generator.lift_binary_op "DECIMAL_MOD" x-> y->
    x ++ " - FLOOR(CAST(" ++ x ++ " AS decimal) / CAST(" ++ y ++ " AS decimal)) * " ++ y

## PRIVATE
supported_replace_params : Set Replace_Params
supported_replace_params =
    e0 = [Replace_Params.Value Text Case_Sensitivity.Default False, Replace_Params.Value Text Case_Sensitivity.Default True, Replace_Params.Value Text Case_Sensitivity.Sensitive False]
    e1 = [Replace_Params.Value Text Case_Sensitivity.Sensitive True, Replace_Params.Value Text Case_Sensitivity.Insensitive False, Replace_Params.Value Text Case_Sensitivity.Insensitive True]
    e2 = [Replace_Params.Value Regex Case_Sensitivity.Default False, Replace_Params.Value Regex Case_Sensitivity.Default True, Replace_Params.Value Regex Case_Sensitivity.Sensitive False]
    e3 = [Replace_Params.Value Regex Case_Sensitivity.Sensitive True, Replace_Params.Value Regex Case_Sensitivity.Insensitive False, Replace_Params.Value Regex Case_Sensitivity.Insensitive True]
    e4 = [Replace_Params.Value DB_Column Case_Sensitivity.Default False, Replace_Params.Value DB_Column Case_Sensitivity.Sensitive False]
    Set.from_vector <| e0 + e1 + e2 + e3 + e4

## PRIVATE
replace : Vector SQL_Builder -> Any -> SQL_Builder
replace args metadata =
    input = args.at 0
    pattern = args.at 1
    replacement = args.at 2

    ## `raw_pattern` is a `Text` or `Regex`; it's the same value as `input`, but not
       embedded in IR.
    raw_pattern = metadata.at 0
    replace_params = metadata.at 1

    expression = case replace_params.input_type of
        Text ->
            ## To use REGEXP_REPLACE on a non-regex, we have to escape it.
            escaped_pattern = SQL_Builder.interpolation (Regex.escape raw_pattern)
            case replace_params.only_first of
                False -> case replace_params.case_sensitivity of
                    Case_Sensitivity.Insensitive _ ->
                        SQL_Builder.code "REGEXP_REPLACE(" ++ input ++ ", " ++ escaped_pattern ++ ", " ++ replacement ++ ", 'ig')"
                    _ ->
                        SQL_Builder.code "REPLACE(" ++ input ++ ", " ++ pattern ++ ", " ++ replacement ++ ")"
                True -> case replace_params.case_sensitivity of
                    Case_Sensitivity.Insensitive _ ->
                        SQL_Builder.code "REGEXP_REPLACE(" ++ input ++ ", " ++ escaped_pattern ++ ", " ++ replacement ++ ", 'i')"
                    _ ->
                        SQL_Builder.code "REGEXP_REPLACE(" ++ input ++ ", " ++ escaped_pattern ++ ", " ++ replacement ++ ")"
        Regex ->
            pattern_string = SQL_Builder.interpolation raw_pattern.pattern_string
            case replace_params.only_first of
                False -> case replace_params.case_sensitivity of
                    Case_Sensitivity.Insensitive _ ->
                        SQL_Builder.code "REGEXP_REPLACE(" ++ input ++ ", " ++ pattern_string ++ ", " ++ replacement ++ ", 'ig')"
                    _ ->
                        SQL_Builder.code "REGEXP_REPLACE(" ++ input ++ ", " ++ pattern_string ++ ", " ++ replacement ++ ", 'g')"
                True -> case replace_params.case_sensitivity of
                    Case_Sensitivity.Insensitive _ ->
                        SQL_Builder.code "REGEXP_REPLACE(" ++ input ++ ", " ++ pattern_string ++ ", " ++ replacement ++ ", 'i')"
                    _ ->
                        SQL_Builder.code "REGEXP_REPLACE(" ++ input ++ ", " ++ pattern_string ++ ", " ++ replacement ++ ")"
        DB_Column ->
            case replace_params.only_first of
                False -> case replace_params.case_sensitivity of
                    Case_Sensitivity.Insensitive _ ->
                        Nothing
                    _ ->
                        SQL_Builder.code "REPLACE(" ++ input ++ ", " ++ pattern ++ ", " ++ replacement ++ ")"
                True -> Nothing
    expression.if_nothing (replace_params.throw_unsupported snowflake_dialect_name)

## PRIVATE
make_extract_as_int enso_name sql_name=enso_name =
    Base_Generator.lift_unary_op enso_name arg->
        as_int32 <| SQL_Builder.code "EXTRACT(" ++ sql_name ++ " FROM " ++ arg ++ ")"

## PRIVATE
make_extract_fractional_as_int enso_name sql_name=enso_name modulus=Nothing =
    Base_Generator.lift_unary_op enso_name arg->
        result = as_int32 <| SQL_Builder.code "TRUNC(EXTRACT(" ++ sql_name ++ " FROM " ++ arg ++ "))"
        case modulus of
            Nothing -> result
            _ : Integer ->
                (result ++ (" % "+modulus.to_text)).paren

## PRIVATE
|
||||
make_date_add arguments (metadata : Date_Period_Metadata) =
|
||||
if arguments.length != 2 then Error.throw (Illegal_State.Error "date_add expects exactly 2 sub expressions. This is a bug in Database library.") else
|
||||
expr = arguments.at 0
|
||||
amount = arguments.at 1
|
||||
interval_arg = case metadata.period of
|
||||
Date_Period.Year ->
|
||||
"years=>1"
|
||||
Date_Period.Quarter ->
|
||||
"months=>3"
|
||||
Date_Period.Month ->
|
||||
"months=>1"
|
||||
Date_Period.Week _ ->
|
||||
"weeks=>1"
|
||||
Date_Period.Day ->
|
||||
"days=>1"
|
||||
Time_Period.Day ->
|
||||
"hours=>24"
|
||||
Time_Period.Hour ->
|
||||
"hours=>1"
|
||||
Time_Period.Minute ->
|
||||
"mins=>1"
|
||||
Time_Period.Second ->
|
||||
"secs=>1"
|
||||
Time_Period.Millisecond ->
|
||||
"secs=>0.001"
|
||||
Time_Period.Microsecond ->
|
||||
"secs=>0.000001"
|
||||
interval_expression = SQL_Builder.code "make_interval(" ++ interval_arg ++ ")"
|
||||
shifted = SQL_Builder.code "(" ++ expr ++ " + (" ++ amount ++ " * " ++ interval_expression ++ "))"
|
||||
case metadata.input_value_type of
|
||||
Value_Type.Date ->
|
||||
SQL_Builder.code "(" ++ shifted ++ "::date)"
|
||||
_ -> shifted

## PRIVATE
make_date_diff arguments (metadata : Date_Period_Metadata) =
if arguments.length != 2 then Error.throw (Illegal_State.Error "date_diff expects exactly 2 sub expressions. This is a bug in Database library.") else
start = arguments.at 0
end = arguments.at 1

truncate expr =
SQL_Builder.code "TRUNC(" ++ expr ++ ")"

# `age` computes a 'symbolic' difference expressed in years, months and days.
extract_years =
as_int32 <| SQL_Builder.code "EXTRACT(YEARS FROM age(" ++ end ++ ", " ++ start ++ "))"
# To get total months, we need to sum up with whole years.
extract_months =
months = as_int32 <|
SQL_Builder.code "EXTRACT(MONTHS FROM age(" ++ end ++ ", " ++ start ++ "))"
SQL_Builder.code "(" ++ extract_years ++ " * 12 + " ++ months ++ ")"
## To get total days, we cannot use `age`, because we cannot convert an
amount of months to days (month lengths vary). Instead we rely on `-`
returning an interval based on 'raw' days.
extract_days =
as_int32 <| case metadata.input_value_type of
## For the pure 'date' datatype, the difference is a simple integer
count of days.
Value_Type.Date -> (end ++ " - " ++ start).paren
# For others, it is an interval, so we need to extract.
_ -> SQL_Builder.code "EXTRACT(DAYS FROM (" ++ end ++ " - " ++ start ++ "))"
## We round the amount of seconds towards zero, as we only count full
elapsed seconds in the interval.
Note that it is important the interval is computed using `-`. The
symbolic `age` has no clear mapping to the count of days, skewing the
result.
extract_seconds =
seconds_numeric = SQL_Builder.code "EXTRACT(EPOCH FROM (" ++ end ++ " - " ++ start ++ "))"
as_int64 (truncate seconds_numeric)
case metadata.period of
Date_Period.Year -> extract_years
Date_Period.Month -> extract_months
Date_Period.Quarter -> (extract_months ++ " / 3").paren
Date_Period.Week _ -> (extract_days ++ " / 7").paren
Date_Period.Day -> extract_days
## EXTRACT HOURS/MINUTES would yield only a date part, but we need
the total, which is most easily achieved via EPOCH.
Time_Period.Hour -> (extract_seconds ++ " / 3600").paren
Time_Period.Minute -> (extract_seconds ++ " / 60").paren
Time_Period.Second -> extract_seconds
Time_Period.Day -> case metadata.input_value_type of
Value_Type.Date -> extract_days
# Time_Period.Day is treated as 24 hours, so for types that support time we use the same algorithm as for hours, but divide by 24.
_ -> (extract_seconds ++ " / (3600 * 24)").paren
## The EPOCH gives back just the integer amount of seconds, without
the fractional part. So we get the fractional part using
MILLISECONDS - but that does not give the _total_, just the
'seconds of minute' part, expressed in milliseconds. So we need
to merge both - but then seconds of minute appear twice, so we %
the milliseconds to get just the fractional part from it and sum
both.
Time_Period.Millisecond ->
millis = truncate <|
SQL_Builder.code "EXTRACT(MILLISECONDS FROM (" ++ end ++ " - " ++ start ++ "))"
as_int64 <|
((extract_seconds ++ " * 1000").paren ++ " + " ++ (millis ++ " % 1000").paren).paren
Time_Period.Microsecond ->
micros = SQL_Builder.code "EXTRACT(MICROSECONDS FROM (" ++ end ++ " - " ++ start ++ "))"
as_int64 <|
((extract_seconds ++ " * 1000000").paren ++ " + " ++ (micros ++ " % 1000000").paren).paren
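
# A worked example of the arithmetic above: for two timestamps 90 minutes
# apart, EXTRACT(EPOCH FROM (end - start)) gives 5400 seconds, so
# `Time_Period.Hour` divides by 3600 (one full elapsed hour),
# `Time_Period.Minute` divides by 60 (90 minutes), and `Time_Period.Second`
# returns the 5400 directly.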

## PRIVATE
make_date_trunc_to_day arguments =
if arguments.length != 1 then Error.throw (Illegal_State.Error "date_trunc_to_day expects exactly one sub expression. This is a bug in Database library.") else
expr = arguments.at 0
SQL_Builder.code "(DATE_TRUNC('day'," ++ expr ++ ") :: DATE)"

## PRIVATE
Alters the expression, casting the value to a 64-bit integer.
TODO probably remove
as_int64 expr =
SQL_Builder.code "(" ++ expr ++ "::int8)"

## PRIVATE
Alters the expression, casting the value to a 32-bit integer.
TODO probably remove
as_int32 expr =
SQL_Builder.code "(" ++ expr ++ "::int4)"

## PRIVATE
The RUNTIME_ERROR operation should allow the query to compile fine and it
will not prevent it from running if the branch including this operation is
not taken. But if the branch is computed, it should ensure the query fails.

This query never returns a value, so its type should be polymorphic. However,
that is not possible - so currently it just 'pretends' that it would return a
Boolean - because that is the type we expect in the use-case. This can be
altered if needed.

It takes a variable as the second argument. It can be any value that is not
statically known - this ensures that the optimizer will not be able to
pre-compute the expression too early (which could make the query fail
spuriously). See `make_invariant_check` in `Lookup_Query_Helper` for an
example.
make_runtime_error_op arguments =
if arguments.length != 2 then
Panic.throw (Illegal_Argument.Error "RUNTIME_ERROR takes exactly 2 arguments (error message and a variable to ensure deferred execution).")
error_message = arguments.at 0
variable_to_defer = arguments.at 1

SQL_Builder.code "CAST('[ENSO INVARIANT VIOLATED: '||" ++ error_message ++ "||'] '||COALESCE(" ++ variable_to_defer ++ "::TEXT,'NULL') AS BOOLEAN)"

## PRIVATE
snowflake_dialect_name = "Snowflake"

@ -115,9 +115,10 @@ type Suite
IO.println <| skipped_tests.to_text + " tests skipped."
IO.println <| pending_groups.to_text + " groups skipped."
IO.println ""
IO.println <| "Failed tests: '" + failed_tests_names + "'"
if failed_tests_number > 10 then IO.println "(Displaying only first 10 failed tests)"
IO.println ""
if failed_tests_number > 0 then
IO.println <| "Failed tests: '" + failed_tests_names + "'"
if failed_tests_number > 10 then IO.println "(Displaying only first 10 failed tests)"
IO.println ""
exit_code = if failed_tests_number > 0 then 1 else 0
System.exit exit_code
False ->

@ -142,5 +143,5 @@ name_matches (name : Text) (filter : (Text | Nothing)) -> Boolean =
text : Text ->
regex = Regex.compile text
if regex.is_error.not then regex.match name != Nothing else
Panic.throw (Illegal_Argument.Error ("Filter '" + text + "' cannot be converted to regex"))
Panic.throw (Illegal_Argument.Error ("Filter " + text.pretty + " cannot be converted to regex"))
Nothing -> True

@ -1,4 +1,5 @@
minimum-launcher-version: 2023.2.1-nightly.2023.11.2
minimum-project-manager-version: 2023.2.1-nightly.2023.11.2
jvm-options:
- value: "-Dpolyglot.compiler.IterativePartialEscape=true"
- value: "-Dgraal.PrintGraph=Network"
- value: "--add-opens=java.base/java.nio=ALL-UNNAMED"

test/Snowflake_Tests/README.md
@ -0,0 +1,19 @@
This is a set of tests for the Snowflake integration for Enso.

## Testing Snowflake

To run the tests, you need to prepare credentials for a Snowflake instance
that they can be run against.

Please set the following environment variables:

- `ENSO_SNOWFLAKE_ACCOUNT` - the account name for the Snowflake instance,
- `ENSO_SNOWFLAKE_USER` - the username to use for the tests,
- `ENSO_SNOWFLAKE_PASSWORD` - the password for the user; it can be a plain
  text value or, for better security, an Enso Secret. If using a secret,
  please provide it as a path - `enso://orgid/path/foo.secret`,
- `ENSO_SNOWFLAKE_DATABASE` - the name of the database to use for the tests,
- `ENSO_SNOWFLAKE_SCHEMA` - the name of the schema to use for the tests
  (optional, defaults to `PUBLIC`),
- `ENSO_SNOWFLAKE_WAREHOUSE` - the name of the warehouse to use for the tests
  (optional, defaults to a blank value).
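
With these variables set, the test setup builds a connection roughly as
follows - a minimal sketch mirroring `get_configured_connection_details` in
`Snowflake_Spec`, assuming the same imports it uses (`Standard.Base`,
`Standard.Database`, `Standard.Snowflake`):

    account = Environment.get "ENSO_SNOWFLAKE_ACCOUNT"
    credentials = Credentials.Username_And_Password (Environment.get "ENSO_SNOWFLAKE_USER") (Environment.get "ENSO_SNOWFLAKE_PASSWORD")
    details = Snowflake_Details.Snowflake account credentials (Environment.get "ENSO_SNOWFLAKE_DATABASE") "PUBLIC" ""
    connection = Database.connect details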

test/Snowflake_Tests/package.yaml
@ -0,0 +1,7 @@
name: Snowflake_Tests
namespace: enso_dev
version: 0.0.1
license: MIT
author: enso-dev@enso.org
maintainer: enso-dev@enso.org
prefer-local-libraries: true

test/Snowflake_Tests/src/Main.enso
@ -0,0 +1,11 @@
from Standard.Base import all

from Standard.Test import Test

import project.Snowflake_Spec

main filter=Nothing =
suite = Test.build suite_builder->
Snowflake_Spec.add_specs suite_builder

suite.run_with_filter filter

@ -13,24 +13,23 @@ import Standard.Database.Internal.Replace_Params.Replace_Params
from Standard.Database import all
from Standard.Database.Errors import all

from Standard.Snowflake import all

from Standard.Test import all
import Standard.Test.Test_Environment

import project.Database.Common.Common_Spec
import project.Database.Transaction_Spec
import project.Database.Upload_Spec
import project.Database.Helpers.Name_Generator
import project.Common_Table_Operations
from project.Common_Table_Operations.Util import all
from project.Database.Types.Postgres_Type_Mapping_Spec import default_text
from project.Database.Postgres_Spec import Basic_Test_Data, Postgres_Tables_Data
import enso_dev.Table_Tests
import enso_dev.Table_Tests.Database.Common.Common_Spec
import enso_dev.Table_Tests.Database.Transaction_Spec
import enso_dev.Table_Tests.Database.Upload_Spec
import enso_dev.Table_Tests.Database.Helpers.Name_Generator
import enso_dev.Table_Tests.Common_Table_Operations
from enso_dev.Table_Tests.Common_Table_Operations.Util import all
from enso_dev.Table_Tests.Database.Types.Postgres_Type_Mapping_Spec import default_text
from enso_dev.Table_Tests.Database.Postgres_Spec import Basic_Test_Data, Postgres_Tables_Data

import enso_dev.Base_Tests.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup

## Still a WIP - it cannot be completed until the dialect is implemented
properly for Snowflake.
It is not currently being run as part of the test suite.

type Snowflake_Info_Data
Value ~data

@ -66,19 +65,18 @@ snowflake_specific_spec suite_builder create_connection_fn db_name setup =
data.teardown

group_builder.specify "should be able to get current database and list databases" <|
data.connection.database . should_equal db_name
data.connection.database.equals_ignore_case db_name . should_be_true
data.connection.databases.length . should_not_equal 0
data.connection.databases.contains db_name . should_be_true
data.connection.databases.find (name-> name.equals_ignore_case db_name) . should_succeed
Meta.is_same_object data.connection (data.connection.set_database db_name) . should_be_true

group_builder.specify "should be able to get current schema and list schemas" <|
data.connection.schema . should_equal "public"
data.connection.schema.equals_ignore_case "public" . should_be_true
data.connection.schemas.length . should_not_equal 0
data.connection.schemas.contains "public" . should_be_true
data.connection.schemas.contains "information_schema" . should_be_true
data.connection.schemas.find (name-> name.equals_ignore_case "public") . should_succeed
Meta.is_same_object data.connection (data.connection.set_schema "public") . should_be_true

group_builder.specify "should allow changing schema" <|
group_builder.specify "should allow changing schema" pending="TODO?" <|
new_connection = data.connection.set_schema "information_schema"
new_schema = new_connection.read (SQL_Query.Raw_SQL "SELECT current_schema()") . at 0 . to_vector . first
new_schema . should_equal "information_schema"

@ -119,14 +117,14 @@ snowflake_specific_spec suite_builder create_connection_fn db_name setup =
group_builder.specify "should be able to filter tables by name" <|
tables = data.connection.tables data.tinfo
tables.row_count . should_equal 1
tables.at "Database" . to_vector . at 0 . should_equal db_name
tables.at "Schema" . to_vector . at 0 . should_equal "public"
tables.at "Database" . to_vector . at 0 . equals_ignore_case db_name . should_be_true
tables.at "Schema" . to_vector . at 0 . equals_ignore_case "public" . should_be_true
tables.at "Name" . to_vector . at 0 . should_equal data.tinfo
tables.at "Type" . to_vector . at 0 . should_equal "TABLE"

data.connection.tables "TestT_ble%" . row_count . should_equal 1
data.connection.tables "Temporary%ble%" . row_count . should_equal 1
data.connection.tables "Temporary%ble%" . at "Type" . to_vector . should_equal ["TEMPORARY TABLE"]
data.connection.tables "Temporary%ble%" . at "Type" . to_vector . should_equal ["TEMPORARY"]
data.connection.tables "N_nexistent%" . row_count . should_equal 0

group_builder.specify "should be able to filter tables by type" <|

@ -149,7 +147,7 @@ snowflake_specific_spec suite_builder create_connection_fn db_name setup =
i.at "Value Type" . to_vector . should_equal [default_text, Value_Type.Integer, Value_Type.Boolean, Value_Type.Float]

group_builder.specify "should return Table information, also for aggregated results" <|
i = data.t.aggregate columns=[Aggregate_Column.Concatenate "strs", Aggregate_Column.Sum "ints", Aggregate_Column.Count_Distinct "bools"] . info
i = data.t.aggregate columns=[Aggregate_Column.Concatenate "strs", Aggregate_Column.Sum "ints", Aggregate_Column.Count_Distinct "bools"] . column_info
i.at "Column" . to_vector . should_equal ["Concatenate strs", "Sum ints", "Count Distinct bools"]
i.at "Items Count" . to_vector . should_equal [1, 1, 1]
i.at "Value Type" . to_vector . should_equal [default_text, Value_Type.Decimal, Value_Type.Integer]

@ -160,7 +158,7 @@ snowflake_specific_spec suite_builder create_connection_fn db_name setup =
data.t.at "bools" . value_type . is_boolean . should_be_true
data.t.at "doubles" . value_type . is_floating_point . should_be_true

group_builder.specify "should preserve Snowflake types when table is materialized, where possible" <|
group_builder.specify "should preserve Snowflake types when table is materialized, where possible" pending="TODO" <|
name = Name_Generator.random_name "types-test"
Problems.assume_no_problems <|
data.connection.execute_update 'CREATE TEMPORARY TABLE "'+name+'" ("int4" int4, "int2" int2, "txt-limited" varchar(10), "txt-fixed" char(3))'

@ -274,7 +272,7 @@ snowflake_specific_spec suite_builder create_connection_fn db_name setup =
group_builder.teardown <|
data.teardown

group_builder.specify "materialize should respect the overridden type" <|
group_builder.specify "materialize should respect the overridden type" pending="TODO" <|
t0 = table_builder [["x", [False, True, False]], ["A", ["a", "b", "c"]], ["B", ["xyz", "abc", "def"]]] connection=data.connection
t1 = t0 . cast "A" (Value_Type.Char size=1 variable_length=False) . cast "B" (Value_Type.Char size=3 variable_length=False)

@ -294,7 +292,7 @@ snowflake_specific_spec suite_builder create_connection_fn db_name setup =
Test.with_clue "d.value_type="+d.value_type.to_display_text+": " <|
d.value_type.variable_length.should_be_true

group_builder.specify "should be able to round-trip a BigInteger column" <|
group_builder.specify "should be able to round-trip a BigInteger column" pending="TODO" <|
x = 2^70
m1 = Table.new [["X", [10, x]]]
m1.at "X" . value_type . should_be_a (Value_Type.Decimal ...)

@ -355,7 +353,7 @@ snowflake_specific_spec suite_builder create_connection_fn db_name setup =
w4.requested_type . should_equal (Value_Type.Decimal precision=Nothing scale=Nothing)
w4.actual_type . should_equal Value_Type.Float

group_builder.specify "should round-trip timestamptz column, preserving instant but converting to UTC" <|
group_builder.specify "should round-trip timestamptz column, preserving instant but converting to UTC" pending="TODO" <|
table_name = Name_Generator.random_name "TimestampTZ"
table = data.connection.create_table table_name [Column_Description.Value "A" (Value_Type.Date_Time with_timezone=True)] primary_key=[]

@ -388,7 +386,7 @@ snowflake_specific_spec suite_builder create_connection_fn db_name setup =
t2.row_count . should_equal local_equals.length
t2.at "A" . to_vector . should_equal_tz_agnostic local_equals

group_builder.specify "will round-trip timestamp column without timezone by converting it to UTC" <|
group_builder.specify "will round-trip timestamp column without timezone by converting it to UTC" pending="TODO" <|
table_name = Name_Generator.random_name "Timestamp"
table = data.connection.create_table table_name [Column_Description.Value "A" (Value_Type.Date_Time with_timezone=False)] primary_key=[]
Problems.assume_no_problems table

@ -523,7 +521,7 @@ snowflake_specific_spec suite_builder create_connection_fn db_name setup =
do_round data 231 . should_be_a Float
do_round data 231 -1 . should_be_a Float

add_postgres_specs suite_builder create_connection_fn db_name =
add_snowflake_specs suite_builder create_connection_fn db_name =
prefix = "[Snowflake] "
name_counter = Ref.new 0

@ -541,7 +539,7 @@ add_postgres_specs suite_builder create_connection_fn db_name =

common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by_unicode_normalization_by_default=True allows_mixed_type_comparisons=False fixed_length_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=True supports_decimal_type=True supported_replace_params=supported_replace_params
aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last_row_order=False aggregation_problems=False
agg_in_memory_table = (enso_project.data / "data.csv") . read
agg_in_memory_table = ((Project_Description.new enso_dev.Table_Tests).data / "data.csv") . read

agg_table_fn = _->
connection = create_connection_fn Nothing

@ -567,69 +565,28 @@ supported_replace_params =
Set.from_vector <| e0 + e1 + e2 + e3 + e4

add_table_specs suite_builder =
db_name = Environment.get "ENSO_DATABASE_TEST_DB_NAME"
db_host_port = (Environment.get "ENSO_DATABASE_TEST_HOST").if_nothing "localhost" . split ':'
db_host = db_host_port.at 0
db_port = if db_host_port.length == 1 then 5432 else Integer.parse (db_host_port.at 1)
db_user = Environment.get "ENSO_DATABASE_TEST_DB_USER"
db_password = Environment.get "ENSO_DATABASE_TEST_DB_PASSWORD"
ca_cert_file = Environment.get "ENSO_DATABASE_TEST_CA_CERT_FILE"

ssl_pending = if ca_cert_file.is_nothing then "PostgreSQL SSL test not configured." else Nothing

suite_builder.group "[Snowflake] SSL connectivity tests" pending=ssl_pending group_builder->
group_builder.specify "should connect without ssl parameter" <|
Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password)) . should_succeed

group_builder.specify "should connect, requiring SSL" <|
Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=SSL_Mode.Require) . should_succeed

group_builder.specify "should connect be able to verify the certificate" <|
Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=(SSL_Mode.Verify_CA ca_cert_file)) . should_succeed

## Default certificate should not accept the self signed certificate.
ca_fail = Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=SSL_Mode.Verify_CA)
ca_fail.is_error . should_equal True
ca_fail.catch SQL_Error . is_a SQL_Error . should_equal True

group_builder.specify "should connect be able to verify the host name against the certificate" <|
Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=(SSL_Mode.Full_Verification ca_cert_file)) . should_succeed

alternate_host = Environment.get "ENSO_DATABASE_TEST_ALTERNATE_HOST" . if_nothing <|
if db_host == "127.0.0.1" then "localhost" else Nothing
pending_alternate = if alternate_host.is_nothing then "Alternative host name not configured." else Nothing
group_builder.specify "should fail to connect with alternate host name not valid in certificate" pending=pending_alternate <|
ca_fail = Database.connect (Postgres alternate_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=(SSL_Mode.Full_Verification ca_cert_file))
ca_fail.is_error . should_equal True
ca_fail.catch SQL_Error . is_a SQL_Error . should_equal True

case create_connection_builder of
Nothing ->
message = "PostgreSQL test database is not configured. See README.md for instructions."
message = "Snowflake test connection is not configured. See README.md for instructions."
suite_builder.group "[Snowflake] Database tests" pending=message (_-> Nothing)
connection_builder ->
add_postgres_specs suite_builder connection_builder db_name

db_name = get_configured_connection_details.database
add_snowflake_specs suite_builder connection_builder db_name
Transaction_Spec.add_specs suite_builder connection_builder "[Snowflake] "
Upload_Spec.add_specs suite_builder connection_builder "[Snowflake] "

suite_builder.group "[Snowflake] Secrets in connection settings" group_builder->
cloud_setup = Cloud_Tests_Setup.prepare
base_details = get_configured_connection_details
group_builder.specify "should allow to set up a connection with the password passed as a secret" pending=cloud_setup.pending <|
cloud_setup.with_prepared_environment <|
with_secret "my_postgres_username" db_user username_secret-> with_secret "my_postgres_password" db_password password_secret->
my_secret_name = "Enso Test: My Secret App NAME " + (Random.uuid.take 5)
with_secret "my_postgres_app_name" my_secret_name app_name_secret-> Test.with_retries <|
details = Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password username_secret password_secret)
# We set the ApplicationName option, so that we can see that secrets can be used in custom properties.
options = Connection_Options.Value [["ApplicationName", app_name_secret]]
connection = Database.connect details options
connection.should_succeed
Panic.with_finalizer connection.close <|
connection.tables . should_be_a Table
table = connection.read "SELECT application_name FROM pg_stat_activity"
application_names = table.at 0 . to_vector
application_names.should_contain my_secret_name
with_secret "my_postgres_username" base_details.credentials.username username_secret-> with_secret "my_postgres_password" base_details.credentials.password password_secret->
secret_credentials = Credentials.Username_And_Password username_secret password_secret
details = Snowflake_Details.Snowflake base_details.account_name secret_credentials base_details.database base_details.schema base_details.warehouse
connection = Database.connect details
connection.should_succeed
Panic.with_finalizer connection.close <|
connection.tables . should_be_a Table

with_secret name value callback =
secret = Enso_Secret.create name+Random.uuid value

@ -637,14 +594,19 @@ with_secret name value callback =
Panic.with_finalizer secret.delete (callback secret)

get_configured_connection_details =
db_name = Environment.get "ENSO_DATABASE_TEST_DB_NAME"
db_host_port = (Environment.get "ENSO_DATABASE_TEST_HOST").if_nothing "localhost" . split ':'
db_host = db_host_port.at 0
db_port = if db_host_port.length == 1 then 5432 else Integer.parse (db_host_port.at 1)
db_user = Environment.get "ENSO_DATABASE_TEST_DB_USER"
db_password = Environment.get "ENSO_DATABASE_TEST_DB_PASSWORD"
if db_name.is_nothing then Nothing else
Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password)
account_name = Environment.get "ENSO_SNOWFLAKE_ACCOUNT"
if account_name.is_nothing then Nothing else
get_var name =
Environment.get name if_missing=(Panic.throw (Illegal_State.Error "ENSO_SNOWFLAKE_ACCOUNT is set, but "+name+" is not. Please set all required environment variables."))
user = get_var "ENSO_SNOWFLAKE_USER"
password = get_var "ENSO_SNOWFLAKE_PASSWORD"
database = get_var "ENSO_SNOWFLAKE_DATABASE"
schema = Environment.get "ENSO_SNOWFLAKE_SCHEMA" if_missing="PUBLIC"
warehouse = Environment.get "ENSO_SNOWFLAKE_WAREHOUSE" if_missing=""

resolved_password = if password.starts_with "enso://" then Enso_Secret.get password else password
credentials = Credentials.Username_And_Password user resolved_password
Snowflake_Details.Snowflake account_name credentials database schema warehouse
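
# A sketch of typical use (assuming the ENSO_SNOWFLAKE_* variables described in
# the README are set): the returned details feed straight into the connection
# builder, e.g.
#     connection = Database.connect get_configured_connection_details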

## Returns a function that takes anything and returns a new connection.
The function creates a _new_ connection on each invocation

@ -138,7 +138,7 @@ type Missing_Values_Data
setup create_connection_fn = Missing_Values_Data.Value <|
connection = create_connection_fn Nothing
t4 = upload connection "T4" <|
Table.new [["a", [0, 1, Nothing, 42, Nothing]], ["b", [True, Nothing, True, False, Nothing]], ["c", ["", "foo", "bar", Nothing, Nothing]]]
Table.new [["a", [0, 1, Nothing, 42, Nothing]], ["b", [True, Nothing, True, False, Nothing]], ["c", ["", "foo", "bar", Nothing, Nothing]], ["rowid", [1, 2, 3, 4, 5]]]
[connection, t4]

teardown self =

@ -203,7 +203,7 @@ add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn

|
||||
name = data.t1.name
|
||||
t2 = data.connection.query (SQL_Query.Raw_SQL ('SELECT a, b FROM "' + name + '" WHERE a >= 3'))
|
||||
t2 = data.connection.query (SQL_Query.Raw_SQL ('SELECT "a", "b" FROM "' + name + '" WHERE "a" >= 3'))
|
||||
m2 = t2.read
|
||||
m2.column_names . should_equal ["a", "b"]
|
||||
m2.at "a" . to_vector . should_equal [4]
|
||||
@ -212,7 +212,7 @@ add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn
|
||||
|
||||
group_builder.specify "should allow to access a Table by an SQL query" <|
|
||||
name = data.t1.name
|
||||
t2 = data.connection.query (SQL_Query.Raw_SQL ('SELECT a, b FROM "' + name + '" WHERE a >= 3'))
|
||||
t2 = data.connection.query (SQL_Query.Raw_SQL ('SELECT "a", "b" FROM "' + name + '" WHERE "a" >= 3'))
|
||||
m2 = t2.read
|
||||
m2.column_names . should_equal ["a", "b"]
|
||||
m2.at "a" . to_vector . should_equal [4]
|
||||
@ -225,7 +225,7 @@ add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn
|
||||
|
||||
group_builder.specify "should use labels for column names" <|
|
||||
name = data.t1.name
|
||||
t2 = data.connection.query (SQL_Query.Raw_SQL ('SELECT a AS c, b FROM "' + name + '" WHERE a >= 3'))
|
||||
t2 = data.connection.query (SQL_Query.Raw_SQL ('SELECT "a" AS "c", "b" FROM "' + name + '" WHERE "a" >= 3'))
|
||||
m2 = t2.read
|
||||
m2.column_names . should_equal ["c", "b"]
|
||||
m2.at "c" . to_vector . should_equal [4]
|
||||
@ -237,7 +237,7 @@ add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn
|
||||
t2 = data.connection.query name
|
||||
t2.read . should_equal data.t1.read
|
||||
|
||||
t3 = data.connection.query ('SELECT a, b FROM "' + name + '" WHERE a >= 3')
|
||||
t3 = data.connection.query ('SELECT "a", "b" FROM "' + name + '" WHERE "a" >= 3')
|
||||
m3 = t3.read
|
||||
m3.column_names . should_equal ["a", "b"]
|
||||
m3.at "a" . to_vector . should_equal [4]
|
||||
@ -280,7 +280,7 @@ add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn
|
||||
(Table.new [["a", [1, 2, 3]], ["b", [4, 5, 6]]]).select_into_database_table data.connection name temporary=True
|
||||
|
||||
t1 = data.connection.query name
|
||||
m1 = t1.read
|
||||
m1 = t1.read.order_by "a"
|
||||
Problems.assume_no_problems m1
|
||||
m1.at "a" . to_vector . should_equal [1, 2, 3]
|
||||
m1.at "b" . to_vector . should_equal [4, 5, 6]
|
||||
@ -302,7 +302,7 @@ add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn
|
||||
(Table.new [["a", [1, 2, 3]], ["b", [4, 5, 6]]]).select_into_database_table data.connection name temporary=True
|
||||
|
||||
t1 = data.connection.query name
|
||||
m1 = t1.read
|
||||
m1 = t1.read.order_by "a"
|
||||
Problems.assume_no_problems m1
|
||||
m1.at "a" . to_vector . should_equal [1, 2, 3]
|
||||
m1.at "b" . to_vector . should_equal [4, 5, 6]
|
||||
@ -346,9 +346,10 @@ add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn
|
||||
data.teardown
|
||||
|
||||
group_builder.specify "fill_nothing should replace nulls" <|
|
||||
data.t4.at 'a' . fill_nothing 10 . to_vector . should_equal [0, 1, 10, 42, 10]
|
||||
data.t4.at 'b' . fill_nothing False . to_vector . should_equal [True, False, True, False, False]
|
||||
data.t4.at 'c' . fill_nothing "NA" . to_vector . should_equal ["", "foo", "bar", "NA", "NA"]
|
||||
t4_ordered = data.t4.order_by "rowid"
|
||||
t4_ordered.at "a" . fill_nothing 10 . to_vector . should_equal [0, 1, 10, 42, 10]
|
||||
t4_ordered.at "b" . fill_nothing False . to_vector . should_equal [True, False, True, False, False]
|
||||
t4_ordered.at "c" . fill_nothing "NA" . to_vector . should_equal ["", "foo", "bar", "NA", "NA"]
|
||||
|
||||
group_builder.specify "should correctly be counted" <|
|
||||
data.t4.row_count . should_equal 5
|
||||
@ -366,10 +367,10 @@ add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn
|
||||
|
||||
group_builder.specify "should allow sorting by a single column name" <|
|
||||
r_1 = data.df.order_by ([Sort_Column.Name 'quantity'])
|
||||
r_1.at 'id' . to_vector . should_equal [2,4,1,3,5,6]
|
||||
r_1.at 'id' . to_vector . should_have_relative_ordering [[2,4],[1,3],[5,6]]
|
||||
|
||||
r_3 = data.df.order_by ([Sort_Column.Name 'rating' Sort_Direction.Descending])
|
||||
r_3.at 'id' . to_vector . should_equal [3,1,4,5,2,6]
|
||||
r_3.at 'id' . to_vector . should_have_relative_ordering [[3],[1,4],[5],[2,6]]
|
||||
|
||||
group_builder.specify 'should allow sorting by multiple column names' <|
|
||||
r_1 = data.df.order_by ([Sort_Column.Name 'quantity', Sort_Column.Name 'rating'])
|
||||
@ -384,19 +385,20 @@ add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn
|
||||
r_1.at 'id' . to_vector . should_equal [4,2,3,1,6,5]
|
||||
|
||||
group_builder.specify 'should correctly reorder all kinds of columns and leave the original columns untouched' <|
|
||||
r = data.t8.order_by ([Sort_Column.Name 'ord'])
|
||||
initial = data.t8.order_by 'ints'
|
||||
r = initial.order_by ([Sort_Column.Name 'ord'])
|
||||
|
||||
r.at 'ints' . to_vector . should_equal [1, 5, 3, 2, 4]
|
||||
data.t8.at 'ints' . to_vector . should_equal data.ints
|
||||
initial.at 'ints' . to_vector . should_equal data.ints
|
||||
|
||||
r.at 'reals' . to_vector . should_equal [1.3, 1.6, 3.2, 4.6, 5.2]
|
||||
data.t8.at 'reals' . to_vector . should_equal data.reals
|
||||
initial.at 'reals' . to_vector . should_equal data.reals
|
||||
|
||||
r.at 'bools' . to_vector . should_equal [False, False, True, False, True]
|
||||
data.t8.at 'bools' . to_vector . should_equal data.bools
|
||||
initial.at 'bools' . to_vector . should_equal data.bools
|
||||
|
||||
r.at 'texts' . to_vector . should_equal ['foo', 'spam', 'bar', 'foo', 'baz']
|
||||
data.t8.at 'texts' . to_vector . should_equal data.texts
|
||||
initial.at 'texts' . to_vector . should_equal data.texts
|
||||
|
||||
group_builder.specify 'should sort columns with specified ordering and missing placement' <|
|
||||
c = data.df.at 'rating'
|
||||
@ -461,4 +463,3 @@ add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn
|
||||
|
||||
group_builder.specify "report error when trying to filter by a custom predicate" <|
|
||||
data.t1.filter "a" (x -> x % 2 == 0) . should_fail_with Unsupported_Database_Operation
|
||||
|
||||
|
@ -112,7 +112,7 @@ add_specs suite_builder prefix create_connection_func =
src = Table.new [["X", [1, 2, 3]]]
t1 = src.select_into_database_table data.connection long_name temporary=True
Problems.assume_no_problems t1
data.connection.query long_name . at "X" . to_vector . should_equal [1, 2, 3]
data.connection.query long_name . at "X" . to_vector . should_contain_the_same_elements_as [1, 2, 3]

longer_name_with_same_prefix = long_name + ("z" * 10)
data.connection.query longer_name_with_same_prefix . should_fail_with Table_Not_Found

@ -480,4 +480,3 @@ main filter=Nothing =
suite = Test.build suite_builder->
add_specs suite_builder "[SQLite]" (_-> Database.connect (SQLite.In_Memory))
suite.run_with_filter filter

@ -94,3 +94,48 @@ normalize_lines string line_separator=Line_Ending_Style.Unix.to_text newline_at_
set_writable file writable =
path = file.absolute.path
FileSystemHelper.setWritable path writable

## Checks if the given result is a Vector that has the ordering denoted by an
example vector.

The example should be a vector of vectors. Elements of the inner vectors can
be ordered arbitrarily (we treat them as belonging to the same equivalence
class), but the ordering between elements from different inner vectors must
be preserved.
Any.should_have_relative_ordering self (example : Vector) =
loc = Meta.get_source_location 1
case self of
_ : Vector ->
example.each v-> case v of
_ : Vector ->
if v.is_empty then Test.fail "The example vector should not contain empty vectors as they are meaningless (at "+loc+")."
_ -> Test.fail "Expected a vector of vectors but got a vector containing "+v.to_display_text+" (at "+loc+")."

total_length = example.fold 0 acc-> v-> acc+v.length
if self.length != total_length then
Test.fail "Expected a vector of length "+total_length.to_text+" but got "+self.length.to_text+" (at "+loc+")."

pieces = Vector.build builder->
example.fold 0 offset-> reference_v->
new_offset = offset + reference_v.length
current_part = self.take (offset.up_to new_offset)
builder.append current_part
new_offset
Runtime.assert (pieces.length == example.length)
n = pieces.length
0.up_to n . each ix->
got_piece = pieces.at ix
expected_piece = example.at ix
are_consistent = Panic.rethrow <|
(got_piece.sort on_incomparable=Problem_Behavior.Report_Error) == (expected_piece.sort on_incomparable=Problem_Behavior.Report_Error)
if are_consistent.not then
offset = pieces.take ix . fold 0 acc-> p-> acc+p.length
Test.fail "Expected the run of vector elements starting at offset "+offset.to_text+" to be a permutation of "+expected_piece.to_display_text+" but got "+got_piece.to_display_text+" (at "+loc+")."

_ -> Test.fail "Expected a vector but got "+self.to_display_text+" (at "+loc+")."
## PRIVATE
See `Any.should_have_relative_ordering`.
Error.should_have_relative_ordering self example =
loc = Meta.get_source_location 1
_ = example
Test.fail "Expected a vector but got a dataflow error "+self.catch.to_display_text+" (at "+loc+")."