Make all Common_Spec tests pass for SQLServer (#10810)
* More SQL tests
* Remove duplicate
* Fix another test
* Temp fix
* More temporary fixes
* More green
* Another green
* Another one
* Fix counts
* Temporary change
* Fix tests
* Auto-commit work in progress before clean build on 2024-09-06 10:53:46
* Another fix
* Green
* Specialise ensure_query_has_no_holes
* cleanup
* Cleanup
* Cleanup
* Clean
* Code review changes
(cherry picked from commit b8516f7b4a)
parent cb7e2d6973
commit 55690c7fb4
@@ -1,4 +1,5 @@
 from Standard.Base import all
+import Standard.Base.Errors.Illegal_Argument.Illegal_Argument

 from Standard.Table import Aggregate_Column, Value_Type

@@ -28,6 +29,7 @@ import Standard.Database.SQL_Type.SQL_Type
 from Standard.Database.Dialect import Temp_Table_Style
 from Standard.Database.Dialect_Flags import all
 from Standard.Database.Errors import SQL_Error, Unsupported_Database_Operation
+from Standard.Database.Internal.JDBC_Connection import JDBC_Connection

 import project.Database.Redshift.Internal.Redshift_Error_Mapper.Redshift_Error_Mapper

@@ -217,3 +219,8 @@ type Redshift_Dialect
     needs_literal_table_cast self value_type =
         _ = value_type
         False
+
+    ## PRIVATE
+    ensure_query_has_no_holes : JDBC_Connection -> Text -> Nothing ! Illegal_Argument
+    ensure_query_has_no_holes self jdbc:JDBC_Connection raw_sql:Text =
+        jdbc.ensure_query_has_no_holes raw_sql
@@ -220,7 +220,7 @@ type Connection
             table_naming_helper.verify_table_name name <|
                 make_table_for_name self name alias
         SQL_Query.Raw_SQL raw_sql -> handle_sql_errors <| alias.if_not_error <|
-            self.jdbc_connection.ensure_query_has_no_holes raw_sql . if_not_error <|
+            self.dialect.ensure_query_has_no_holes self.jdbc_connection raw_sql . if_not_error <|
                 columns = self.fetch_columns raw_sql Statement_Setter.null
                 name = if alias == "" then (UUID.randomUUID.to_text) else alias
                 ctx = Context.for_query raw_sql name
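Raw SQL validation now goes through the dialect instead of calling the JDBC connection directly, which lets each backend override how unfilled query holes are detected. A minimal sketch of the behaviour being guarded, with a hypothetical connection and table name (not code from this commit):

    # A raw query with an unfilled interpolation hole should surface as an
    # Illegal_Argument error instead of a confusing driver failure later.
    r = connection.query (SQL_Query.Raw_SQL "SELECT a FROM my_table WHERE a = ?")
    r.should_fail_with Illegal_Argument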
@@ -1,4 +1,5 @@
 from Standard.Base import all
+import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
 import Standard.Base.Errors.Unimplemented.Unimplemented

 import Standard.Table.Internal.Problem_Builder.Problem_Builder
@@ -27,6 +28,7 @@ import project.SQL_Statement.SQL_Statement
 import project.SQL_Type.SQL_Type
 from project.Dialect_Flags import all
 from project.Errors import SQL_Error, Unsupported_Database_Operation
+from project.Internal.JDBC_Connection import JDBC_Connection
 from project.Internal.Result_Set import result_set_to_table

 ## PRIVATE
@@ -275,6 +277,12 @@ type Dialect
         _ = [base_table, key_columns, resolved_aggregates, problem_builder]
         Unimplemented.throw "This is an interface only."

+    ## PRIVATE
+    ensure_query_has_no_holes : JDBC_Connection -> Text -> Nothing ! Illegal_Argument
+    ensure_query_has_no_holes jdbc:JDBC_Connection raw_sql:Text =
+        _ = [jdbc, raw_sql]
+        Unimplemented.throw "This is an interface only."
+
 ## PRIVATE

    The dialect of SQLite databases.
@@ -41,6 +41,7 @@ from project.Dialect import Temp_Table_Style
 from Standard.Database.Dialect_Flags import all
 from project.Errors import SQL_Error, Unsupported_Database_Operation
 from project.Internal.IR.Operation_Metadata import Date_Period_Metadata
+from project.Internal.JDBC_Connection import JDBC_Connection

 polyglot java import java.sql.Types

@@ -322,6 +323,11 @@ type Postgres_Dialect
         _ = value_type
         False

+    ## PRIVATE
+    ensure_query_has_no_holes : JDBC_Connection -> Text -> Nothing ! Illegal_Argument
+    ensure_query_has_no_holes self jdbc:JDBC_Connection raw_sql:Text =
+        jdbc.ensure_query_has_no_holes raw_sql
+
 ## PRIVATE
 make_dialect_operations =
     cases = [["LOWER", Base_Generator.make_function "LOWER"], ["UPPER", Base_Generator.make_function "UPPER"]]
@@ -37,6 +37,7 @@ import project.SQL_Type.SQL_Type
 from project.Dialect import Temp_Table_Style
 from Standard.Database.Dialect_Flags import all
 from project.Errors import SQL_Error, Unsupported_Database_Operation
+from project.Internal.JDBC_Connection import JDBC_Connection

 ## PRIVATE

@@ -325,6 +326,11 @@ type SQLite_Dialect
         _ = value_type
         False

+    ## PRIVATE
+    ensure_query_has_no_holes : JDBC_Connection -> Text -> Nothing ! Illegal_Argument
+    ensure_query_has_no_holes self jdbc:JDBC_Connection raw_sql:Text =
+        jdbc.ensure_query_has_no_holes raw_sql
+
 ## PRIVATE
    We need custom handling for First and Last, as SQLite does not support
    such aggregation functions out of the box, so instead we create a row
@@ -6,7 +6,7 @@ import Standard.Base.Runtime.Context

 import project.SQL_Query.SQL_Query
 from project.Errors import SQL_Error, Table_Already_Exists
-from project.Internal.Upload.Operations.Internal_Core import internal_create_table_structure
+from project.Internal.Upload.Operations.Internal_Core import internal_create_table_structure, resolve_temp_table_name

 ## PRIVATE
   Creates a new database table with the provided structure and returns the name
@@ -23,15 +23,16 @@ from project.Internal.Upload.Operations.Internal_Core import internal_create_tab
   intercepting the 'already exists' error.
 create_table_implementation connection table_name structure primary_key temporary allow_existing on_problems:Problem_Behavior =
     connection.base_connection.maybe_run_maintenance
+    resolved_table_name = resolve_temp_table_name connection temporary table_name
     table_naming_helper = connection.base_connection.table_naming_helper
     on_exists =
-        if allow_existing then connection.query (SQL_Query.Table_Name table_name) else Error.throw (Table_Already_Exists.Error table_name)
-    table_naming_helper.verify_table_name table_name <|
-        case connection.base_connection.table_exists table_name of
+        if allow_existing then connection.query (SQL_Query.Table_Name resolved_table_name) else Error.throw (Table_Already_Exists.Error resolved_table_name)
+    table_naming_helper.verify_table_name resolved_table_name <|
+        case connection.base_connection.table_exists resolved_table_name of
             True -> on_exists
             False ->
                 dry_run = Context.Output.is_enabled.not
-                effective_table_name = if dry_run.not then table_name else table_naming_helper.generate_dry_run_table_name table_name
+                effective_table_name = if dry_run.not then table_name else table_naming_helper.generate_dry_run_table_name resolved_table_name
                 effective_temporary = temporary || dry_run
                 created_table_name = Context.Output.with_enabled <|
                     if dry_run then
@@ -53,5 +54,5 @@ create_table_implementation connection table_name structure primary_key temporar
                 # If the table was just created by someone else
                 case dry_run of
                     # If this was a dry-run, we had a race condition - to ensure correct structure, we re-try the whole operation
-                    True -> create_table_implementation connection table_name structure primary_key temporary allow_existing on_problems
+                    True -> create_table_implementation connection resolved_table_name structure primary_key temporary allow_existing on_problems
                     False -> on_exists
@@ -9,22 +9,23 @@ import project.Internal.DDL_Transaction
 from project.Errors import SQL_Error, Table_Already_Exists
 from project.Internal.Upload.Helpers.Constants import dry_run_row_limit
 from project.Internal.Upload.Helpers.Error_Helpers import handle_upload_errors
-from project.Internal.Upload.Operations.Internal_Core import internal_upload_table
+from project.Internal.Upload.Operations.Internal_Core import internal_upload_table, resolve_temp_table_name

 ## PRIVATE
 select_into_table_implementation source_table connection table_name primary_key temporary on_problems:Problem_Behavior =
     connection.base_connection.maybe_run_maintenance
+    resolved_table_name = resolve_temp_table_name connection temporary table_name
     table_naming_helper = connection.base_connection.table_naming_helper
-    table_naming_helper.verify_table_name table_name <|
+    table_naming_helper.verify_table_name resolved_table_name <|
        Panic.recover SQL_Error <| handle_upload_errors <|
-            real_target_already_exists = connection.base_connection.table_exists table_name
-            if real_target_already_exists then Error.throw (Table_Already_Exists.Error table_name) else
+            real_target_already_exists = connection.base_connection.table_exists resolved_table_name
+            if real_target_already_exists then Error.throw (Table_Already_Exists.Error resolved_table_name) else
                 dry_run = Context.Output.is_enabled.not
                 case dry_run of
                     False ->
-                        upload_table_in_transaction source_table connection table_name primary_key temporary=temporary on_problems=on_problems row_limit=Nothing
+                        upload_table_in_transaction source_table connection resolved_table_name primary_key temporary=temporary on_problems=on_problems row_limit=Nothing
                     True ->
-                        tmp_table_name = table_naming_helper.generate_dry_run_table_name table_name
+                        tmp_table_name = table_naming_helper.generate_dry_run_table_name resolved_table_name
                         create_dry_run_table _ =
                             table = Context.Output.with_enabled <|
                                 ## This temporary table can be safely dropped if it
@@ -41,7 +41,9 @@ import Standard.Database.SQL_Type.SQL_Type
 from Standard.Database.Dialect import Temp_Table_Style
 from Standard.Database.Dialect_Flags import all
 from Standard.Database.Errors import SQL_Error, Unsupported_Database_Operation
+from Standard.Database.Internal.Base_Generator import lift_binary_op
 from Standard.Database.Internal.IR.Operation_Metadata import Date_Period_Metadata
+from Standard.Database.Internal.JDBC_Connection import JDBC_Connection
 from Standard.Database.Internal.Statement_Setter import fill_hole_default

 import project.Internal.SQLServer_Error_Mapper.SQLServer_Error_Mapper
@@ -283,6 +285,20 @@ type SQLSever_Dialect
                 Warning.attach (Inexact_Type_Coercion.Warning base_type new_type unavailable=False) new_type
             _ -> base_type

+    ## PRIVATE
+    needs_literal_table_cast : Value_Type -> Boolean
+    needs_literal_table_cast self value_type =
+        _ = value_type
+        False
+
+    ## PRIVATE
+    ensure_query_has_no_holes : JDBC_Connection -> Text -> Nothing ! Illegal_Argument
+    ensure_query_has_no_holes self jdbc:JDBC_Connection raw_sql:Text =
+        ## The jdbc driver doesn't work for asking about holes for SQLServer temp tables
+           We can skip this check and still get a decent error message
+        if raw_sql.contains "#" . not then
+            jdbc.ensure_query_has_no_holes raw_sql
+
 ## PRIVATE
 make_dialect_operations =
     cases = [["LOWER", Base_Generator.make_function "LOWER"], ["UPPER", Base_Generator.make_function "UPPER"]]
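SQL Server addresses temporary tables through names starting with '#', and asking the JDBC driver for parameter metadata on such queries reportedly fails, so this override skips the hole check whenever the raw SQL contains '#'. A hedged illustration with hypothetical names:

    # No '#', so the usual check still runs and rejects any unfilled hole:
    dialect.ensure_query_has_no_holes jdbc "SELECT 1 WHERE 1 = ?"
    # Contains '#', so the check is skipped; a later, still reasonable,
    # driver error is relied upon if the query really does contain a hole:
    dialect.ensure_query_has_no_holes jdbc 'SELECT * FROM "#tmp"'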
@@ -290,27 +306,34 @@ make_dialect_operations =
     counts = [agg_count_is_null, agg_count_empty, agg_count_not_empty, ["COUNT_DISTINCT", agg_count_distinct], ["COUNT_DISTINCT_INCLUDE_NULL", agg_count_distinct_include_null]]
     arith_extensions = [is_nan, is_inf, is_finite, floating_point_div, mod_op, decimal_div, decimal_mod, ["ROW_MIN", Base_Generator.make_function "LEAST"], ["ROW_MAX", Base_Generator.make_function "GREATEST"]]
     bool = [bool_or]
+    eq = lift_binary_op "==" make_equals
+    compare = [eq]

     stddev_pop = ["STDDEV_POP", Base_Generator.make_function "stddev_pop"]
     stddev_samp = ["STDDEV_SAMP", Base_Generator.make_function "stddev_samp"]
     stats = [agg_median, agg_mode, agg_percentile, stddev_pop, stddev_samp]
     date_ops = [make_extract_as_int "year", make_extract_as_int "quarter", make_extract_as_int "month", make_extract_as_int "week", make_extract_as_int "day", make_extract_as_int "hour", make_extract_as_int "minute", make_extract_fractional_as_int "second", make_extract_fractional_as_int "millisecond" modulus=1000, make_extract_fractional_as_int "microsecond" modulus=1000, ["date_add", make_date_add], ["date_diff", make_date_diff], ["date_trunc_to_day", make_date_trunc_to_day]]
-    special_overrides = []
+    special_overrides = [is_null]
     other = [["RUNTIME_ERROR", make_runtime_error_op]]
-    my_mappings = text + counts + stats + first_last_aggregators + arith_extensions + bool + date_ops + special_overrides + other
+    my_mappings = text + counts + stats + first_last_aggregators + arith_extensions + bool + compare + date_ops + special_overrides + other
     Base_Generator.base_dialect_operations . extend_with my_mappings

+## PRIVATE
+is_null = Base_Generator.lift_unary_op "IS_NULL" arg->
+    arg.paren ++ " IS NULL"
+
 ## PRIVATE
 agg_count_is_null = Base_Generator.lift_unary_op "COUNT_IS_NULL" arg->
-    SQL_Builder.code "COUNT_IF(" ++ arg.paren ++ " IS NULL)"
+    SQL_Builder.code "SUM(CASE WHEN " ++ arg.paren ++ " IS NULL THEN 1 ELSE 0 END)"

 ## PRIVATE
 agg_count_empty = Base_Generator.lift_unary_op "COUNT_EMPTY" arg->
-    SQL_Builder.code "COUNT_IF("++ arg.paren ++ " IS NULL OR " ++ arg.paren ++ " == '')"
+    SQL_Builder.code "SUM(CASE WHEN (" ++ arg.paren ++ " IS NULL) OR (" ++ arg.paren ++ " = '') THEN 1 ELSE 0 END)"

 ## PRIVATE
 agg_count_not_empty = Base_Generator.lift_unary_op "COUNT_NOT_EMPTY" arg->
-    SQL_Builder.code "COUNT_IF(" ++ arg.paren ++ " IS NOT NULL AND " ++ arg.paren ++ " != '')"
+    SQL_Builder.code "SUM(CASE WHEN (" ++ arg.paren ++ " IS NOT NULL) AND (" ++ arg.paren ++ " != '') THEN 1 ELSE 0 END)"
+

 ## PRIVATE
 agg_median = Base_Generator.lift_unary_op "MEDIAN" arg->
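T-SQL has no COUNT_IF aggregate (the replaced code matches dialects that do have one, such as Snowflake), so the count helpers are rewritten with the portable SUM-over-CASE idiom; note the invalid '==' comparison in COUNT_EMPTY also becomes a single '='. A hedged sketch of the SQL this should now generate for COUNT_IS_NULL over a column "x" (illustrative, not captured output):

    expected = 'SUM(CASE WHEN ("x") IS NULL THEN 1 ELSE 0 END)'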
@@ -339,6 +362,14 @@ first_last_aggregators =
     last_not_null = make_first_aggregator reverse=True ignore_null=True
     [["FIRST", first], ["FIRST_NOT_NULL", first_not_null], ["LAST", last], ["LAST_NOT_NULL", last_not_null]]

+## PRIVATE
+make_equals a b =
+    case a.build.prepare.second==[True] of
+        True -> b.paren
+        False -> case b.build.prepare.second==[True] of
+            True -> a.paren
+            False -> a.paren ++ " = " ++ b.paren
+
 ## PRIVATE
 make_first_aggregator reverse ignore_null args =
     if args.length < 2 then Error.throw (Illegal_State.Error "Insufficient number of arguments for the operation.") else
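make_equals special-cases a constant True operand: T-SQL has no boolean literals, so emitting "x" = TRUE would be invalid, and a comparison against True reduces to the other operand's BIT expression. My reading of the builder API is an assumption here: a.build.prepare.second yields the interpolated constants of the fragment, and equals [True] exactly when the operand is the lone constant True. A hedged sketch, where col and true_const are hypothetical SQL_Builder fragments:

    make_equals col true_const   # -> col.paren, e.g. ("x")
    make_equals col other_col    # -> col.paren ++ " = " ++ other_col.paren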
@@ -430,9 +461,7 @@ right = Base_Generator.lift_binary_op "RIGHT" str-> n->

 ## PRIVATE
 make_order_descriptor internal_column sort_direction text_ordering =
-    nulls = case sort_direction of
-        Sort_Direction.Ascending -> Nulls_Order.First
-        Sort_Direction.Descending -> Nulls_Order.Last
+    nulls = Nothing
     case text_ordering of
         Nothing ->
             Order_Descriptor.Value (Internals_Access.column_expression internal_column) sort_direction nulls_order=nulls collation=Nothing
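The nulls ordering is dropped because T-SQL does not accept the NULLS FIRST / NULLS LAST modifiers; passing Nothing leaves SQL Server's default null placement in effect. A hedged illustration of the resulting ORDER BY fragment (assumed shapes, not captured output):

    # previously attempted, rejected by SQL Server:  "x" ASC NULLS FIRST
    # generated after this change:                   "x" ASC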
@@ -84,6 +84,9 @@ type SQLServer_Type_Mapping
             Types.VARCHAR ->
                 effective_size = if sql_type.precision==max_length || (sql_type.precision==9 && sql_type.scale==9) then Nothing else sql_type.precision
                 Value_Type.Char size=effective_size variable_length=True
+            Types.NVARCHAR ->
+                effective_size = if sql_type.precision==max_length || (sql_type.precision==9 && sql_type.scale==9) then Nothing else sql_type.precision
+                Value_Type.Char size=effective_size variable_length=True
             Types.DATE -> Value_Type.Date
             Types.TIME -> Value_Type.Time
             Types.TIMESTAMP -> case sql_type.name of
@@ -108,8 +111,12 @@
        the database backend.
     infer_return_type : (SQL_Expression -> SQL_Type_Reference) -> Text -> Vector -> SQL_Expression -> SQL_Type_Reference
     infer_return_type infer_from_database_callback op_name arguments expression =
-        _ = [op_name, arguments]
-        infer_from_database_callback expression
+        case operations_dict.contains_key op_name of
+            False -> infer_from_database_callback expression
+            True ->
+                handler = operations_dict.at op_name
+                sql_type = handler arguments
+                SQL_Type_Reference.from_constant sql_type

     ## PRIVATE
        We want to respect any overriding references, but references that rely on
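The new IS_NULL and '==' operations return plain BIT expressions whose result type the database round-trip does not report usefully, so type inference now consults a local dictionary first and only falls back to the database callback for unknown operations. A hedged usage sketch (callback and expr are placeholders):

    # Known operation: the type comes straight from operations_dict.
    SQLServer_Type_Mapping.infer_return_type callback "IS_NULL" [] expr
    # Unknown operation: behaves exactly as before, asking the database.
    SQLServer_Type_Mapping.infer_return_type callback "LOWER" [] expr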
@@ -142,6 +149,11 @@ type SQLServer_Type_Mapping
 on_unknown_type sql_type =
     Value_Type.Unsupported_Data_Type sql_type.name sql_type

+## PRIVATE
+   Maps operation names to functions that infer its result type.
+operations_dict : Dictionary Text (Vector -> SQL_Type)
+operations_dict = Dictionary.from_vector [["IS_NULL", const (SQL_Type.Value Types.BIT "BIT")],["==", const (SQL_Type.Value Types.BIT "BIT")]]
+
 ## PRIVATE
    This is the maximum size that JDBC driver reports for 'unbounded' types in
    SQLServer.
@@ -9,6 +9,7 @@ from Standard.Table import Table
 import Standard.Database.Column_Description.Column_Description
 import Standard.Database.Connection.Connection.Connection
 import Standard.Database.DB_Table.DB_Table
+import Standard.Database.Internal.Common.Encoding_Limited_Naming_Properties.Encoding_Limited_Naming_Properties
 import Standard.Database.Internal.Connection.Entity_Naming_Properties.Entity_Naming_Properties
 import Standard.Database.Internal.JDBC_Connection
 import Standard.Database.SQL_Query.SQL_Query
@@ -31,8 +32,11 @@ type SQLServer_Connection
     create : Text -> Vector -> (Text -> Text -> SQLServer_Connection) -> SQLServer_Connection
     create url properties make_new =
         jdbc_connection = JDBC_Connection.create url properties
-        entity_naming_properties = Entity_Naming_Properties.from_jdbc_connection jdbc_connection is_case_sensitive=False
-        SQLServer_Connection.Value (Connection.new jdbc_connection SQLServer_Dialect.sqlserver entity_naming_properties) make_new
+        jdbc_entity_naming_properties = Entity_Naming_Properties.from_jdbc_connection jdbc_connection is_case_sensitive=True
+        ## jdbc reports table name length limit as 128, but it actually seems to be 116 for temp tables so we override it
+        limited = Encoding_Limited_Naming_Properties.Instance Encoding.utf_8 limit=116 is_case_sensitive=True
+        modified_entity_naming_properties = Entity_Naming_Properties.Value for_table_names=limited for_column_names=jdbc_entity_naming_properties.for_column_names for_generated_column_names=jdbc_entity_naming_properties.for_generated_column_names
+        SQLServer_Connection.Value (Connection.new jdbc_connection SQLServer_Dialect.sqlserver modified_entity_naming_properties) make_new

 ## PRIVATE

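Column-name limits still come from the driver, but table names are capped by hand at 116 UTF-8 characters, below the 128 the driver reports, because temp-table names stop working above that length in practice. A hedged sketch of the consequence (my numbers, following the comment in the diff):

    # The longest usable temp table name, once the '#' marker is counted
    # against the 116-character cap, is 115 user-supplied characters.
    longest = "a" * (116 - 1)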
@@ -46,6 +46,7 @@ from Standard.Database.Dialect import Temp_Table_Style
 from Standard.Database.Dialect_Flags import all
 from Standard.Database.Errors import SQL_Error, Unsupported_Database_Operation
 from Standard.Database.Internal.IR.Operation_Metadata import Date_Period_Metadata
+from Standard.Database.Internal.JDBC_Connection import JDBC_Connection
 from Standard.Database.Internal.Statement_Setter import fill_hole_default

 import project.Internal.Snowflake_Error_Mapper.Snowflake_Error_Mapper
@@ -305,6 +306,11 @@ type Snowflake_Dialect
             Value_Type.Date_Time _ -> True
             _ -> False

+    ## PRIVATE
+    ensure_query_has_no_holes : JDBC_Connection -> Text -> Nothing ! Illegal_Argument
+    ensure_query_has_no_holes self jdbc:JDBC_Connection raw_sql:Text =
+        jdbc.ensure_query_has_no_holes raw_sql
+
     ## PRIVATE
        In Snowflake we need to create tables outside of transactions.
        However, currently we decide to opt-out of the integrity check for
@@ -84,7 +84,7 @@ add_specs suite_builder =
             message = "SQLServer test database is not configured. See README.md for instructions."
             suite_builder.group "[SQLServer] Database tests" pending=message (_-> Nothing)
         connection_builder ->
-            _ = connection_builder
+            add_sqlserver_specs suite_builder connection_builder
             suite_builder.group "[SQLServer] Info" group_builder->
                 default_connection = Database.connect get_configured_connection_details
                 data = SQLServer_Info_Data.setup default_connection
@@ -145,6 +145,14 @@ add_specs suite_builder =
             returned_table.should_equal expected_table
             data.connection.execute 'DROP TABLE "'+name+'"'

+type Lazy_Ref
+    Value ~get
+
+add_sqlserver_specs suite_builder create_connection_fn =
+    prefix = "[SQLServer] "
+    default_connection = Lazy_Ref.Value (create_connection_fn Nothing)
+    Common_Spec.add_specs suite_builder prefix create_connection_fn default_connection
+
 main filter=Nothing =
     suite = Test.build suite_builder->
         add_specs suite_builder
@@ -197,15 +197,6 @@ add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn
             name = data.t1.name
             tmp = data.connection.query (SQL_Query.Table_Name name)
             tmp.read . should_equal data.t1.read

-        group_builder.specify "should allow to access a Table by an SQL query" <|
-            name = data.t1.name
-            t2 = data.connection.query (SQL_Query.Raw_SQL ('SELECT "a", "b" FROM "' + name + '" WHERE "a" >= 3'))
-            m2 = t2.read
-            m2.column_names . should_equal ["a", "b"]
-            m2.at "a" . to_vector . should_equal [4]
-            m2.at "b" . to_vector . should_equal [5]
-            m2.at "c" . should_fail_with No_Such_Column
-
         group_builder.specify "should allow to access a Table by an SQL query" <|
             name = data.t1.name
@@ -216,7 +207,7 @@ add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn
             m2.at "b" . to_vector . should_equal [5]
             m2.at "c" . should_fail_with No_Such_Column

-            t3 = data.connection.query (SQL_Query.Raw_SQL ('SELECT 1+2'))
+            t3 = data.connection.query (SQL_Query.Raw_SQL ('SELECT 1+2 AS "a"'))
             m3 = t3.read
             m3.at 0 . to_vector . should_equal [3]

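The probe query gains an explicit alias, presumably because an unaliased computed column comes back from some backends (SQL Server among them) with an empty or generated name that is awkward to address. A hedged example of the aliased form:

    t3 = data.connection.query (SQL_Query.Raw_SQL 'SELECT 1+2 AS "a"')
    t3.read . at "a" . to_vector    # expected [3], under a stable column name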
@@ -244,16 +235,15 @@ add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn

         group_builder.specify "will fail if the table is modified and a column gets removed" <|
             name = Name_Generator.random_name "removing-column"
-            Problems.assume_no_problems <|
-                (Table.new [["a", [1, 2, 3]], ["b", [4, 5, 6]]]).select_into_database_table data.connection name temporary=True
+            t0 = (Table.new [["a", [1, 2, 3]], ["b", [4, 5, 6]]]).select_into_database_table data.connection name temporary=True

-            t1 = data.connection.query name
+            t1 = data.connection.query t0.name
             m1 = t1.read.sort "a"
             Problems.assume_no_problems m1
             m1.at "a" . to_vector . should_equal [1, 2, 3]
             m1.at "b" . to_vector . should_equal [4, 5, 6]

-            Problems.assume_no_problems <| data.connection.drop_table name
+            Problems.assume_no_problems <| data.connection.drop_table t0.name
             Problems.assume_no_problems <|
                 (Table.new [["a", [100, 200]]]).select_into_database_table data.connection name temporary=True

@@ -266,16 +256,16 @@ add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn

         group_builder.specify "will not fail if the table is modified and a column gets added" <|
             name = Name_Generator.random_name "adding-column"
-            Problems.assume_no_problems <|
-                (Table.new [["a", [1, 2, 3]], ["b", [4, 5, 6]]]).select_into_database_table data.connection name temporary=True
+            t0 = (Table.new [["a", [1, 2, 3]], ["b", [4, 5, 6]]]).select_into_database_table data.connection name temporary=True

-            t1 = data.connection.query name
+            t1 = data.connection.query t0.name
             m1 = t1.read.sort "a"
             Problems.assume_no_problems m1
             m1.at "a" . to_vector . should_equal [1, 2, 3]
             m1.at "b" . to_vector . should_equal [4, 5, 6]

-            Problems.assume_no_problems <| data.connection.drop_table name
+            Problems.assume_no_problems <| data.connection.drop_table t0.name
             Problems.assume_no_problems <|
                 (Table.new [["a", [100, 200]], ["b", [300, 400]], ["c", [500, 600]]]).select_into_database_table data.connection name temporary=True

@@ -287,7 +277,7 @@ add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn

             t1.at "c" . should_fail_with No_Such_Column

-            t2 = data.connection.query name
+            t2 = data.connection.query t0.name
             t2.column_names . should_equal ["a", "b", "c"]

@@ -4,6 +4,7 @@ from Standard.Table import Table, Join_Kind, Aggregate_Column, Value_Type, expr
 from Standard.Table.Errors import No_Such_Column, Name_Too_Long, Truncated_Column_Names, Duplicate_Output_Column_Names

 from Standard.Database import all
+from Standard.Database.Dialect import Temp_Table_Style
 from Standard.Database.Errors import Table_Not_Found, Unsupported_Database_Operation

 from Standard.Test import all
@@ -30,7 +31,9 @@ add_specs suite_builder prefix create_connection_func =
             data.teardown

         entity_naming_properties = data.connection.base_connection.entity_naming_properties
-        max_table_name_length = entity_naming_properties.for_table_names.size_limit
+        max_table_name_length = case data.connection.dialect.temp_table_style of
+            Temp_Table_Style.Temporary_Table -> entity_naming_properties.for_table_names.size_limit
+            Temp_Table_Style.Hash_Prefix -> entity_naming_properties.for_table_names.size_limit - 1
         max_column_name_length = entity_naming_properties.for_column_names.size_limit
         has_maximum_table_name_length = max_table_name_length.is_nothing.not
         has_maximum_column_name_length = max_column_name_length.is_nothing.not
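For dialects using Temp_Table_Style.Hash_Prefix, the leading '#' marker consumes one character of the name budget, hence the subtraction. A hedged example assuming a 116-character limit:

    ok_name  = "a" * 115    # '#' + 115 characters fits the budget exactly
    too_long = "a" * 116    # expected to fail with Name_Too_Long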
@@ -50,19 +53,22 @@
         group_builder.specify "should not allow to create a table with a name that is too long" <|
             name = "a" * (max_table_name_length + 1)
             src = Table.new [["X", [1, 2, 3]]]
+            temp_table_name = case data.connection.dialect.temp_table_style of
+                Temp_Table_Style.Temporary_Table -> name
+                Temp_Table_Style.Hash_Prefix -> ("#" + name)

             run_with_and_without_output <|
                 r = src.select_into_database_table data.connection name temporary=True
                 r.should_fail_with Name_Too_Long
                 r.catch.entity_kind . should_equal "table"
-                r.catch.name . should_equal name
+                r.catch.name . should_equal temp_table_name
                 r.catch.to_display_text . should_contain "The table name"
                 r.catch.to_display_text . should_contain "is too long"

-                r2 = data.connection.create_table name [Column_Description.Value "X" Value_Type.Integer] temporary=True
+                r2 = data.connection.create_table temp_table_name [Column_Description.Value "X" Value_Type.Integer] temporary=True
                 r2.should_fail_with Name_Too_Long

-                data.connection.query (SQL_Query.Table_Name name) . should_fail_with Name_Too_Long
+                data.connection.query (SQL_Query.Table_Name temp_table_name) . should_fail_with Name_Too_Long

         group_builder.specify "should ensure length is measured in small units, even if grapheme length is lower" <|
             big_grapheme = '\u{1F926}\u{1F3FC}\u200D\u2642\uFE0F'
@@ -86,7 +92,9 @@
             t2 = src.select_into_database_table data.connection name_large temporary=True
             t2.should_fail_with Name_Too_Long
             t2.catch.entity_kind . should_equal "table"
-            t2.catch.name . should_equal name_large
+            case data.connection.dialect.temp_table_style of
+                Temp_Table_Style.Temporary_Table -> t2.catch.name . should_equal name_large
+                Temp_Table_Style.Hash_Prefix -> t2.catch.name . should_equal ("#" + name_large)
             t2.catch.to_display_text.should_contain "too long"

             ## This name may be supported or not, depending on how the
@@ -112,7 +120,8 @@
             src = Table.new [["X", [1, 2, 3]]]
             t1 = src.select_into_database_table data.connection long_name temporary=True
             Problems.assume_no_problems t1
-            data.connection.query long_name . at "X" . to_vector . should_equal_ignoring_order [1, 2, 3]

+            t1 . at "X" . to_vector . should_equal_ignoring_order [1, 2, 3]
+
             longer_name_with_same_prefix = long_name + ("z" * 10)
             data.connection.query longer_name_with_same_prefix . should_fail_with Name_Too_Long
@@ -413,36 +422,36 @@
             Test.with_clue "join: " <|
                 t2 = db_table.join db_table join_kind=Join_Kind.Left_Outer on=name_a right_prefix="RIGHT_"
                 w = Problems.expect_warning Truncated_Column_Names t2
-                w.original_names . should_equal ["RIGHT_" + name_a, "RIGHT_" + name_b]
+                w.original_names . should_equal_ignoring_order ["RIGHT_" + name_a, "RIGHT_" + name_b]
                 t2.row_count . should_equal 3

                 m2 = t2.read . sort name_a
-                m2.column_names . should_equal [name_a, name_b]+w.truncated_names
+                m2.column_names . should_equal_ignoring_order [name_a, name_b]+w.truncated_names
                 m2.at name_a . to_vector . should_equal [1, 2, 3]
                 m2.at name_b . to_vector . should_equal [4, 5, 6]
-                m2.at (w.truncated_names.at 0) . to_vector . should_equal [1, 2, 3]
-                m2.at (w.truncated_names.at 1) . to_vector . should_equal [4, 5, 6]
+                m2.at (w.names_map.at ("RIGHT_" + name_a)) . to_vector . should_equal [1, 2, 3]
+                m2.at (w.names_map.at ("RIGHT_" + name_b)) . to_vector . should_equal [4, 5, 6]

             Test.with_clue "cross_join: " <|
                 t2 = db_table.cross_join db_table right_prefix="RIGHT_"
                 w = Problems.expect_warning Truncated_Column_Names t2
-                w.original_names . should_equal ["RIGHT_" + name_a, "RIGHT_" + name_b]
+                w.original_names . should_equal_ignoring_order ["RIGHT_" + name_a, "RIGHT_" + name_b]
                 t2.row_count . should_equal 9

                 m2 = t2.read . sort [name_a, name_b]
-                m2.column_names . should_equal [name_a, name_b]+w.truncated_names
+                m2.column_names . should_equal_ignoring_order [name_a, name_b]+w.truncated_names
                 m2.at name_a . to_vector . distinct . should_equal_ignoring_order [1, 2, 3]
                 m2.at name_b . to_vector . distinct . should_equal_ignoring_order [4, 5, 6]
-                m2.at (w.truncated_names.at 0) . to_vector . distinct . should_equal_ignoring_order [1, 2, 3]
-                m2.at (w.truncated_names.at 1) . to_vector . distinct . should_equal_ignoring_order [4, 5, 6]
+                m2.at (w.names_map.at ("RIGHT_" + name_a)) . to_vector . distinct . should_equal_ignoring_order [1, 2, 3]
+                m2.at (w.names_map.at ("RIGHT_" + name_b)) . to_vector . distinct . should_equal_ignoring_order [4, 5, 6]

         group_builder.specify "should truncate new column names in other operations" <|
             name_a = "x" * (max_column_name_length - 1) + "A"
             name_b = "x" * (max_column_name_length - 1) + "B"
             src_a = Table.new [[name_a, ["1", "2", "3"]]]
             src_b = Table.new [[name_b, [4, 5, 6]]]
-            db_a = src_a.select_into_database_table data.connection (Name_Generator.random_name "long-column-names-a") temporary=True
-            db_b = src_b.select_into_database_table data.connection (Name_Generator.random_name "long-column-names-b") temporary=True
+            db_a = src_a.select_into_database_table data.connection (Name_Generator.random_name "long-column-names-a") temporary=True primary_key=Nothing
+            db_b = src_b.select_into_database_table data.connection (Name_Generator.random_name "long-column-names-b") temporary=True primary_key=Nothing

             Test.with_clue "zip test will have to be amended once it is implemented: " <|
                 db_a.zip db_b . should_fail_with Unsupported_Database_Operation