Implement add_row_number for Database backends, fix primary key inference for SQLite (#7174)

Closes #6921 and also closes #7037

Commit 4ccf3566ce (parent 3c93c25a5a)
@@ -502,6 +502,7 @@
 - [Implemented `Table.update_database_table`.][7035]
 - [Removed `module` argument from `enso_project` and other minor tweaks.][7052]
 - [Integrated Database write operations with Execution Contexts.][7072]
+- [Implemented `add_row_number` for Database tables.][7174]

 [debug-shortcuts]:
   https://github.com/enso-org/enso/blob/develop/app/gui/docs/product/shortcuts.md#debug
@@ -726,6 +727,7 @@
 [7035]: https://github.com/enso-org/enso/pull/7035
 [7052]: https://github.com/enso-org/enso/pull/7052
 [7072]: https://github.com/enso-org/enso/pull/7072
+[7174]: https://github.com/enso-org/enso/pull/7174

 #### Enso Compiler
@@ -27,7 +27,7 @@ import Standard.Database.Internal.Postgres.Postgres_Type_Mapping.Postgres_Type_Mapping
 import Standard.Database.Internal.SQL_Type_Mapping.SQL_Type_Mapping
 import Standard.Database.Internal.SQL_Type_Reference.SQL_Type_Reference
 import Standard.Database.Internal.Statement_Setter.Statement_Setter
-from Standard.Database.Errors import Unsupported_Database_Operation
+from Standard.Database.Errors import SQL_Error, Unsupported_Database_Operation

 import project.Database.Redshift.Internal.Redshift_Error_Mapper.Redshift_Error_Mapper

@@ -167,3 +167,11 @@ type Redshift_Dialect
     ## PRIVATE
     get_error_mapper : Error_Mapper
     get_error_mapper self = Redshift_Error_Mapper
+
+    ## PRIVATE
+       The dialect-dependent strategy to get the Primary Key for a given table.
+
+       Returns `Nothing` if the key is not defined.
+    fetch_primary_key : Connection -> Text -> Vector Text ! Nothing
+    fetch_primary_key self connection table_name =
+        Dialect.default_fetch_primary_key connection table_name
@@ -1,7 +1,7 @@
 from Standard.Base import all
 import Standard.Base.Errors.Unimplemented.Unimplemented

-from Standard.Table import Aggregate_Column, Join_Kind, Value_Type
+from Standard.Table import Aggregate_Column, Join_Kind, Value_Type, Column_Selector
 import Standard.Table.Internal.Naming_Helpers.Naming_Helpers
 import Standard.Table.Internal.Problem_Builder.Problem_Builder

@@ -23,7 +23,8 @@ import project.Internal.SQLite.SQLite_Dialect
 import project.Internal.SQL_Type_Mapping.SQL_Type_Mapping
 import project.Internal.SQL_Type_Reference.SQL_Type_Reference
 import project.Internal.Statement_Setter.Statement_Setter
-from project.Errors import Unsupported_Database_Operation
+from project.Errors import SQL_Error, Unsupported_Database_Operation
+from project.Internal.Result_Set import result_set_to_table

 ## PRIVATE

@@ -213,6 +214,15 @@ type Dialect
     default_table_types self =
         Unimplemented.throw "This is an interface only."

+    ## PRIVATE
+       The dialect-dependent strategy to get the Primary Key for a given table.
+
+       Returns `Nothing` if the key is not defined.
+    fetch_primary_key : Connection -> Text -> Vector Text ! Nothing
+    fetch_primary_key self connection table_name =
+        _ = [connection, table_name]
+        Unimplemented.throw "This is an interface only."
+
 ## PRIVATE

    The dialect of SQLite databases.
@@ -229,3 +239,14 @@ postgres = Postgres_Dialect.postgres
 default_fetch_types_query dialect expression context =
     empty_context = context.add_where_filters [SQL_Expression.Literal "FALSE"]
     dialect.generate_sql (Query.Select [["typed_column", expression]] empty_context)
+
+## PRIVATE
+   Default implementation relying on DatabaseMetaData.
+default_fetch_primary_key connection table_name =
+    connection.jdbc_connection.with_metadata metadata->
+        rs = metadata.getPrimaryKeys Nothing Nothing table_name
+        keys_table = result_set_to_table rs connection.dialect.make_column_fetcher_for_type
+        # The names of the columns are sometimes lowercase and sometimes uppercase, so we do a case insensitive select first.
+        selected = keys_table.select_columns [Column_Selector.By_Name "COLUMN_NAME", Column_Selector.By_Name "KEY_SEQ"] reorder=True
+        key_column_names = selected.order_by 1 . at 0 . to_vector
+        if key_column_names.is_empty then Nothing else key_column_names
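The `fetch_primary_key` method added above is the single entry point dialects now expose for key inference, with `default_fetch_primary_key` as the shared JDBC-based fallback. A minimal usage sketch (the table name is illustrative; `connection` is assumed to be an open Database connection):

    pk = connection.dialect.fetch_primary_key connection "orders"
    case pk of
        Nothing -> IO.println "No primary key defined."
        column_names : Vector -> IO.println "Primary key: "+column_names.to_text

Failures from the underlying metadata query surface as dataflow errors, which is why the caller in `Table.get_primary_key` below catches them and falls back to `Nothing`.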
@@ -21,10 +21,11 @@ import Standard.Table.Data.Report_Unmatched.Report_Unmatched
 import Standard.Table.Data.Row.Row
 import Standard.Table.Data.Table.Table as Materialized_Table
 import Standard.Table.Data.Type.Value_Type_Helpers
+import Standard.Table.Internal.Add_Row_Number
 import Standard.Table.Internal.Aggregate_Column_Helper
+import Standard.Table.Internal.Problem_Builder.Problem_Builder
 import Standard.Table.Internal.Table_Helpers
 import Standard.Table.Internal.Table_Helpers.Table_Column_Helper
-import Standard.Table.Internal.Problem_Builder.Problem_Builder
 import Standard.Table.Internal.Unique_Name_Strategy.Unique_Name_Strategy
 import Standard.Table.Internal.Widget_Helpers
 from Standard.Table.Data.Column import get_item_string, normalize_string_for_display
@@ -36,15 +37,17 @@ import project.Data.Column.Column
 import project.Data.SQL_Query.SQL_Query
 import project.Data.SQL_Statement.SQL_Statement
 import project.Data.SQL_Type.SQL_Type
-import project.Internal.Helpers
 import project.Internal.Aggregate_Helper
+import project.Internal.Base_Generator
 import project.Internal.Common.Database_Join_Helper
+import project.Internal.Helpers
 import project.Internal.IR.Context.Context
-import project.Internal.IR.SQL_Expression.SQL_Expression
 import project.Internal.IR.From_Spec.From_Spec
 import project.Internal.IR.Internal_Column.Internal_Column
-import project.Internal.IR.SQL_Join_Kind.SQL_Join_Kind
 import project.Internal.IR.Order_Descriptor.Order_Descriptor
 import project.Internal.IR.Query.Query
+import project.Internal.IR.SQL_Expression.SQL_Expression
+import project.Internal.IR.SQL_Join_Kind.SQL_Join_Kind
 import project.Internal.SQL_Type_Reference.SQL_Type_Reference

 from project.Errors import Unsupported_Database_Operation, Integrity_Error, Unsupported_Name, Table_Not_Found
@@ -559,10 +562,43 @@ type Table
     @group_by Widget_Helpers.make_column_name_vector_selector
     @order_by Widget_Helpers.make_order_by_selector
     add_row_number : Text -> Integer -> Integer -> Vector (Text | Integer | Column_Selector) | Text | Integer -> Vector (Text | Sort_Column) | Text -> Problem_Behavior -> Table
-    add_row_number self name="Row" from=1 step=1 group_by=[] order_by=[] on_problems=Problem_Behavior.Report_Warning =
-        _ = [name, from, step, group_by, order_by, on_problems]
-        msg = "`Table.add_row_number` is not yet implemented in the Database backend."
-        Error.throw (Unsupported_Database_Operation.Error msg)
+    add_row_number self (name:Text = "Row") (from:Integer = 1) (step:Integer = 1) group_by=[] order_by=[] on_problems=Problem_Behavior.Report_Warning =
+        problem_builder = Problem_Builder.new error_on_missing_columns=True
+        grouping_columns = self.columns_helper.select_columns_helper group_by True problem_builder
+        grouping_columns.each internal_column->
+            column = self.make_column internal_column
+            if column.value_type.is_floating_point then
+                problem_builder.report_other_warning (Floating_Point_Equality.Error column.name)
+        ordering = Table_Helpers.resolve_order_by self.columns order_by problem_builder
+        problem_builder.attach_problems_before on_problems <|
+            order_descriptors = case ordering.is_empty of
+                False -> ordering.map element->
+                    column = element.column
+                    associated_selector = element.associated_selector
+                    self.connection.dialect.prepare_order_descriptor column associated_selector.direction text_ordering=Nothing
+                True -> case self.default_ordering of
+                    Nothing -> Error.throw (Illegal_Argument.Error "No `order_by` is specified and the table has no existing ordering (e.g. from an `order_by` operation or a primary key). Some ordering is required for `add_row_number` in Database tables.")
+                    descriptors -> descriptors
+            grouping_expressions = grouping_columns.map .expression
+
+            separator = SQL_Expression.Literal Base_Generator.row_number_parameter_separator
+            # The SQL row_number() counts from 1, so we adjust the offset.
+            offset = from - step
+            params = [SQL_Expression.Constant offset, SQL_Expression.Constant step] + order_descriptors + [separator] + grouping_expressions
+            new_expr = SQL_Expression.Operation "ROW_NUMBER" params
+
+            type_mapping = self.connection.dialect.get_type_mapping
+            infer_from_database_callback expression =
+                SQL_Type_Reference.new self.connection self.context expression
+            new_type_ref = type_mapping.infer_return_type infer_from_database_callback "ROW_NUMBER" [] new_expr
+
+            new_column = Internal_Column.Value name new_type_ref new_expr
+
+            rebuild_table columns =
+                self.updated_columns (columns.map .as_internal)
+            renamed_table = Add_Row_Number.rename_columns_if_needed self name on_problems rebuild_table
+            renamed_table.updated_columns (renamed_table.internal_columns + [new_column])


     ## UNSTABLE
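For reference, a sketch of the call shape this implementation supports, mirroring the "will use the primary key by default" test added further down (the connection and table name are illustrative):

    src = Table.new [["X", [500, 400, 30, 1, 2]], ["Y", [10, 20, 30, 40, 50]]]
    db_table = src.select_into_database_table connection "add-row-number-example" temporary=True primary_key=["X"]
    t = db_table.add_row_number    # ordered by the primary key "X", since no order_by was given
    # After materializing and sorting by "Y", the "Row" column is [5, 4, 3, 1, 2].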
@@ -825,7 +861,7 @@ type Table
         table.order_by [(Sort_Column.Select_By_Name "a.*" use_regex=True case_sensitivity=Case_Sensitivity.Insensitive)]
     @columns Widget_Helpers.make_order_by_selector
     order_by : Vector (Text | Sort_Column) | Text -> Text_Ordering -> Boolean -> Problem_Behavior -> Table ! Incomparable_Values | No_Input_Columns_Selected | Missing_Input_Columns
-    order_by self (columns = ([(Sort_Column.Name (self.columns.at 0 . name))])) text_ordering=Text_Ordering.Default error_on_missing_columns=True on_problems=Problem_Behavior.Report_Warning = Panic.handle_wrapped_dataflow_error <|
+    order_by self (columns = ([(Sort_Column.Name (self.columns.at 0 . name))])) text_ordering=Text_Ordering.Default error_on_missing_columns=True on_problems=Problem_Behavior.Report_Warning =
         problem_builder = Problem_Builder.new error_on_missing_columns=error_on_missing_columns types_to_always_throw=[No_Input_Columns_Selected]
         columns_for_ordering = Table_Helpers.prepare_order_by self.columns columns problem_builder
         problem_builder.attach_problems_before on_problems <|
@@ -833,14 +869,43 @@ type Table
             column = selected_column.column
             associated_selector = selected_column.associated_selector
             effective_text_ordering = if column.value_type.is_text then text_ordering else Nothing
-            ## FIXME [RW] this is only needed because `Vector.map` does not
-               propagate dataflow errors correctly. See:
-               https://www.pivotaltracker.com/story/show/181057718
-            Panic.throw_wrapped_if_error <|
-                self.connection.dialect.prepare_order_descriptor column associated_selector.direction effective_text_ordering
+            self.connection.dialect.prepare_order_descriptor column associated_selector.direction effective_text_ordering
         new_ctx = self.context.add_orders new_order_descriptors
         self.updated_context new_ctx

+    ## PRIVATE
+       Returns the default ordering used for operations like `add_row_number` or
+       `take`.
+
+       If the table was recently ordered by operations like `order_by`, that
+       will determine the ordering. Otherwise, the primary key is used if
+       available.
+    default_ordering : Vector Order_Descriptor | Nothing
+    default_ordering self =
+        explicit_ordering = self.context.orders
+        if explicit_ordering.not_empty then explicit_ordering else
+            case self.get_primary_key of
+                Nothing -> Nothing
+                primary_key_column_names : Vector -> case self.context.from_spec of
+                    From_Spec.Table _ alias _ ->
+                        primary_key_column_names.map column_name->
+                            column_expression = SQL_Expression.Column alias column_name
+                            Order_Descriptor.Value column_expression Sort_Direction.Ascending
+                    _ -> Nothing
+
+    ## PRIVATE
+       Returns the primary key defined for the table, if applicable.
+    get_primary_key : Vector Text | Nothing
+    get_primary_key self = case self.context.from_spec of
+        From_Spec.Table table_name _ _ ->
+            # The primary key may not be valid anymore after grouping!
+            is_primary_key_still_valid = self.context.groups.is_empty
+            if is_primary_key_still_valid.not then Nothing else
+                result = self.connection.dialect.fetch_primary_key self.connection table_name
+                result.catch Any _->Nothing
+        # If the table is the result of a join, union or a subquery, then it has no notion of a primary key.
+        _ -> Nothing

     ## Returns the distinct set of rows within the specified columns from the
        input table.
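The interplay of these two helpers is exercised by the new `Default_Ordering_Spec` at the end of this commit. In short (a sketch; `connection` and the table name are assumed):

    db_table = src_table.select_into_database_table connection "example" temporary=True primary_key=["X"]
    db_table.default_ordering . first . expression . name                     # "X", derived from the primary key
    db_table.order_by ["Y"] . default_ordering . first . expression . name    # "Y" - an explicit ordering wins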
@@ -187,7 +187,8 @@ base_dialect =
     nulls = [["IS_NULL", make_right_unary_op "IS NULL"], ["FILL_NULL", make_function "COALESCE"]]
     contains = [["IS_IN", make_is_in], ["IS_IN_COLUMN", make_is_in_column]]
     types = [simple_cast]
-    base_map = Map.from_vector (arith + logic + compare + functions + agg + counts + text + nulls + contains + types)
+    windows = [["ROW_NUMBER", make_row_number]]
+    base_map = Map.from_vector (arith + logic + compare + functions + agg + counts + text + nulls + contains + types + windows)
     Internal_Dialect.Value base_map wrap_in_quotes

 ## PRIVATE
@@ -265,6 +266,39 @@ make_is_in_column arguments = case arguments.length of
         Builder.code "CASE WHEN " ++ expr ++ " IS NULL THEN " ++ has_nulls ++ " ELSE " ++ is_in ++ " END"
     _ -> Error.throw <| Illegal_State.Error ("The operation IS_IN_COLUMN requires exactly 3 arguments: the expression, the IN subquery, the subquery checking for nulls.")

+## PRIVATE
+make_row_number : Vector Builder -> Builder
+make_row_number arguments = if arguments.length < 4 then Error.throw (Illegal_State.Error "Wrong amount of parameters in ROW_NUMBER IR. This is a bug in the Database library.") else
+    offset = arguments.at 0
+    step = arguments.at 1
+
+    separator_ix = arguments.index_of code->
+        code.build.prepare.first == row_number_parameter_separator
+    ordering = arguments.take (Range.new 2 separator_ix)
+    grouping = arguments.drop (separator_ix+1)
+
+    group_part = if grouping.length == 0 then "" else
+        Builder.code "PARTITION BY " ++ Builder.join ", " grouping
+    Builder.code "(row_number() OVER (" ++ group_part ++ " ORDER BY " ++ Builder.join ", " ordering ++ ") * " ++ step.paren ++ " + " ++ offset.paren ++ ")"
+
+## PRIVATE
+   This is a terrible hack, but I could not figure out a decent way to have an
+   operation take a variable number of arguments of multiple kinds (here both
+   groups and orders are varying).
+
+   Currently, the IR just allows putting a list of parameters for the operation
+   and they are all converted into SQL code before being passed to the
+   particular operation builder. So at this stage there is no way to distinguish
+   the arguments.
+
+   So to distinguish different groups of arguments, we use this 'fake' parameter
+   to act as a separator. This parameter is not supposed to end up in the
+   generated SQL code.
+
+   This is yet another argument for the IR redesign.
+row_number_parameter_separator =
+    "--<!PARAMETER_SEPARATOR!>--"

 ## PRIVATE

    Builds code for an expression.
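To make the separator trick concrete, here is a hedged sketch of the parameter layout for a hypothetical call `t.add_row_number group_by=["X"] order_by=["Y"] from=100 step=100`, and of the expression `make_row_number` then renders (identifier quoting and the exact order-descriptor syntax vary per dialect):

    # params = [offset, step, ordering..., separator, grouping...]
    #        = [0, 100, "Y", --<!PARAMETER_SEPARATOR!>--, "X"]
    # generated expression, approximately:
    #   (row_number() OVER (PARTITION BY "X" ORDER BY "Y") * (100) + (0))

With `from=100` and `step=100` the offset is `from - step = 0`, so the first row of each group is numbered 100, matching the expectations in the tests below.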
@@ -36,7 +36,7 @@ import project.Internal.Postgres.Postgres_Error_Mapper.Postgres_Error_Mapper
 import project.Internal.SQL_Type_Mapping.SQL_Type_Mapping
 import project.Internal.SQL_Type_Reference.SQL_Type_Reference
 import project.Internal.Statement_Setter.Statement_Setter
-from project.Errors import Unsupported_Database_Operation
+from project.Errors import SQL_Error, Unsupported_Database_Operation

 ## PRIVATE

@@ -219,6 +219,14 @@ type Postgres_Dialect
     get_error_mapper : Error_Mapper
     get_error_mapper self = Postgres_Error_Mapper

+    ## PRIVATE
+       The dialect-dependent strategy to get the Primary Key for a given table.
+
+       Returns `Nothing` if the key is not defined.
+    fetch_primary_key : Connection -> Text -> Vector Text ! Nothing
+    fetch_primary_key self connection table_name =
+        Dialect.default_fetch_primary_key connection table_name
+
 ## PRIVATE
 make_internal_generator_dialect =
     cases = [["LOWER", Base_Generator.make_function "LOWER"], ["UPPER", Base_Generator.make_function "UPPER"]]
@@ -33,7 +33,7 @@ import project.Internal.SQL_Type_Reference.SQL_Type_Reference
 import project.Internal.SQLite.SQLite_Type_Mapping.SQLite_Type_Mapping
 import project.Internal.SQLite.SQLite_Error_Mapper.SQLite_Error_Mapper
 import project.Internal.Statement_Setter.Statement_Setter
-from project.Errors import Unsupported_Database_Operation
+from project.Errors import SQL_Error, Unsupported_Database_Operation

 ## PRIVATE

@@ -239,6 +239,24 @@ type SQLite_Dialect
     get_error_mapper : Error_Mapper
     get_error_mapper self = SQLite_Error_Mapper

+    ## PRIVATE
+       The dialect-dependent strategy to get the Primary Key for a given table.
+
+       Returns `Nothing` if the key is not defined.
+
+       Custom handling is required, because the default DatabaseMetaData
+       implementation does not correctly handle temporary tables.
+    fetch_primary_key : Connection -> Text -> Vector Text ! Nothing
+    fetch_primary_key self connection table_name =
+        wrapped_name = self.internal_generator_dialect.wrap_identifier table_name
+        query = Builder.code "pragma table_info(" ++ wrapped_name ++ ")"
+        info_table = connection.read_statement query.build
+        ## The `pk` field is non-zero if the column is part of the primary key.
+           The column value indicates the position in the key.
+           See: https://www.sqlite.org/pragma.html#pragma_table_info
+        v = info_table.filter "pk" (>0) . order_by "pk" . at "name" . to_vector
+        if v.is_empty then Nothing else v
+
 ## PRIVATE
 make_internal_generator_dialect =
     text = [starts_with, contains, ends_with, make_case_sensitive]+concat_ops+trim_ops
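A sketch of what the pragma-based lookup relies on (hypothetical table; the `pk` semantics come from the SQLite documentation linked above):

    # For: create temporary table example ("A" integer, "Y" text, "Z" integer, primary key ("Y", "Z"))
    # `pragma table_info(example)` reports pk=0 for "A", pk=1 for "Y" and pk=2 for "Z",
    # so filtering pk>0 and ordering by pk reconstructs the key as ["Y", "Z"].

Because the pragma also covers tables living in the `temp` database, this works for the temporary tables that the JDBC DatabaseMetaData route misses (#7037).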
@@ -182,7 +182,7 @@ operations_map =
     always_boolean_ops = ["==", "!=", "equals_ignore_case", ">=", "<=", "<", ">", "BETWEEN", "AND", "OR", "NOT", "IS_NULL", "IS_EMPTY", "LIKE", "IS_IN", "IS_IN_COLUMN", "starts_with", "ends_with", "contains", "BOOL_OR", "IS_INF"]
     always_floating_ops = ["/", "mod", "AVG", "STDDEV_POP", "STDDEV_SAMP", "ROUND"]
     always_text_ops = ["ADD_TEXT", "CONCAT", "CONCAT_QUOTE_IF_NEEDED", "MAKE_CASE_SENSITIVE", "FOLD_CASE", "TRIM", "LTRIM", "RTRIM"]
-    always_integer_ops = ["COUNT", "COUNT_IS_NULL", "COUNT_DISTINCT", "COUNT_DISTINCT_INCLUDE_NULL", "COUNT_EMPTY", "COUNT_NOT_EMPTY", "COUNT_ROWS"]
+    always_integer_ops = ["COUNT", "COUNT_IS_NULL", "COUNT_DISTINCT", "COUNT_DISTINCT_INCLUDE_NULL", "COUNT_EMPTY", "COUNT_NOT_EMPTY", "COUNT_ROWS", "ROW_NUMBER"]
     same_as_first = ["TRUNCATE", "CEIL", "FLOOR"]
     arithmetic_ops = ["ADD_NUMBER", "-", "*", "^", "%", "SUM"]
     merge_input_types_ops = ["ROW_MAX", "ROW_MIN", "MAX", "MIN", "FILL_NULL", "COALESCE"]
@@ -442,35 +442,7 @@ check_update_arguments_structure_match source_table target_table key_columns update_action dry_run on_problems ~action =
 ## PRIVATE
 default_key_columns (table : Database_Table | In_Memory_Table) =
     check_target_table_for_update table <|
         keys = get_primary_key table
         keys.catch Any _->
             Error.throw (Illegal_Argument.Error "Could not determine the primary key for table "+table.name+". Please provide it explicitly.")

 ## PRIVATE
-
-   This method may not work correctly with temporary tables, possibly resulting
-   in `SQL_Error` as such tables may not be found.
-
-   ! Temporary Tables in SQLite
-
-     The temporary tables in SQLite live in a `temp` database. There is a bug in
-     how JDBC retrieves primary keys - it only queries the `sqlite_schema` table
-     which contains schemas of only permanent tables.
-
-     Ideally, we should provide a custom implementation for SQLite that will
-     UNION both `sqlite_schema` and `temp.sqlite_schema` tables to get results
-     for both temporary and permanent tables.
-
-     TODO [RW] fix keys for SQLite temporary tables #7037
 get_primary_key table =
-    connection = table.connection
-    connection.jdbc_connection.with_metadata metadata->
-        rs = metadata.getPrimaryKeys Nothing Nothing table.name
-        keys_table = result_set_to_table rs connection.dialect.make_column_fetcher_for_type
-        # The names of the columns are sometimes lowercase and sometimes uppercase, so we do a case insensitive select first.
-        selected = keys_table.select_columns [Column_Selector.By_Name "COLUMN_NAME", Column_Selector.By_Name "KEY_SEQ"] reorder=True
-        key_column_names = selected.order_by 1 . at 0 . to_vector
-        if key_column_names.is_empty then Nothing else key_column_names
+    table.get_primary_key

 ## PRIVATE
 dry_run_row_limit = 1000
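With this, `default_key_columns` transparently picks up the dialect-aware lookup. A sketch grounded in the Upload_Spec changes below:

    dest = src.select_into_database_table connection "dest-example" temporary=True primary_key=["Y", "Z"]
    default_key_columns dest . should_equal ["Y", "Z"]
    # For a table without a primary key the default is Nothing, and update modes
    # that need a key then fail with Illegal_Argument.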
@@ -1202,7 +1202,7 @@ type Table
     @group_by Widget_Helpers.make_column_name_vector_selector
     @order_by Widget_Helpers.make_order_by_selector
     add_row_number : Text -> Integer -> Integer -> Vector (Text | Integer | Column_Selector) | Text | Integer -> Vector (Text | Sort_Column) | Text -> Problem_Behavior -> Table
-    add_row_number self name="Row" from=1 step=1 group_by=[] order_by=[] on_problems=Problem_Behavior.Report_Warning =
+    add_row_number self (name:Text = "Row") (from:Integer = 1) (step:Integer = 1) group_by=[] order_by=[] on_problems=Problem_Behavior.Report_Warning =
        Add_Row_Number.add_row_number self name from step group_by order_by on_problems

 ## ALIAS Add Column, Update Column, New Column
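A quick usage sketch of the in-memory behaviour, following the patterns in the tests below (the grouped output is inferred from the grouped-enumeration semantics, not copied from a test):

    t = Table.new [["X", ['a', 'b', 'a', 'a', 'c']]]
    t.add_row_number . at "Row" . to_vector                  # [1, 2, 3, 4, 5]
    t.add_row_number group_by=["X"] . at "Row" . to_vector   # [1, 1, 2, 3, 1] - numbering restarts per group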
@@ -33,18 +33,24 @@ add_row_number table name from step group_by order_by on_problems =
-            False -> make_grouped_enumeration name grouping_columns from step
+            False -> make_grouped_ordered_enumeration name grouping_columns ordering from step

-    column_names = table.column_names
-    renamed_table = if column_names.contains name . not then table else
-        problems = [Duplicate_Output_Column_Names.Error [name]]
-        on_problems.attach_problems_before problems <|
-            unique_name_strategy = Unique_Name_Strategy.new
-            unique_name_strategy.mark_used column_names
-            new_name = unique_name_strategy.make_unique name
-            new_columns = table.columns.map column->
-                if column.name == name then column.rename new_name else column
-            Table.new new_columns
+    renamed_table = rename_columns_if_needed table name on_problems Table.new
     renamed_table.set new_column name set_mode=Set_Mode.Add

+## PRIVATE
+   If the table already contains a column called `name` it will be renamed to a
+   unique name, so that a new column with this name can be added.
+rename_columns_if_needed table name on_problems build_table_from_columns =
+    column_names = table.column_names
+    if column_names.contains name . not then table else
+        problems = [Duplicate_Output_Column_Names.Error [name]]
+        on_problems.attach_problems_before problems <|
+            unique_name_strategy = Unique_Name_Strategy.new
+            unique_name_strategy.mark_used column_names
+            new_name = unique_name_strategy.make_unique name
+            new_columns = table.columns.map column->
+                if column.name == name then column.rename new_name else column
+            build_table_from_columns new_columns
+
 ## PRIVATE
 nth_index start step n =
     start + n*step
@@ -6,27 +6,24 @@ from Standard.Table.Errors import Missing_Input_Columns, Duplicate_Output_Column_Names
 from Standard.Test import Test, Problems
 import Standard.Test.Extensions

-from project.Common_Table_Operations.Util import run_default_backend
+import Standard.Database.Extensions.Upload_Database_Table
+import Standard.Database.Extensions.Upload_In_Memory_Table

 polyglot java import java.lang.Long as Java_Long

+import project.Database.Helpers.Name_Generator
+from project.Common_Table_Operations.Util import run_default_backend
+
 main = run_default_backend spec

 spec setup =
     prefix = setup.prefix
     table_builder = setup.table_builder
-    Test.group prefix+"Table.add_row_number" <|
-        Test.specify "should add a row numbering column" <|
-            t = table_builder [["X", ['a', 'b', 'a', 'a', 'c']]]
-            t1 = t.add_row_number
-            rows = t1.rows.to_vector . map .to_vector
-            rows . should_equal [['a', 1], ['b', 2], ['a', 3], ['a', 4], ['c', 5]]
-            t1.at "Row" . to_vector . should_equal [1, 2, 3, 4, 5]
-            t1.at "Row" . value_type . is_integer . should_be_true
+    materialize = setup.materialize
+
+    Test.group prefix+"Table.add_row_number (common)" <|
         Test.specify "should rename existing column upon a name clash" <|
             t1 = table_builder [["X", ['a', 'b']], ["Y", ['c', 'd']], ["Z", [40, 20]]]
-            t2 = t1.add_row_number name="Y"
+            t2 = t1.add_row_number name="Y" order_by=["X"] |> materialize |> _.order_by "X"

             t2.column_names . should_equal ["X", "Y 1", "Z", "Y"]
             t2.at "X" . to_vector . should_equal ['a', 'b']
             t2.at "Y 1" . to_vector . should_equal ['c', 'd']
@@ -37,6 +34,75 @@ spec setup =
             r3 = t1.add_row_number name="X" on_problems=Problem_Behavior.Report_Error
             r3.should_fail_with Duplicate_Output_Column_Names

+        Test.specify "should allow to order the row numbers by some columns" <|
+            t2 = table_builder [["X", ["a", "b", "a", "a"]], ["Y", [1, 2, 3, 4]]]
+            t3 = t2.add_row_number order_by=["X", (Sort_Column.Name "Y" Sort_Direction.Descending)] |> materialize |> _.order_by "Y"
+            t3.at "Y" . to_vector . should_equal [1, 2, 3, 4]
+            t3.at "Row" . to_vector . should_equal [3, 4, 2, 1]
+
+        Test.specify "should allow mixing grouping with ordering and custom start and step" <|
+            vx = ['a', 'b', 'a', 'a', 'a', 'b', 'c', 'c']
+            vy = [9, 8, 7, 6, 5, 4, 100, 200]
+            vr = [1, 2, 3, 4, 5, 6, 7, 8]
+            t = table_builder [["X", vx], ["Y", vy], ["row_id", vr]]
+            # The row id is added to enforce a clear ordering in Database
+            t1 = t.add_row_number group_by=["X"] order_by=["Y"] from=100 step=100 |> materialize |> _.order_by "row_id"
+
+            t1.at "X" . to_vector . should_equal vx
+            t1.at "Y" . to_vector . should_equal vy
+            t1.at "Row" . to_vector . should_equal [400, 200, 300, 200, 100, 100, 100, 200]
+
+        Test.specify "should report floating point equality warning when grouping on float columns" <|
+            t = table_builder [["X", [1.0, 1.5, 1.0, 2.5, 2.5]], ["row_id", [1, 2, 3, 4, 5]]]
+            t1 = t.add_row_number group_by=["X"] order_by=["row_id"] |> materialize |> _.order_by "row_id"
+            Problems.expect_warning Floating_Point_Equality t1
+            t1.at "Row" . to_vector . should_equal [1, 1, 2, 1, 2]
+
+            r2 = t.add_row_number group_by=["X"] order_by=["row_id"] on_problems=Problem_Behavior.Report_Error
+            r2.should_fail_with Floating_Point_Equality
+
+            t3 = t.add_row_number order_by=["X"] |> materialize |> _.order_by "row_id"
+            Problems.assume_no_problems t3
+            t3.at "Row" . to_vector . should_equal [1, 3, 2, 4, 5]
+
+            if setup.is_database.not then
+                t4 = table_builder [["X", [1, "A", 1, 24.0, 24.0, 24.0, 24]], ["row_id", [1, 2, 3, 4, 5, 6, 7]]]
+                t5 = t4.add_row_number group_by=["X"] order_by=["row_id"] |> materialize |> _.order_by "row_id"
+                Problems.expect_warning Floating_Point_Equality t5
+                t5.at "Row" . to_vector . should_equal [1, 1, 2, 1, 2, 3, 4]
+
+        Test.specify "should fail if columns provided in ordering/grouping do not exist" <|
+            t = table_builder [["X", [20, 30, 10]]]
+            r1 = t.add_row_number group_by=["X", "Y", "Z"] order_by=["X"]
+            r1.should_fail_with Missing_Input_Columns
+            r1.catch.criteria . should_equal ["Y", "Z"]
+
+            r2 = t.add_row_number order_by=["Z", "X", "Y"]
+            r2.should_fail_with Missing_Input_Columns
+            r2.catch.criteria . should_equal ["Z", "Y"]
+
+            r3 = t.add_row_number group_by=[44] order_by=["X"]
+            r3.should_fail_with Missing_Input_Columns
+
+        Test.specify "will respect the row order of order_by" <|
+            t = table_builder [["X", [1, 2, 3, 4]], ["Y", [40, 30, 20, 10]]]
+            t1 = t.order_by "Y"
+
+            t2 = t1.add_row_number
+
+            t2.at "X" . to_vector . should_equal [4, 3, 2, 1]
+            t2.at "Y" . to_vector . should_equal [10, 20, 30, 40]
+            t2.at "Row" . to_vector . should_equal [1, 2, 3, 4]
+
+    if setup.is_database.not then Test.group prefix+"Table.add_row_number (in-memory specific)" <|
+        Test.specify "should add a row numbering column" <|
+            t = table_builder [["X", ['a', 'b', 'a', 'a', 'c']]]
+            t1 = t.add_row_number
+            rows = t1.rows.to_vector . map .to_vector
+            rows . should_equal [['a', 1], ['b', 2], ['a', 3], ['a', 4], ['c', 5]]
+            t1.at "Row" . to_vector . should_equal [1, 2, 3, 4, 5]
+            t1.at "Row" . value_type . is_integer . should_be_true
+
         Test.specify "should allow customizing the starting index and step" <|
             t = table_builder [["X", ['a', 'b', 'a']]]
             t1 = t.add_row_number from=10
@@ -71,6 +137,19 @@ spec setup =
             t1.at "X" . to_vector . should_equal v
             t1.at "Row" . to_vector . should_equal [1, 1, 2, 2, 3, 4, 1, 3]

+        Test.specify "should allow mixing grouping with ordering and custom start and step, preserving the original row layout" <|
+            vx = ['a', 'b', 'a', 'a', 'a', 'b', 'c', 'c']
+            vy = [9, 8, 7, 6, 5, 4, 100, 200]
+            vr = [1, 2, 3, 4, 5, 6, 7, 8]
+            t = table_builder [["X", vx], ["Y", vy], ["row_id", vr]]
+            t1 = t.add_row_number group_by=["X"] order_by=["Y"] from=100 step=100
+
+            # No reordering occurred
+            t1.at "row_id" . to_vector . should_equal vr
+            t1.at "X" . to_vector . should_equal vx
+            t1.at "Y" . to_vector . should_equal vy
+            t1.at "Row" . to_vector . should_equal [400, 200, 300, 200, 100, 100, 100, 200]
+
         Test.specify "should allow to order the row numbers by some columns, keeping the row ordering intact" <|
             v = [9, 8, 7, 6, 5, 4, 100, 200]
             t = table_builder [["X", v]]
@@ -83,45 +162,12 @@ spec setup =
             t3 = t2.add_row_number order_by=["X", (Sort_Column.Name "Y" Sort_Direction.Descending)]
             t3.at "Row" . to_vector . should_equal [3, 4, 2, 1]

-        Test.specify "should allow mixing grouping with ordering and custom start and step" <|
-            vx = ['a', 'b', 'a', 'a', 'a', 'b', 'c', 'c']
-            vy = [9, 8, 7, 6, 5, 4, 100, 200]
-            t = table_builder [["X", vx], ["Y", vy]]
-            t1 = t.add_row_number group_by=["X"] order_by=["Y"] from=100 step=100
-
-            # No reordering
-            t1.at "X" . to_vector . should_equal vx
-            t1.at "Y" . to_vector . should_equal vy
-
-            t1.at "Row" . to_vector . should_equal [400, 200, 300, 200, 100, 100, 100, 200]
-
-        Test.specify "should report floating point equality warning when grouping on float columns" <|
-            t = table_builder [["X", [1.0, 1.5, 1.0, 2.5, 2.5]]]
-            t1 = t.add_row_number group_by=["X"]
-            Problems.expect_warning Floating_Point_Equality t1
-            t1.at "Row" . to_vector . should_equal [1, 1, 2, 1, 2]
-
-            r2 = t.add_row_number group_by=["X"] on_problems=Problem_Behavior.Report_Error
-            r2.should_fail_with Floating_Point_Equality
-
-            t3 = t.add_row_number order_by=["X"]
-            Problems.assume_no_problems t3
-            t3.at "Row" . to_vector . should_equal [1, 3, 2, 4, 5]
-
-            t4 = table_builder [["X", [1, "A", 1, 24.0, 24.0, 24.0, 24]]]
-            t5 = t4.add_row_number group_by=["X"]
-            Problems.expect_warning Floating_Point_Equality t5
-            t5.at "Row" . to_vector . should_equal [1, 1, 2, 1, 2, 3, 4]
-
-        Test.specify "should fail if columns provided in ordering/grouping do not exist" <|
-            t = table_builder [["X", [20, 30, 10]]]
-            r1 = t.add_row_number group_by=["X", "Y", "Z"]
-            r1.should_fail_with Missing_Input_Columns
-            r1.catch.criteria . should_equal ["Y", "Z"]
-
-            r2 = t.add_row_number order_by=["Z", "X", "Y"]
-            r2.should_fail_with Missing_Input_Columns
-            r2.catch.criteria . should_equal ["Z", "Y"]
-
-            r3 = t.add_row_number group_by=[44]
-            r3.should_fail_with Missing_Input_Columns
+    if setup.is_database then Test.group prefix+"Table.add_row_number (Database specific)" <|
+        Test.specify "will use the primary key by default" <|
+            src = table_builder [["X", [500, 400, 30, 1, 2]], ["Y", [10, 20, 30, 40, 50]]]
+            db_table = src.select_into_database_table setup.connection (Name_Generator.random_name "add-row-number-test-1") temporary=True primary_key=["X"]
+
+            t2 = db_table.add_row_number |> materialize |> _.order_by ["Y"]
+            t2.at "Y" . to_vector . should_equal [10, 20, 30, 40, 50]
+            t2.at "X" . to_vector . should_equal [500, 400, 30, 1, 2]
+            t2.at "Row" . to_vector . should_equal [5, 4, 3, 1, 2]
@@ -1,5 +1,6 @@
 from Standard.Base import all

+import project.Common_Table_Operations.Add_Row_Number_Spec
 import project.Common_Table_Operations.Aggregate_Spec
 import project.Common_Table_Operations.Column_Operations_Spec
 import project.Common_Table_Operations.Core_Spec
@@ -45,7 +46,9 @@ type Test_Setup
     - aggregate_test_selection: A selection of which aggregate test suites
       should be run. Can be used to skip checks for backends which do not
       support particular features.
-    Config prefix table empty_table table_builder materialize is_database test_selection aggregate_test_selection
+    - connection: A related database connection or Nothing for in-memory
+      tests.
+    Config prefix table empty_table table_builder materialize is_database test_selection aggregate_test_selection connection

 ## Specifies if the given Table backend supports custom Enso types.

@@ -111,6 +114,7 @@ spec setup =
     Distinct_Spec.spec setup
     Cross_Tab_Spec.spec setup
     Transpose_Spec.spec setup
+    Add_Row_Number_Spec.spec setup
     Integration_Tests.spec setup

 main = run_default_backend spec
@@ -11,6 +11,8 @@ from Standard.Database.Errors import all
 from Standard.Test import Test, Test_Suite, Problems
 import Standard.Test.Extensions

+import project.Database.Common.Default_Ordering_Spec
+
 import project.Util
 import project.Database.Helpers.Name_Generator

@@ -272,5 +274,7 @@ run_tests prefix connection upload =
         Test.specify "report error when trying to filter by a custom predicate" <|
             t1.filter "a" (x -> x % 2 == 0) . should_fail_with Unsupported_Database_Operation

+    Default_Ordering_Spec.spec prefix connection
+
 main = Test_Suite.run_main <|
     spec "[SQLite] " (Database.connect (SQLite In_Memory))
@@ -0,0 +1,54 @@
+from Standard.Base import all
+import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
+
+from Standard.Table import Table, Sort_Column, Aggregate_Column
+from Standard.Table.Errors import all
+
+from Standard.Database import all
+from Standard.Database.Errors import all
+
+from Standard.Test import Test, Test_Suite, Problems
+import Standard.Test.Extensions
+
+import project.Util
+import project.Database.Helpers.Name_Generator
+
+spec prefix connection =
+    Test.group prefix+"Table.default_ordering" <|
+        src_table = Table.new [["X", [1, 2, 3]], ["Y", [30, 20, 10]]]
+        db_table_without_key = src_table.select_into_database_table connection (Name_Generator.random_name "default-ordering-1") temporary=True primary_key=Nothing
+        db_table_with_key = src_table.select_into_database_table connection (Name_Generator.random_name "default-ordering-1") temporary=True primary_key=["X"]
+
+        Test.specify "will return Nothing if no primary key is defined" <|
+            db_table_without_key.default_ordering . should_equal Nothing
+
+        Test.specify "will return the key for a table with a primary key" <|
+            v1 = db_table_with_key.default_ordering
+            v1.length . should_equal 1
+            v1.first.expression.name . should_equal "X"
+            v1.first.direction . should_equal Sort_Direction.Ascending
+
+            t2 = db_table_with_key.set "10 - [X]" "X"
+            v2 = t2.default_ordering
+            v2.length . should_equal 1
+            v2.first.expression.name . should_equal "X"
+
+        Test.specify "will return Nothing for composite tables (join, aggregate)" <|
+            db_table_with_key.join db_table_with_key . default_ordering . should_equal Nothing
+            db_table_with_key.aggregate [Aggregate_Column.Group_By "X"] . default_ordering . should_equal Nothing
+
+        Test.specify "will return the ordering determined by order_by" <|
+            v1 = db_table_with_key.order_by ["Y", Sort_Column.Name "X" Sort_Direction.Descending] . default_ordering
+            v1.length . should_equal 2
+            v1.first.expression.name . should_equal "Y"
+            v1.first.direction . should_equal Sort_Direction.Ascending
+            v1.second.expression.name . should_equal "X"
+            v1.second.direction . should_equal Sort_Direction.Descending
+
+            v2 = db_table_without_key.order_by ["Y"] . default_ordering
+            v2.length . should_equal 1
+            v2.first.expression.name . should_equal "Y"
+            v2.first.direction . should_equal Sort_Direction.Ascending
+
+main = Test_Suite.run_main <|
+    spec "[SQLite] " (Database.connect (SQLite In_Memory))
@@ -15,7 +15,7 @@ from Standard.Test import Test, Test_Suite
 import Standard.Test.Extensions
 import Standard.Test.Test_Environment

-import project.Database.Common_Spec
+import project.Database.Common.Common_Spec
 import project.Database.Transaction_Spec
 import project.Database.Upload_Spec
 import project.Database.Helpers.Name_Generator
@@ -284,7 +284,7 @@ run_tests connection db_name =
     empty_agg_table = (agg_in_memory_table.take (First 0)).select_into_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True
     tables.append empty_agg_table.name

-    setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table empty_agg_table table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection
+    setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table empty_agg_table table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection connection=connection
     postgres_specific_spec connection db_name setup
     Common_Table_Operations.Main.spec setup
@@ -11,7 +11,7 @@ from Standard.AWS import Redshift_Details, AWS_Credential
 from Standard.Test import Test, Test_Suite
 import Standard.Test.Extensions

-import project.Database.Common_Spec
+import project.Database.Common.Common_Spec
 import project.Database.Helpers.Name_Generator
 import project.Common_Table_Operations

@@ -62,7 +62,7 @@ run_tests connection =
     empty_agg_table = (agg_in_memory_table.take (First 0)).select_into_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True
     tables.append empty_agg_table.name

-    setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table empty_agg_table table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection
+    setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table empty_agg_table table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection connection=connection
     Common_Table_Operations.Main.spec setup

 connect_via_json_config =
@@ -11,7 +11,7 @@ from Standard.Database.Errors import SQL_Error
 from Standard.Test import Test, Test_Suite
 import Standard.Test.Extensions

-import project.Database.Common_Spec
+import project.Database.Common.Common_Spec
 import project.Database.Transaction_Spec
 import project.Database.Upload_Spec
 import project.Database.Types.SQLite_Type_Mapping_Spec
@@ -218,7 +218,7 @@ sqlite_spec connection prefix =
     agg_table = agg_in_memory_table.select_into_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True
     empty_agg_table = (agg_in_memory_table.take (First 0)).select_into_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True

-    setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table empty_agg_table table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection
+    setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table empty_agg_table table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection connection=connection
     sqlite_specific_spec prefix connection setup
     Common_Table_Operations.Main.spec setup
@@ -12,7 +12,7 @@ from Standard.Table.Errors import all

 from Standard.Database import all
 from Standard.Database.Errors import all
-from Standard.Database.Internal.Upload_Table import get_primary_key, default_key_columns
+from Standard.Database.Internal.Upload_Table import default_key_columns
 import Standard.Database.Data.Column_Constraint.Column_Constraint

 from Standard.Test import Test, Test_Suite, Problems
@@ -154,7 +154,7 @@ spec make_new_connection prefix persistent_connector=True =
             name = Name_Generator.random_name "primary_key 1"
             db_table = connection.create_table table_name=name structure=[Column_Description.Value "X" Value_Type.Integer, Column_Description.Value "Y" Value_Type.Char, Column_Description.Value "Z" Value_Type.Integer, Column_Description.Value "W" Value_Type.Float] primary_key=["Y", "Z"] temporary=False
             Panic.with_finalizer (connection.drop_table db_table.name) <|
-                get_primary_key db_table . should_equal ["Y", "Z"]
+                db_table.get_primary_key . should_equal ["Y", "Z"]

         Test.specify "should ensure that primary key columns specified are valid" <|
             run_with_and_without_output <|
@@ -232,17 +232,17 @@ spec make_new_connection prefix persistent_connector=True =
             db_table_1 = t1.select_into_database_table connection (Name_Generator.random_name "primary-key-1") primary_key=["Y", "X"]
             Panic.with_finalizer (connection.drop_table db_table_1.name) <|
                 db_table_1.at "X" . to_vector . should_equal [1, 2, 3]
-                get_primary_key db_table_1 . should_equal ["Y", "X"]
+                db_table_1.get_primary_key . should_equal ["Y", "X"]

             db_table_2 = t1.select_into_database_table connection (Name_Generator.random_name "primary-key-2")
             Panic.with_finalizer (connection.drop_table db_table_2.name) <|
                 db_table_2.at "X" . to_vector . should_equal [1, 2, 3]
-                get_primary_key db_table_2 . should_equal ["X"]
+                db_table_2.get_primary_key . should_equal ["X"]

             db_table_3 = t1.select_into_database_table connection (Name_Generator.random_name "primary-key-3") primary_key=Nothing
             Panic.with_finalizer (connection.drop_table db_table_3.name) <|
                 db_table_3.at "X" . to_vector . should_equal [1, 2, 3]
-                get_primary_key db_table_3 . should_equal Nothing
+                db_table_3.get_primary_key . should_equal Nothing

         Test.specify "should ensure that primary key columns are valid" <|
             run_with_and_without_output <|
@@ -365,7 +365,7 @@ spec make_new_connection prefix persistent_connector=True =
             db_table = t.select_into_database_table connection (Name_Generator.random_name "source-table") temporary=True
             db_table_2 = db_table.select_into_database_table connection (Name_Generator.random_name "copied-table") primary_key=["X"]
             Panic.with_finalizer (connection.drop_table db_table_2.name) <|
-                get_primary_key db_table_2 . should_equal ["X"]
+                db_table_2.get_primary_key . should_equal ["X"]

         Test.specify "should ensure that primary key columns are valid" <|
             t = Table.new [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]]
@@ -438,8 +438,6 @@ spec make_new_connection prefix persistent_connector=True =
         db_table.column_names . should_equal ["X"]
         db_table.at "X" . to_vector . should_equal []

-    # TODO this pending status should be removed as part of #7037
-    sqlite_temporary_primary_key_pending = if prefix.contains "SQLite" then "Fix get_primary_key on SQLite temporary tables. See #7037"
     test_table_append source_table_builder target_table_builder =
         Test.specify "should be able to append new rows to a table" <|
             dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"]
@@ -464,7 +462,7 @@ spec make_new_connection prefix persistent_connector=True =
             r1 = src.update_database_table dest update_action=Update_Action.Insert key_columns=["X"]
             r1.should_fail_with Rows_Already_Present

-        Test.specify "should use the target table primary key for the key by default" pending=sqlite_temporary_primary_key_pending <|
+        Test.specify "should use the target table primary key for the key by default" <|
             dest1 = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']], ["Z", [4, 5, 6]]] primary_key=["Y", "Z"]
             default_key_columns dest1 . should_equal ["Y", "Z"]

@@ -475,11 +473,6 @@ spec make_new_connection prefix persistent_connector=True =
             rows = r1.rows.to_vector.map .to_vector
             rows.should_contain_the_same_elements_as [[1, 'a'], [4, 'b'], [3, 'c'], [5, 'e']]

-        if sqlite_temporary_primary_key_pending.is_nothing.not then
-            Test.specify "should report a meaningful error when the primary key could not be determined" <|
-                dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["Y"]
-                default_key_columns dest . should_fail_with Illegal_Argument
-
         Test.specify "should be able to Update existing rows in a table" <|
             dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]]
             src = source_table_builder [["X", [2]], ["Y", ['ZZZ']]]
@@ -597,6 +590,11 @@ spec make_new_connection prefix persistent_connector=True =
             r2.column_names . should_equal ["X"]
             r2.at "X" . to_vector . should_contain_the_same_elements_as expected

+            default_key_columns dest . should_equal Nothing
+            r3 = src.update_database_table dest update_action=Update_Action.Insert
+            r3.column_names . should_equal ["X"]
+            r3.at "X" . to_vector . should_contain_the_same_elements_as expected
+
         Test.specify "should fail if no key is specified in other modes" <|
             dest = target_table_builder [["X", [1, 10, 100]]]
             src = source_table_builder [["X", [1, 2, 3]]]
@@ -604,12 +602,12 @@ spec make_new_connection prefix persistent_connector=True =
             run_with_and_without_output <|
                 r1 = src.update_database_table dest update_action=Update_Action.Update key_columns=[]
                 r1.should_fail_with Illegal_Argument
                 r1.catch.to_display_text.should_contain "`key_columns` must be specified"

-                if sqlite_temporary_primary_key_pending.is_nothing then
-                    # The default will also fail because no primary key is detected in the DB.
-                    default_key_columns dest . should_equal Nothing
-                    r2 = src.update_database_table dest update_action=Update_Action.Update
-                    r2.should_fail_with Illegal_Argument
+                # The default will also fail because no primary key is detected in the DB.
+                default_key_columns dest . should_equal Nothing
+                r2 = src.update_database_table dest update_action=Update_Action.Update
+                r2.should_fail_with Illegal_Argument

                 r3 = src.update_database_table dest update_action=Update_Action.Update_Or_Insert key_columns=[]
                 r3.should_fail_with Illegal_Argument
@@ -627,10 +625,9 @@ spec make_new_connection prefix persistent_connector=True =

             # Only checks 1000 rows in dry run mode.
             run_with_and_without_output <|
-                if sqlite_temporary_primary_key_pending.is_nothing then
-                    # Relying on the default key based on primary key.
-                    r1 = src.update_database_table d1 update_action=Update_Action.Insert
-                    r1.should_fail_with Non_Unique_Primary_Key
+                # Relying on the default key based on primary key.
+                r1 = src.update_database_table d1 update_action=Update_Action.Insert
+                r1.should_fail_with Non_Unique_Primary_Key

                 r2 = src.update_database_table d2 key_columns=["X"] update_action=Update_Action.Insert
                 r2.should_fail_with Non_Unique_Primary_Key
@@ -816,8 +813,20 @@ spec make_new_connection prefix persistent_connector=True =
             res = src.update_database_table dest update_action=Update_Action.Insert key_columns=[]
             res.should_fail_with SQL_Error

-        tables_after = connection.base_connection.get_tables_advanced types=Nothing include_hidden=True . at "Name" . to_vector
-        tables_after . should_contain_the_same_elements_as existing_tables
+        tables_immediately_after = connection.base_connection.get_tables_advanced types=Nothing include_hidden=True . at "Name" . to_vector
+
+        # If no new tables are left over - we just finish.
+        if tables_immediately_after.sort == existing_tables.sort then Nothing else
+            ## If there are some additional tables, we add some timeout to
+               allow the database to do the cleaning up.
+            additional_tables = Set.from_vector tables_immediately_after - Set.from_vector existing_tables
+            if additional_tables.is_empty then
+                Test.fail "The Database contains fewer tables after the test than before! That is unexpected, please inspect manually."
+            additional_table = additional_tables.to_vector.first
+
+            wait_until_temporary_table_is_deleted_after_closing_connection connection additional_table
+            tables_after_wait = connection.base_connection.get_tables_advanced types=Nothing include_hidden=True . at "Name" . to_vector
+            tables_after_wait . should_contain_the_same_elements_as existing_tables

 database_table_builder name_prefix args primary_key=[] connection=connection =
     in_memory_table = Table.new args
@@ -14,7 +14,7 @@ run_common_spec spec =
     empty_table = table.take 0
     materialize = x->x

-    setup = Common_Table_Operations.Main.Test_Setup.Config "[In-Memory] " table empty_table Table.new materialize is_database=False test_selection=selection aggregate_test_selection=aggregate_selection
+    setup = Common_Table_Operations.Main.Test_Setup.Config "[In-Memory] " table empty_table Table.new materialize is_database=False test_selection=selection aggregate_test_selection=aggregate_selection connection=Nothing
     spec setup

 spec =