Implement Table.union for Database backend (#6204)

Closes #5235
This commit is contained in:
Radosław Waśko 2023-04-06 10:40:34 +02:00 committed by GitHub
parent df4491de50
commit 83b10a2088
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
31 changed files with 758 additions and 125 deletions

View File

@ -375,6 +375,7 @@
- [Added support for Date/Time columns in the Postgres backend and added
`year`/`month`/`day` operations to Table columns.][6153]
- [`Text.split` can now take a vector of delimiters.][6156]
- [Implemented `Table.union` for the Database backend.][6204]
[debug-shortcuts]:
https://github.com/enso-org/enso/blob/develop/app/gui/docs/product/shortcuts.md#debug
@ -568,6 +569,7 @@
[6150]: https://github.com/enso-org/enso/pull/6150
[6153]: https://github.com/enso-org/enso/pull/6153
[6156]: https://github.com/enso-org/enso/pull/6156
[6204]: https://github.com/enso-org/enso/pull/6204
#### Enso Compiler

View File

@ -146,7 +146,7 @@ type Connection
Error.throw (Table_Not_Found.Error query sql_error treated_as_query=True)
SQL_Query.Raw_SQL raw_sql -> handle_sql_errors <|
self.jdbc_connection.ensure_query_has_no_holes raw_sql . if_not_error <|
columns = self.jdbc_connection.fetch_columns raw_sql Statement_Setter.null
columns = self.fetch_columns raw_sql Statement_Setter.null
name = if alias == "" then (UUID.randomUUID.to_text) else alias
ctx = Context.for_query raw_sql name
Database_Table_Module.make_table self name columns ctx
@ -155,7 +155,7 @@ type Connection
ctx = Context.for_table name (if alias == "" then name else alias)
statement = self.dialect.generate_sql (Query.Select Nothing ctx)
statement_setter = self.dialect.get_statement_setter
columns = self.jdbc_connection.fetch_columns statement statement_setter
columns = self.fetch_columns statement statement_setter
Database_Table_Module.make_table self name columns ctx
result.catch SQL_Error sql_error->
Error.throw (Table_Not_Found.Error name sql_error treated_as_query=False)
@ -189,6 +189,14 @@ type Connection
self.jdbc_connection.with_prepared_statement statement statement_setter stmt->
result_set_to_table stmt.executeQuery self.dialect.make_column_fetcher_for_type type_overrides last_row_only
## PRIVATE
Given a prepared statement, gets the column names and types for the
result set.
Delegates to `JDBC_Connection.raw_fetch_columns`, asking the dialect
(via `needs_execute_query_for_type_inference`) whether the query must
actually be executed for the column types to be inferred correctly
(needed for example in SQLite).
fetch_columns : Text | SQL_Statement -> Statement_Setter -> Any
fetch_columns self statement statement_setter =
needs_execute_query = self.dialect.needs_execute_query_for_type_inference
self.jdbc_connection.raw_fetch_columns statement needs_execute_query statement_setter
## PRIVATE
ADVANCED

View File

@ -8,7 +8,7 @@ import Standard.Table.Internal.Java_Problems
import Standard.Table.Internal.Problem_Builder.Problem_Builder
import Standard.Table.Internal.Widget_Helpers
from Standard.Table import Sort_Column, Data_Formatter, Value_Type, Auto
from Standard.Table.Errors import Floating_Point_Equality, Inexact_Type_Coercion, Invalid_Value_Type
from Standard.Table.Errors import Floating_Point_Equality, Inexact_Type_Coercion, Invalid_Value_Type, Lossy_Conversion
import project.Data.SQL_Statement.SQL_Statement
import project.Data.SQL_Type.SQL_Type
@ -77,6 +77,13 @@ type Column
to_table self =
Table.Value self.name self.connection [self.as_internal] self.context
## Returns a Table describing this column's contents.
The table behaves like `Table.info` - it lists the column name, the count
of non-null items and the value type.
Implemented by delegating to `Table.info` on a table containing just this
column.
info : Table
info self = self.to_table.info
## Returns a materialized column containing rows of this column.
Arguments:
@ -91,11 +98,10 @@ type Column
to_vector self =
self.to_table.read . at self.name . to_vector
## UNSTABLE TODO this is a very early prototype that will be revisited later
This implementation is really just so that we can use the types in
`filter`, it does not provide even a decent approximation of the true
type in many cases. It will be improved when the types work is
implemented.
## Returns the `Value_Type` associated with that column.
The value type determines what type of values the column is storing and
what operations are permitted.
value_type : Value_Type
value_type self =
mapping = self.connection.dialect.get_type_mapping
@ -901,6 +907,63 @@ type Column
_ = [type, format, on_problems]
Error.throw <| Unsupported_Database_Operation.Error "`Column.parse` is not implemented yet for the Database backends."
## PRIVATE
UNSTABLE
Cast the column to a specific type.
Arguments:
- value_type: The `Value_Type` to cast the column to.
- on_problems: Specifies how to handle problems if they occur, reporting
them as warnings by default.
TODO [RW] this is a prototype needed for debugging, proper implementation
and testing will come with #6112.
In the Database backend, this will boil down to a CAST operation.
In the in-memory backend, a conversion will be performed according to
the following rules:
- Anything can be cast into the `Mixed` type.
- Converting to a `Char` type, the elements of the column will be
converted to text. If it is fixed length, the texts will be trimmed or
padded on the right with the space character to match the desired
length.
- Conversion between numeric types will replace values exceeding the
range of the target type with `Nothing`.
- Booleans may also be converted to numbers, with `True` being converted
to `1` and `False` to `0`. The reverse is not supported - use `iif`
instead.
- A `Date_Time` may be converted into a `Date` or `Time` type - the
resulting value will be truncated to the desired type.
- If a `Date` is to be converted to `Date_Time`, it will be set at
midnight of the default system timezone.
? Conversion Precision
In the in-memory backend, if the conversion is lossy, a
`Lossy_Conversion` warning will be reported. The only exception is when
truncating a column which is already a text column - as then the
truncation seems like an intended behaviour, so it is not reported. If
truncating needs to occur when converting a non-text column, a warning
will still be reported.
Currently, the warning is not reported for Database backends.
? Inexact Target Type
If the backend does not support the requested target type, the closest
supported type is chosen and an `Inexact_Type_Coercion` problem is
reported.
cast : Value_Type -> Problem_Behavior -> Column ! Illegal_Argument | Inexact_Type_Coercion | Lossy_Conversion
cast self value_type=self.value_type on_problems=Problem_Behavior.Report_Warning =
dialect = self.connection.dialect
type_mapping = dialect.get_type_mapping
target_sql_type = type_mapping.value_type_to_sql value_type on_problems
target_sql_type.if_not_error <|
infer_from_database new_expression =
SQL_Type_Reference.new self.connection self.context new_expression
new_column = dialect.make_cast self.as_internal target_sql_type infer_from_database
Column.Value new_column.name self.connection new_column.sql_type_reference new_column.expression self.context
## ALIAS Transform Column
Applies `function` to each item in this column and returns the column

View File

@ -10,14 +10,17 @@ import project.Data.SQL_Statement.SQL_Statement
import project.Data.SQL_Type.SQL_Type
import project.Data.Table.Table
import project.Internal.Column_Fetcher.Column_Fetcher
import project.Internal.IR.Context.Context
import project.Internal.IR.From_Spec.From_Spec
import project.Internal.IR.Internal_Column.Internal_Column
import project.Internal.IR.Order_Descriptor.Order_Descriptor
import project.Internal.IR.Query.Query
import project.Internal.IR.SQL_Expression.SQL_Expression
import project.Internal.Postgres.Postgres_Dialect
import project.Internal.Redshift.Redshift_Dialect
import project.Internal.SQLite.SQLite_Dialect
import project.Internal.SQL_Type_Mapping.SQL_Type_Mapping
import project.Internal.SQL_Type_Reference.SQL_Type_Reference
import project.Internal.Statement_Setter.Statement_Setter
from project.Errors import Unsupported_Database_Operation
@ -113,6 +116,61 @@ type Dialect
get_statement_setter self =
Unimplemented.throw "This is an interface only."
## PRIVATE
Builds an SQL expression that casts the given expression to the given
target type.
Arguments:
- column: the input column to transform.
- target_type: the target type.
- infer_result_type_from_database_callback: A callback that can be used
to infer the type of the newly built expression from the Database. It
should be used by default, unless an override is chosen.
make_cast : Internal_Column -> SQL_Type -> (SQL_Expression -> SQL_Type_Reference) -> Internal_Column
make_cast self column target_type infer_result_type_from_database_callback =
_ = [column, target_type, infer_result_type_from_database_callback]
Unimplemented.throw "This is an interface only."
## PRIVATE
Specifies if the `fetch_columns` operation needs to execute the query to
get the column types.
In most backends, the `getMetaData` may be called on a
`PreparedStatement` directly, to infer column types without actually
executing the query. In some however, like SQLite, this is insufficient
and will yield incorrect results, so the query needs to be executed (even
though the full results may not need to be streamed).
needs_execute_query_for_type_inference : Boolean
needs_execute_query_for_type_inference self =
Unimplemented.throw "This is an interface only."
## PRIVATE
Specifies if the cast used to reconcile column types should be done after
performing the union. If `False`, the cast will be done before the union.
Most databases that care about column types will want to do the cast
before the union operation to ensure that types are aligned when merging.
For an SQLite workaround to work, it's better to do the cast after the
union operation.
cast_after_union : Boolean
cast_after_union self =
Unimplemented.throw "This is an interface only."
## PRIVATE
Prepares a query that can be used to fetch the type of an expression in
the provided context.
This method may modify the context to optimize the query while preserving
the types. For example, in most databases, it is fine to add
`WHERE FALSE` to the query - ensuring that the engine will not do any
actual work, but the resulting type will still be the same. There are
exceptions though, like SQLite, where the best we can do is add
`LIMIT 1`.
prepare_fetch_types_query : SQL_Expression -> Context -> SQL_Statement
prepare_fetch_types_query self expression context =
_ = [expression, context]
Unimplemented.throw "This is an interface only."
## PRIVATE
Checks if the given aggregate is supported.
@ -139,3 +197,8 @@ postgres = Postgres_Dialect.postgres
The dialect of Redshift databases.
redshift : Dialect
redshift = Redshift_Dialect.redshift
## PRIVATE
The default implementation of `prepare_fetch_types_query`: wraps the
expression in a `SELECT` aliased as `typed_column` and attaches a
`WHERE FALSE` filter, so the query yields no rows while still letting the
backend report the resulting column type.
default_fetch_types_query dialect expression context =
empty_context = context.add_where_filters [SQL_Expression.Literal "FALSE"]
dialect.generate_sql (Query.Select [["typed_column", expression]] empty_context)

View File

@ -18,12 +18,12 @@ type SQL_Type
- precision: For character types, specifies their length.
See `ResultSetMetaData.getPrecision`.
- scale: The scale for fixed precision numeric types. Not applicable for
other types, so it's value is undefined and will usually just be 0.
other types, so its value is undefined.
See `ResultSetMetaData.getScale`.
- nullable: Specifies if the given column is nullable. May be `Nothing`
if that is unknown / irrelevant for the type.
TODO: the precise meaning of this will be revised with #5872.
Value (typeid : Integer) (name : Text) (precision : Nothing | Integer = Nothing) (scale : Integer = 0) (nullable : Boolean | Nothing = Nothing)
Value (typeid : Integer) (name : Text) (precision : Nothing | Integer = Nothing) (scale : Nothing | Integer = Nothing) (nullable : Boolean | Nothing = Nothing)
## PRIVATE
ADVANCED
@ -40,8 +40,9 @@ type SQL_Type
0 -> Nothing
p : Integer -> p
scale = metadata.getScale ix
effective_scale = if precision.is_nothing && (scale == 0) then Nothing else scale
nullable_id = metadata.isNullable ix
nullable = if nullable_id == ResultSetMetaData.columnNoNulls then False else
if nullable_id == ResultSetMetaData.columnNullable then True else
Nothing
SQL_Type.Value typeid typename precision scale nullable
SQL_Type.Value typeid typename precision effective_scale nullable

View File

@ -13,6 +13,7 @@ import Standard.Table.Data.Expression.Expression
import Standard.Table.Data.Expression.Expression_Error
import Standard.Table.Data.Join_Condition.Join_Condition
import Standard.Table.Data.Join_Kind.Join_Kind
import Standard.Table.Data.Match_Columns as Match_Columns_Helpers
import Standard.Table.Data.Report_Unmatched.Report_Unmatched
import Standard.Table.Data.Row.Row
import Standard.Table.Data.Table.Table as Materialized_Table
@ -1128,11 +1129,77 @@ type Table
regardless of types of other columns. Mixing any other types will
result in a `No_Common_Type` problem. If columns of incompatible types
are meant to be mixed, at least one of them should be explicitly
retyped to the `Mixed` type to indicate that intention.
retyped to the `Mixed` type to indicate that intention. Note that the
`Mixed` type may not be supported by most Database backends.
union : (Table | Vector Table) -> Match_Columns -> Boolean | Report_Unmatched -> Boolean -> Problem_Behavior -> Table
union self tables match_columns=Match_Columns.By_Name keep_unmatched_columns=Report_Unmatched allow_type_widening=True on_problems=Report_Warning =
_ = [tables, match_columns, keep_unmatched_columns, allow_type_widening, on_problems]
Error.throw (Unsupported_Database_Operation.Error "Table.union is not implemented yet for the Database backends.")
# Normalize the argument so that `self` is always the first of the unioned tables.
all_tables = case tables of
v : Vector -> [self] + v
single_table -> [self, single_table]
all_tables.all (check_db_table "tables") . if_not_error <|
problem_builder = Problem_Builder.new
matched_column_sets = Match_Columns_Helpers.match_columns all_tables match_columns keep_unmatched_columns problem_builder
dialect = self.connection.dialect
type_mapping = dialect.get_type_mapping
# For each matched column set, resolve the unified result type and map it to an SQL type.
merged_columns = matched_column_sets.map column_set->
case Table_Helpers.unify_result_type_for_union column_set all_tables allow_type_widening problem_builder of
Nothing -> Nothing
result_type : Value_Type ->
sql_type = type_mapping.value_type_to_sql result_type Problem_Behavior.Report_Error
sql_type.catch Inexact_Type_Coercion error->
Panic.throw <|
Illegal_State.Error "Unexpected inexact type coercion in Union. The union logic should only operate in types supported by the given backend. This is a bug in the Database library. The coercion was: "+error.to_display_text cause=error
[column_set, sql_type]
# Column sets that failed to unify yield `Nothing` and are dropped here; the
# related problems were already reported through `problem_builder`.
good_columns = merged_columns.filter r-> r.is_nothing.not
if good_columns.is_empty then Error.throw No_Output_Columns else
problem_builder.attach_problems_before on_problems <|
cast_after_union = dialect.cast_after_union
# Build one SELECT per input table, aligning its columns with the matched sets.
queries = all_tables.map_with_index i-> t->
columns_to_select = good_columns.map description->
column_set = description.first
result_type = description.second
column_name = column_set.name
input_column = case column_set.column_indices.at i of
# A `Nothing` index means this table lacks the column - pad with NULL.
Nothing ->
typ = SQL_Type_Reference.from_constant SQL_Type.null
expr = SQL_Expression.Literal "NULL"
Internal_Column.Value column_name typ expr
corresponding_column_index : Integer ->
t.at corresponding_column_index . as_internal . rename column_name
## We return `null` return type, as this type should
never be queried - we will just put it into the
union and the overall queried type will be taken
from there. This is just needed to create an
internal representation.
infer_return_type _ = SQL_Type_Reference.null
if cast_after_union then input_column else
dialect.make_cast input_column result_type infer_return_type
pairs = columns_to_select.map c->
[c.name, c.expression]
Query.Select pairs t.context
union_alias = all_tables.map .name . join "_"
new_from = From_Spec.Union queries union_alias
new_ctx = Context.for_subquery new_from
## TODO [RW] The result type is currently fetched
independently for each column, instead we should fetch it
for all columns at once.
See #6118.
infer_return_type expression =
SQL_Type_Reference.new self.connection new_ctx expression
# Build the outer columns referring to the union result by name, applying the
# cast here if the dialect wants it after the union (the SQLite workaround).
new_columns = good_columns.map description->
column_set = description.first
result_type = description.second
name = column_set.name
expression = SQL_Expression.Column union_alias name
case cast_after_union of
True ->
input_column = Internal_Column.Value name SQL_Type_Reference.null expression
dialect.make_cast input_column result_type infer_return_type
False ->
Internal_Column.Value name (infer_return_type expression) expression
Table.Value union_alias self.connection new_columns new_ctx
## ALIAS group, summarize
@ -1323,6 +1390,63 @@ type Table
msg = "Parsing values is not supported in database tables, the table has to be materialized first with `read`."
Error.throw (Unsupported_Database_Operation.Error msg)
## PRIVATE
UNSTABLE
Cast the selected columns to a specific type.
Returns a new table in which the selected columns are replaced with
columns having the new types.
Arguments:
- columns: The selection of columns to cast.
- value_type: The `Value_Type` to cast the column to.
- on_problems: Specifies how to handle problems if they occur, reporting
them as warnings by default.
TODO [RW] this is a prototype needed for debugging, proper implementation
and testing will come with #6112.
In the Database backend, this will boil down to a CAST operation.
In the in-memory backend, a conversion will be performed according to
the following rules:
- Anything can be cast into the `Mixed` type.
- Converting to a `Char` type, the elements of the column will be
converted to text. If it is fixed length, the texts will be trimmed or
padded on the right with the space character to match the desired
length.
- Conversion between numeric types will replace values exceeding the
range of the target type with `Nothing`.
- Booleans may also be converted to numbers, with `True` being converted
to `1` and `False` to `0`. The reverse is not supported - use `iif`
instead.
- A `Date_Time` may be converted into a `Date` or `Time` type - the
resulting value will be truncated to the desired type.
- If a `Date` is to be converted to `Date_Time`, it will be set at
midnight of the default system timezone.
? Conversion Precision
In the in-memory backend, if the conversion is lossy, a
`Lossy_Conversion` warning will be reported. The only exception is when
truncating a column which is already a text column - as then the
truncation seems like an intended behaviour, so it is not reported. If
truncating needs to occur when converting a non-text column, a warning
will still be reported.
Currently, the warning is not reported for Database backends.
? Inexact Target Type
If the backend does not support the requested target type, the closest
supported type is chosen and an `Inexact_Type_Coercion` problem is
reported.
cast : (Text | Integer | Column_Selector | Vector (Integer | Text | Column_Selector)) -> Value_Type -> Problem_Behavior -> Table ! Illegal_Argument | Inexact_Type_Coercion | Lossy_Conversion
cast self columns=[0] value_type=Value_Type.Char on_problems=Problem_Behavior.Report_Warning =
selected = self.select_columns columns
selected.columns.fold self table-> column_to_cast->
new_column = column_to_cast.cast value_type on_problems
table.set new_column new_name=column_to_cast.name set_mode=Set_Mode.Update
## ALIAS dropna
ALIAS drop_missing_rows
Remove rows which are all blank or containing blank values.
@ -1387,8 +1511,8 @@ type Table
## Returns a Table describing this table's contents.
The table lists all columns, counts of non-null items and storage types
of each column.
The table lists all columns, counts of non-null items and value types of
each column.
info : Materialized_Table
info self =
cols = self.internal_columns
@ -1651,3 +1775,16 @@ freshen_columns used_names columns =
type Wrapped_Error
## PRIVATE
Value value
## PRIVATE
Verifies that the argument is a table belonging to this Database backend.
Returns True on success; otherwise a dataflow error describing the
problem is raised.
TODO [RW]: this should check that the tables are on the same connection
check_db_table arg_name table =
    case Table_Helpers.is_table table of
        False -> Error.throw (Type_Error.Error Table table arg_name)
        True ->
            if table.is_a Table then True else
                Error.throw (Illegal_Argument.Error "Currently cross-backend operations are not supported. Materialize the table using `.read` before mixing it with an in-memory Table.")

View File

@ -169,7 +169,7 @@ base_dialect =
fun = name -> [name, make_function name]
arith = [["ADD_NUMBER", make_binary_op "+"], ["ADD_TEXT", make_binary_op "||"], bin "-", bin "*", bin "/", bin "%", ["mod", make_function "MOD"], ["^", make_function "POWER"]]
logic = [bin "AND", bin "OR", unary "NOT", ["IIF", make_iif], ["TRUE", make_constant "TRUE"], ["FALSE", make_constant "FALSE"]]
logic = [bin "AND", bin "OR", unary "NOT", ["IIF", make_iif]]
eq = lift_binary_op "==" make_equals
neq = lift_binary_op "!=" make_not_equals
compare = [eq, neq, bin "<", bin ">", bin "<=", bin ">=", ["BETWEEN", make_between]]
@ -179,7 +179,8 @@ base_dialect =
text = [is_empty, bin "LIKE", simple_equals_ignore_case, fold_case, make_case_sensitive]
nulls = [["IS_NULL", make_right_unary_op "IS NULL"], ["FILL_NULL", make_function "COALESCE"]]
contains = [["IS_IN", make_is_in], ["IS_IN_COLUMN", make_is_in_column]]
base_map = Map.from_vector (arith + logic + compare + functions + agg + counts + text + nulls + contains)
types = [simple_cast]
base_map = Map.from_vector (arith + logic + compare + functions + agg + counts + text + nulls + contains + types)
Internal_Dialect.Value base_map wrap_in_quotes
## PRIVATE
@ -199,6 +200,10 @@ make_iif arguments = case arguments.length of
_ ->
Error.throw <| Illegal_State.Error ("Invalid amount of arguments for operation IIF")
## PRIVATE
A binary operation generating the standard SQL cast syntax:
`CAST(<expression> AS <type>)`.
simple_cast = Base_Generator.lift_binary_op "CAST" a-> b->
Builder.code "CAST(" ++ a ++ " AS " ++ b ++ ")"
## PRIVATE
make_between : Vector Builder -> Builder
make_between arguments = case arguments.length of
@ -249,6 +254,7 @@ generate_expression dialect expr = case expr of
SQL_Expression.Column origin name ->
dialect.wrap_identifier origin ++ '.' ++ dialect.wrap_identifier name
SQL_Expression.Constant value -> Builder.interpolation value
SQL_Expression.Literal value -> Builder.code value
SQL_Expression.Operation kind arguments ->
op = dialect.operation_map.get kind (Error.throw <| Unsupported_Database_Operation.Error kind)
parsed_args = arguments.map (generate_expression dialect)
@ -287,6 +293,14 @@ generate_from_part dialect from_spec = case from_spec of
right = generate_from_part dialect right_spec
ons = Builder.join " AND " (on.map (generate_expression dialect)) . prefix_if_present " ON "
left ++ (" " + kind.to_sql + " ") ++ right ++ ons
From_Spec.Union queries as_name ->
built_queries = queries.map query->
case query of
Query.Select _ _ -> Nothing
_ -> Panic.throw <| Illegal_State.Error "Only SELECT queries can be used in a UNION. This is a bug in the Database library."
generate_query dialect query
joined = Builder.join " UNION ALL " built_queries
joined.paren ++ alias dialect as_name
From_Spec.Sub_Query columns context as_name ->
sub = generate_query dialect (Query.Select columns context)
sub.paren ++ alias dialect as_name

View File

@ -94,12 +94,12 @@ prepare_subqueries left right needs_left_indicator needs_right_indicator =
renamer = Unique_Name_Strategy.new
renamer.mark_used (left.internal_columns.map .name)
# This is an operation, not a constant to avoid adding unnecessary interpolations to the query.
[Internal_Column.Value (renamer.make_unique "left_indicator") SQL_Type_Reference.null (SQL_Expression.Operation "TRUE" [])]
[Internal_Column.Value (renamer.make_unique "left_indicator") SQL_Type_Reference.null (SQL_Expression.Literal "TRUE")]
right_indicators = if needs_right_indicator.not then [] else
renamer = Unique_Name_Strategy.new
renamer.mark_used (right.internal_columns.map .name)
[Internal_Column.Value (renamer.make_unique "right_indicator") SQL_Type_Reference.null (SQL_Expression.Operation "TRUE" [])]
[Internal_Column.Value (renamer.make_unique "right_indicator") SQL_Type_Reference.null (SQL_Expression.Literal "TRUE")]
# Create subqueries that encapsulate the original queries and provide needed columns.
# The generated new sets of columns refer to the encapsulated expressions within the subquery and are

View File

@ -1,6 +1,7 @@
from Standard.Base import all
import project.Internal.IR.Context.Context
import project.Internal.IR.Query.Query
import project.Internal.IR.SQL_Expression.SQL_Expression
import project.Internal.IR.SQL_Join_Kind.SQL_Join_Kind
@ -46,6 +47,22 @@ type From_Spec
sources.
Join (kind : SQL_Join_Kind) (left_spec : From_Spec) (right_spec : From_Spec) (on : Vector SQL_Expression)
## PRIVATE
A query source that performs a union operation on multiple sources.
This maps to the SQL operation `UNION ALL`, keeping any duplicate rows.
Arguments:
- queries: the list of queries to be unioned. Each query should have the
same number of columns, as these will be merged by position. Ideally,
corresponding columns should have the same names too, as the outer
query will be referring to columns of the union by names of the columns
from the first query.
- alias: the name for the consolidated query, to be used by column
references, referring to columns of the union.
Union (queries : Vector Query) (alias : Text)
## PRIVATE
A query source consisting of a sub-query.

View File

@ -30,6 +30,12 @@ type SQL_Expression
values depends on the database backend.
Constant (value : Any)
## PRIVATE
The internal representation of an SQL literal that should be inserted
as-is into a query.
Literal (value : Text)
## PRIVATE
The internal representation of an SQL expression built from an operation

View File

@ -92,10 +92,11 @@ type JDBC_Connection
## PRIVATE
Given a prepared statement, gets the column names and types for the
result set.
fetch_columns : Text | SQL_Statement -> Statement_Setter -> Any
fetch_columns self statement statement_setter =
raw_fetch_columns : Text | SQL_Statement -> Boolean -> Statement_Setter -> Any
raw_fetch_columns self statement execute_query statement_setter =
self.with_prepared_statement statement statement_setter stmt->
metadata = stmt.executeQuery.getMetaData
metadata = if execute_query then stmt.executeQuery.getMetaData else
stmt.getMetaData
resolve_column ix =
name = metadata.getColumnLabel ix+1

View File

@ -9,6 +9,7 @@ import Standard.Table.Internal.Problem_Builder.Problem_Builder
from Standard.Table.Data.Aggregate_Column.Aggregate_Column import all
import project.Connection.Connection.Connection
import project.Data.Dialect
import project.Data.SQL.Builder
import project.Data.SQL_Statement.SQL_Statement
import project.Data.SQL_Type.SQL_Type
@ -20,14 +21,15 @@ import project.Internal.Common.Database_Distinct_Helper
import project.Internal.Common.Database_Join_Helper
import project.Internal.IR.Context.Context
import project.Internal.IR.From_Spec.From_Spec
import project.Internal.IR.SQL_Expression.SQL_Expression
import project.Internal.IR.Internal_Column.Internal_Column
import project.Internal.IR.Order_Descriptor.Order_Descriptor
import project.Internal.IR.Nulls_Order.Nulls_Order
import project.Internal.IR.Order_Descriptor.Order_Descriptor
import project.Internal.IR.SQL_Expression.SQL_Expression
import project.Internal.IR.SQL_Join_Kind.SQL_Join_Kind
import project.Internal.IR.Query.Query
import project.Internal.Postgres.Postgres_Type_Mapping.Postgres_Type_Mapping
import project.Internal.SQL_Type_Mapping.SQL_Type_Mapping
import project.Internal.SQL_Type_Reference.SQL_Type_Reference
import project.Internal.Statement_Setter.Statement_Setter
from project.Errors import Unsupported_Database_Operation
@ -132,6 +134,35 @@ type Postgres_Dialect
get_statement_setter : Statement_Setter
get_statement_setter self = postgres_statement_setter
## PRIVATE
Builds a CAST expression for the given column.
A boolean source being cast to a numeric target is first mapped to
`1`/`0` through `IIF`, and only then the `CAST` is applied. The resulting
column keeps its original name; its type is re-inferred through the
provided callback.
make_cast : Internal_Column -> SQL_Type -> (SQL_Expression -> SQL_Type_Reference) -> Internal_Column
make_cast self column target_type infer_result_type_from_database_callback =
mapping = self.get_type_mapping
source_type = mapping.sql_type_to_value_type column.sql_type_reference.get
target_value_type = mapping.sql_type_to_value_type target_type
# Boolean to Numeric casts need special handling:
transformed_expression = case source_type.is_boolean && target_value_type.is_numeric of
True ->
SQL_Expression.Operation "IIF" [column.expression, SQL_Expression.Literal "1", SQL_Expression.Literal "0"]
False -> column.expression
target_type_sql_text = mapping.sql_type_to_text target_type
new_expression = SQL_Expression.Operation "CAST" [transformed_expression, SQL_Expression.Literal target_type_sql_text]
new_sql_type_reference = infer_result_type_from_database_callback new_expression
Internal_Column.Value column.name new_sql_type_reference new_expression
## PRIVATE
Postgres can infer result types from a prepared statement's metadata
without executing the query.
needs_execute_query_for_type_inference : Boolean
needs_execute_query_for_type_inference self = False
## PRIVATE
Casts reconciling column types are applied before the union operation.
cast_after_union : Boolean
cast_after_union self = False
## PRIVATE
Uses the default `WHERE FALSE` based query for fetching expression types.
prepare_fetch_types_query : SQL_Expression -> Context -> SQL_Statement
prepare_fetch_types_query self expression context =
Dialect.default_fetch_types_query self expression context
## PRIVATE
check_aggregate_support : Aggregate_Column -> Boolean ! Unsupported_Database_Operation
check_aggregate_support self aggregate =

View File

@ -7,6 +7,7 @@ from Standard.Table.Errors import Inexact_Type_Coercion
import project.Data.SQL_Type.SQL_Type
import project.Internal.IR.SQL_Expression.SQL_Expression
import project.Internal.SQL_Type_Mapping
import project.Internal.SQL_Type_Reference.SQL_Type_Reference
polyglot java import java.sql.Types
@ -48,9 +49,7 @@ type Postgres_Type_Mapping
type_name = if with_timezone then "timestamptz" else "timestamp"
SQL_Type.Value Types.TIMESTAMP type_name
Value_Type.Binary _ _ ->
# This is the maximum size that JDBC driver reports for Postgres.
max_int4 = 2147483647
SQL_Type.Value Types.BINARY "bytea" precision=max_int4
SQL_Type.Value Types.BINARY "bytea" precision=max_precision
Value_Type.Mixed ->
Error.throw (Illegal_Argument.Error "Postgres tables do not support Mixed types.")
Value_Type.Unsupported_Data_Type type_name underlying_type ->
@ -74,6 +73,19 @@ type Postgres_Type_Mapping
Nothing -> on_unknown_type sql_type
builder -> builder sql_type
## PRIVATE
Converts an `SQL_Type` into the type name text that can be used in a
CAST expression.
sql_type_to_text : SQL_Type -> Text
sql_type_to_text sql_type =
# NOTE(review): `bool` is returned as-is, bypassing the default rendering -
# presumably to avoid attaching precision to it; confirm.
if sql_type.name == "bool" then "bool" else
variable_length_types = [Types.VARCHAR, Types.BINARY]
## If the type is variable length and its precision equals the reported
maximum, we treat it as unbounded, as otherwise a too big max length
may not be accepted by Postgres.
skip_precision = (variable_length_types.contains sql_type.typeid) && (sql_type.precision == max_precision)
case skip_precision of
True -> sql_type.name
False -> SQL_Type_Mapping.default_sql_type_to_text sql_type
## PRIVATE
The Postgres type mapping always relies on the return type determined by
the database backend.
@ -95,7 +107,6 @@ type Postgres_Type_Mapping
simple_types_map = Map.from_vector <|
ints = [[Types.SMALLINT, Value_Type.Integer Bits.Bits_16], [Types.BIGINT, Value_Type.Integer Bits.Bits_64], [Types.INTEGER, Value_Type.Integer Bits.Bits_32]]
floats = [[Types.DOUBLE, Value_Type.Float Bits.Bits_64], [Types.REAL, Value_Type.Float Bits.Bits_32]]
# TODO Bit1, Date_Time
other = [[Types.DATE, Value_Type.Date], [Types.TIME, Value_Type.Time]]
ints + floats + other
@ -127,3 +138,8 @@ complex_types_map = Map.from_vector <|
## PRIVATE
on_unknown_type sql_type =
Value_Type.Unsupported_Data_Type sql_type.name sql_type
## PRIVATE
This is the maximum size that JDBC driver reports for 'unbounded' types in
Postgres.
max_precision = 2147483647

View File

@ -4,14 +4,17 @@ import Standard.Table.Internal.Naming_Helpers.Naming_Helpers
from Standard.Table import Aggregate_Column
import project.Connection.Connection.Connection
import project.Data.Dialect
import project.Data.SQL_Statement.SQL_Statement
import project.Data.SQL_Type.SQL_Type
import project.Data.Table.Table
import project.Internal.Base_Generator
import project.Internal.Column_Fetcher.Column_Fetcher
import project.Internal.Column_Fetcher as Column_Fetcher_Module
import project.Internal.IR.Context.Context
import project.Internal.IR.From_Spec.From_Spec
import project.Internal.IR.Internal_Column.Internal_Column
import project.Internal.IR.SQL_Expression.SQL_Expression
import project.Internal.IR.SQL_Join_Kind.SQL_Join_Kind
import project.Internal.IR.Order_Descriptor.Order_Descriptor
import project.Internal.IR.Query.Query
@ -19,6 +22,7 @@ import project.Internal.Postgres.Postgres_Dialect
import project.Internal.Common.Database_Join_Helper
import project.Internal.Postgres.Postgres_Type_Mapping.Postgres_Type_Mapping
import project.Internal.SQL_Type_Mapping.SQL_Type_Mapping
import project.Internal.SQL_Type_Reference.SQL_Type_Reference
import project.Internal.Statement_Setter.Statement_Setter
from project.Errors import Unsupported_Database_Operation
@ -108,6 +112,28 @@ type Redshift_Dialect
## PRIVATE
   Redshift reuses the Postgres statement setter for binding values into
   prepared statements.
get_statement_setter : Statement_Setter
get_statement_setter self = Postgres_Dialect.postgres_statement_setter
## PRIVATE
   Creates a new column that is `column` cast to `target_type`.

   The result type is not assumed to equal the requested type: it is
   inferred from the database through the provided callback, since the
   backend may resolve the CAST to a slightly different type.
make_cast : Internal_Column -> SQL_Type -> (SQL_Expression -> SQL_Type_Reference) -> Internal_Column
make_cast self column target_type infer_result_type_from_database_callback =
    mapping = self.get_type_mapping
    # Render the target type as dialect-specific SQL text for the CAST.
    sql_type_text = mapping.sql_type_to_text target_type
    new_expression = SQL_Expression.Operation "CAST" [column.expression, SQL_Expression.Literal sql_type_text]
    # Ask the database what type the CAST expression actually yields.
    new_sql_type_reference = infer_result_type_from_database_callback new_expression
    Internal_Column.Value column.name new_sql_type_reference new_expression
## PRIVATE
   Specifies that this dialect can infer result types without actually
   executing the probing query.
needs_execute_query_for_type_inference : Boolean
needs_execute_query_for_type_inference self = False
## PRIVATE
   Whether `union` needs to re-apply casts to columns after merging the
   sub-queries; `False` here suggests the upfront casts suffice for this
   dialect. NOTE(review): semantics inferred from the name - confirm
   against the `Table.union` implementation.
cast_after_union : Boolean
cast_after_union self = False
## PRIVATE
   Builds the query used to determine the SQL type of `expression` within
   `context`, delegating to the shared default implementation.
prepare_fetch_types_query : SQL_Expression -> Context -> SQL_Statement
prepare_fetch_types_query self expression context =
    Dialect.default_fetch_types_query self expression context
## PRIVATE
check_aggregate_support : Aggregate_Column -> Boolean ! Unsupported_Database_Operation
check_aggregate_support self aggregate =

View File

@ -2,6 +2,7 @@ from Standard.Base import all
import Standard.Base.Errors.Unimplemented.Unimplemented
import Standard.Table.Data.Type.Value_Type.Value_Type
from Standard.Table.Errors import Inexact_Type_Coercion
import project.Data.SQL_Type.SQL_Type
import project.Internal.IR.SQL_Expression.SQL_Expression
@ -21,7 +22,7 @@ type SQL_Type_Mapping
If the conversion is exact, it should be reversible, i.e.
`sql_type_to_value_type (value_type_to_sql x Problem_Behavior.Report_Error) = x`.
value_type_to_sql : Value_Type -> Problem_Behavior -> SQL_Type
value_type_to_sql : Value_Type -> Problem_Behavior -> SQL_Type ! Inexact_Type_Coercion
value_type_to_sql value_type on_problems =
_ = [value_type, on_problems]
Unimplemented.throw "This is an interface only."
@ -34,6 +35,14 @@ type SQL_Type_Mapping
_ = sql_type
Unimplemented.throw "This is an interface only."
## Converts an SQL_Type to a Text representation compatible with the related
   SQL dialect, usable in SQL expressions like CAST or in column
   definitions. Concrete dialect mappings must override this method.
sql_type_to_text : SQL_Type -> Text
sql_type_to_text sql_type =
    _ = sql_type
    Unimplemented.throw "This is an interface only."
## PRIVATE
ADVANCED
Returns a `SQL_Type_Reference` that will resolve to the resulting type of
@ -76,3 +85,10 @@ type SQL_Type_Mapping
prepare_type_overrides column_type_suggestions =
_ = column_type_suggestions
Unimplemented.throw "This is an interface only."
## PRIVATE
   Default rendering of an `SQL_Type` as SQL text: the trimmed type name
   followed by an optional precision (and scale) suffix, e.g. `VARCHAR(10)`
   or `NUMERIC(10,2)`. Types without a precision get no suffix.
default_sql_type_to_text sql_type =
    suffix = if sql_type.precision.is_nothing then "" else
        if sql_type.scale.is_nothing then "(" + sql_type.precision.to_text + ")" else
            ## Fixed: the precision+scale branch used to emit a stray space
               before the parenthesis (`NUMERIC (10,2)`), inconsistent with
               the precision-only branch (`VARCHAR(10)`).
            "(" + sql_type.precision.to_text + "," + sql_type.scale.to_text + ")"
    sql_type.name.trim + suffix

View File

@ -44,10 +44,9 @@ type SQL_Type_Reference
new : Connection -> Context -> SQL_Expression -> SQL_Type_Reference
new connection context expression =
do_fetch =
empty_context = context.add_where_filters [SQL_Expression.Constant False]
statement = connection.dialect.generate_sql (Query.Select [["typed_column", expression]] empty_context)
statement = connection.dialect.prepare_fetch_types_query expression context
statement_setter = connection.dialect.get_statement_setter
columns = connection.jdbc_connection.fetch_columns statement statement_setter
columns = connection.fetch_columns statement statement_setter
only_column = columns.first
only_column.second
SQL_Type_Reference.Computed_By_Database do_fetch

View File

@ -18,12 +18,14 @@ import project.Internal.Column_Fetcher as Column_Fetcher_Module
import project.Internal.IR.Context.Context
import project.Internal.IR.From_Spec.From_Spec
import project.Internal.IR.Internal_Column.Internal_Column
import project.Internal.IR.SQL_Join_Kind.SQL_Join_Kind
import project.Internal.IR.Order_Descriptor.Order_Descriptor
import project.Internal.IR.Query.Query
import project.Internal.IR.SQL_Expression.SQL_Expression
import project.Internal.IR.SQL_Join_Kind.SQL_Join_Kind
import project.Internal.Common.Database_Distinct_Helper
import project.Internal.Common.Database_Join_Helper
import project.Internal.SQL_Type_Mapping.SQL_Type_Mapping
import project.Internal.SQL_Type_Reference.SQL_Type_Reference
import project.Internal.SQLite.SQLite_Type_Mapping.SQLite_Type_Mapping
import project.Internal.Statement_Setter.Statement_Setter
from project.Errors import Unsupported_Database_Operation
@ -151,6 +153,31 @@ type SQLite_Dialect
## PRIVATE
   SQLite uses the default statement setter for binding values into
   prepared statements.
get_statement_setter : Statement_Setter
get_statement_setter self = Statement_Setter.default
## PRIVATE
   Creates a new column that is `column` cast to `target_type`.

   Unlike other dialects, the result type is not inferred from the
   database - it is forced to `target_type` (see the comment inside).
make_cast : Internal_Column -> SQL_Type -> (SQL_Expression -> SQL_Type_Reference) -> Internal_Column
make_cast self column target_type infer_result_type_from_database_callback =
    # The inference callback is deliberately unused in SQLite.
    _ = infer_result_type_from_database_callback
    mapping = self.get_type_mapping
    sql_type_text = mapping.sql_type_to_text target_type
    new_expression = SQL_Expression.Operation "CAST" [column.expression, SQL_Expression.Literal sql_type_text]
    # We override the type here, because SQLite gets it wrong if the column starts with NULL values.
    new_sql_type_reference = SQL_Type_Reference.from_constant target_type
    Internal_Column.Value column.name new_sql_type_reference new_expression
## PRIVATE
   SQLite result types are inferred by actually executing the probing
   query (limited to one row by `prepare_fetch_types_query`).
needs_execute_query_for_type_inference : Boolean
needs_execute_query_for_type_inference self = True
## PRIVATE
   Whether `union` needs to re-apply casts to columns after merging the
   sub-queries; `True` here suggests SQLite requires that post-merge cast.
   NOTE(review): semantics inferred from the name - confirm against the
   `Table.union` implementation.
cast_after_union : Boolean
cast_after_union self = True
## PRIVATE
   Builds the query used to determine the type of `expression`: the
   expression is selected with the context limited to a single row, keeping
   the probing query cheap even though it must actually be executed.
prepare_fetch_types_query : SQL_Expression -> Context -> SQL_Statement
prepare_fetch_types_query self expression context =
    minimized_context = context.set_limit 1
    self.generate_sql (Query.Select [["typed_column", expression]] minimized_context)
## PRIVATE
check_aggregate_support : Aggregate_Column -> Boolean ! Unsupported_Database_Operation
check_aggregate_support self aggregate = case aggregate of

View File

@ -5,12 +5,14 @@ import Standard.Base.Errors.Illegal_State.Illegal_State
import Standard.Table.Data.Type.Enso_Types
import Standard.Table.Data.Type.Value_Type.Value_Type
import Standard.Table.Data.Type.Value_Type.Bits
import Standard.Table.Data.Type.Value_Type_Helpers
from Standard.Table.Errors import Inexact_Type_Coercion
import project.Data.Column.Column
import project.Data.SQL_Type.SQL_Type
import project.Internal.IR.Internal_Column.Internal_Column
import project.Internal.IR.SQL_Expression.SQL_Expression
import project.Internal.SQL_Type_Mapping
import project.Internal.SQL_Type_Reference.SQL_Type_Reference
polyglot java import java.sql.Types
@ -77,6 +79,10 @@ type SQLite_Type_Mapping
Value_Type.Unsupported_Data_Type sql_type.name sql_type
simple_types_map.get sql_type.typeid on_not_found
## PRIVATE
   Converts an `SQL_Type` to SQLite-compatible SQL text, relying on the
   shared default formatting.
sql_type_to_text : SQL_Type -> Text
sql_type_to_text sql_type = SQL_Type_Mapping.default_sql_type_to_text sql_type
## PRIVATE
The SQLite type mapping takes special measures to keep boolean columns
boolean even if the Database will say that they are numeric.
@ -113,7 +119,11 @@ type SQLite_Type_Mapping
Panic.throw (Illegal_State.Error "Impossible: IIF must have 3 arguments. This is a bug in the Database library.")
inputs_types = arguments.drop 1 . map find_type
if inputs_types.first == inputs_types.second then return inputs_types.first else
infer_default_type
case Value_Type_Helpers.reconcile_types inputs_types.first inputs_types.second of
## Inference failed, fall back to default type.
Ideally, should never happen. To be handled in #6106.
Value_Type.Mixed -> infer_default_type
common -> return common
always_boolean_ops = ["==", "!=", "equals_ignore_case", ">=", "<=", "<", ">", "BETWEEN", "AND", "OR", "NOT", "IS_NULL", "IS_NAN", "IS_EMPTY", "LIKE", "IS_IN", "starts_with", "ends_with", "contains"]
always_text_ops = ["ADD_TEXT", "CONCAT", "CONCAT_QUOTE_IF_NEEDED"]

View File

@ -1240,6 +1240,13 @@ type Column
to_table : Table
to_table self = Table.Value self.java_column.toTable
## Returns a Table describing this column's contents.

   The table behaves like `Table.info` - it lists the column name, the count
   of non-null items and the value type. It is computed by converting the
   column to a single-column table and delegating to `Table.info`.
info : Table
info self = self.to_table.info
## UNSTABLE
Sorts the column according to the specified rules.

View File

@ -1491,16 +1491,14 @@ type Table
regardless of types of other columns. Mixing any other types will
result in a `No_Common_Type` problem. If columns of incompatible types
are meant to be mixed, at least one of them should be explicitly
retyped to the `Mixed` type to indicate that intention.
retyped to the `Mixed` type to indicate that intention. Note that the
`Mixed` type may not be supported by most Database backends.
union : (Table | Vector Table) -> Match_Columns -> Boolean | Report_Unmatched -> Boolean -> Problem_Behavior -> Table
union self tables match_columns=Match_Columns.By_Name keep_unmatched_columns=Report_Unmatched allow_type_widening=True on_problems=Report_Warning =
all_tables = case tables of
v : Vector -> [self] + v
single_table -> [self, single_table]
## `is_everything_ok` should actually never be False; it will either be
True or will contain a dataflow error propagating through the if.
is_everything_ok = all_tables.all (check_table "tables")
if is_everything_ok then
all_tables.all (check_table "tables") . if_not_error <|
problem_builder = Problem_Builder.new
matched_column_sets = Match_Columns_Helpers.match_columns all_tables match_columns keep_unmatched_columns problem_builder
result_row_count = all_tables.fold 0 c-> t-> c + t.row_count
@ -1543,8 +1541,8 @@ type Table
## Returns a Table describing this table's contents.
The table lists all columns, counts of non-null items and storage types
of each column.
The table lists all columns, counts of non-null items and value types of
each column.
> Example
Get information about a table.

View File

@ -87,7 +87,7 @@ type Value_Type
Arguments:
- precision: the total number of digits in the number.
- scale: the number of digits after the decimal point.
Decimal precision:(Integer|Nothing)=Nothing scale:(Integer|Nothing)=0
Decimal precision:(Integer|Nothing)=Nothing scale:(Integer|Nothing)=Nothing
## Character string.

View File

@ -40,20 +40,26 @@ reconcile_types current new = case current of
Value_Type.Char current_size current_variable -> case new of
Value_Type.Char new_size new_variable ->
result_variable = current_variable || new_variable || current_size != new_size
case result_variable of
True -> Value_Type.Char Nothing True
False -> Value_Type.Char current_size False
result_size = max_size current_size new_size
Value_Type.Char result_size result_variable
_ -> Value_Type.Mixed
Value_Type.Binary current_size current_variable -> case new of
Value_Type.Binary new_size new_variable ->
result_variable = current_variable || new_variable || current_size != new_size
case result_variable of
True -> Value_Type.Binary Nothing True
False -> Value_Type.Binary current_size False
result_size = max_size current_size new_size
Value_Type.Binary result_size result_variable
_ -> Value_Type.Mixed
_ ->
if current == new then current else Value_Type.Mixed
## PRIVATE
   Reconciles two size parameters. If either of them is `Nothing` (meaning
   unbounded), returns `Nothing`. If both are bounded, the larger one is
   returned - so the reconciled size can hold values from both inputs.
max_size a b =
    if a.is_nothing || b.is_nothing then Nothing else
        Math.max a b
## PRIVATE
Finds the most specific value type that will fit all the provided types.

View File

@ -533,6 +533,12 @@ type Inexact_Type_Coercion
to_text self =
"Inexact_Type_Coercion.Warning (requested_type = " + self.requested_type.to_text + ") (actual_type = " + self.actual_type.to_text + ")"
## TODO figure out this error in #6112
type Lossy_Conversion
    ## Indicates that some likely not-insignificant information was lost
       during a conversion.
    Error
type Invalid_Value_For_Type
## PRIVATE
Indicates that a column construction/transformation failed because the

View File

@ -0,0 +1,93 @@
from Standard.Base import all
from Standard.Table import Value_Type
from Standard.Test import Test, Problems
import Standard.Test.Extensions
from project.Common_Table_Operations.Util import run_default_backend
main = run_default_backend spec
spec setup =
prefix = setup.prefix
table_builder = setup.table_builder
# TODO this spec will be expanded in #6112
Test.group prefix+"Column.cast" pending=(if setup.is_database.not then "Cast is not implemented in the in-memory backend yet.") <|
Test.specify "should allow to cast an integer column to text" <|
t = table_builder [["X", [1, 2, 3000]]]
c = t.at "X" . cast Value_Type.Char
c.value_type.is_text . should_be_true
c.to_vector . should_equal ["1", "2", "3000"]
Test.specify "should allow to cast a boolean column to integer" <|
t = table_builder [["X", [True, False, True]]]
c = t.at "X" . cast Value_Type.Integer
c.value_type.is_integer . should_be_true
c.to_vector . should_equal [1, 0, 1]
Test.specify "should allow to cast a boolean column to text" pending="TODO: sqlite has issue with this, figure out in #6112" <|
t = table_builder [["X", [True, False, True]]]
c = t.at "X" . cast Value_Type.Char
c.value_type.is_text . should_be_true
c.to_vector . should_equal ["true", "false", "true"]
Test.specify "should allow to cast a text column to fixed-length" pending=(if setup.test_selection.fixed_length_text_columns.not then "Fixed-length Char columns are not supported by this backend.") <|
t = table_builder [["X", ["a", "DEF", "a slightly longer text"]]]
c = t.at "X" . cast (Value_Type.Char size=3 variable_length=False)
c.value_type . should_equal (Value_Type.Char size=3 variable_length=False)
c.to_vector . should_equal ["a ", "DEF", "a s"]
Test.specify "should work if the first row is NULL" <|
t = table_builder [["X", [Nothing, 1, 2, 3000]], ["Y", [Nothing, True, False, True]]]
c1 = t.at "X" . cast Value_Type.Char
c1.value_type.is_text . should_be_true
c1.to_vector . should_equal [Nothing, "1", "2", "3000"]
c2 = t.at "Y" . cast Value_Type.Integer
c2.value_type.is_integer . should_be_true
c2.to_vector . should_equal [Nothing, 1, 0, 1]
Test.specify "should not lose the type after further operations were performed on the result" <|
t = table_builder [["X", [1, 2, 3000]], ["Y", [True, False, True]]]
c1 = t.at "X" . cast Value_Type.Char
c2 = t.at "Y" . cast Value_Type.Integer
c3 = c1 + '_suffix'
c3.value_type.is_text . should_be_true
c3.to_vector . should_equal ["1_suffix", "2_suffix", "3000_suffix"]
c4 = c2 + 1000
c4.value_type.is_integer . should_be_true
c4.to_vector . should_equal [1001, 1000, 1001]
pending_sqlite_types = if prefix.contains "SQLite" then "TODO: perform SQLite type inference locally - #6208"
Test.specify "should not lose the type after further operations were performed on the result, even if the first row is NULL" pending=pending_sqlite_types <|
t = table_builder [["X", [Nothing, 1, 2, 3000]], ["Y", [Nothing, True, False, True]]]
c1 = t.at "X" . cast Value_Type.Char
c2 = t.at "Y" . cast Value_Type.Integer
c3 = c1 + '_suffix'
c3.value_type.is_text . should_be_true
c3.to_vector . should_equal [Nothing, "1_suffix", "2_suffix", "3000_suffix"]
c4 = c2 + 1000
c4.value_type.is_integer . should_be_true
c4.to_vector . should_equal [Nothing, 1001, 1000, 1001]
Test.group prefix+"Table.cast" pending=(if setup.is_database.not then "Cast is not implemented in the in-memory backend yet.") <|
Test.specify 'should cast the columns "in-place" and not reorder them' <|
t = table_builder [["X", [1, 2, 3000]], ["Y", [4, 5, 6]], ["Z", [7, 8, 9]], ["A", [True, False, True]]]
t2 = t.cast ["Z", "Y"] Value_Type.Char
t2.column_names . should_equal ["X", "Y", "Z", "A"]
t2.at "X" . value_type . is_integer . should_be_true
t2.at "Y" . value_type . is_text . should_be_true
t2.at "Z" . value_type . is_text . should_be_true
t2.at "A" . value_type . is_boolean . should_be_true
t2.at "X" . to_vector . should_equal [1, 2, 3000]
t2.at "Y" . to_vector . should_equal ["4", "5", "6"]
t2.at "Z" . to_vector . should_equal ["7", "8", "9"]
t2.at "A" . to_vector . should_equal [True, False, True]

View File

@ -19,7 +19,11 @@ spec setup =
t = table_builder [["X", [True, False, Nothing, True]]]
t.at "X" . iif 22 33 . to_vector . should_equal [22, 33, Nothing, 22]
Test.specify "iif on Columns" pending="Not implemented yet." Nothing
Test.specify "iif on Columns" <|
t = table_builder [["X", [True, False, Nothing, False]], ["Y", [1, 2, 3, 4]], ["Z", [1.5, 2.5, 3.5, 4.5]]]
c = t.at "X" . iif (t.at "Y") (t.at "Z")
c.value_type . is_floating_point . should_be_true
c.to_vector . should_equal [1, 2.5, Nothing, 4.5]
t2 = table_builder [["x", [1, 4, 5, Nothing]], ["y", [2, 3, 5, Nothing]], ["b", [False, False, True, Nothing]]]
x = t2.at "x"

View File

@ -20,22 +20,23 @@ main = run_default_backend spec
spec setup =
prefix = setup.prefix
table_builder = setup.table_builder
db_todo = if prefix.contains "In-Memory" then Nothing else "Table.union is not yet implemented for the DB backend."
Test.group prefix+"Table.union" pending=db_todo <|
Test.group prefix+"Table.union" <|
Test.specify "should merge columns from multiple tables" <|
t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]]
t2 = table_builder [["A", [4, 5, 6]], ["B", ["d", "e", "f"]]]
t3 = table_builder [["A", [7, 8, 9]], ["B", ["g", "h", "i"]]]
t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]], ["C", [True, False, True]]]
t2 = table_builder [["A", [4, 5, 6]], ["B", ["d", "e", "f"]], ["C", [False, True, False]]]
t3 = table_builder [["A", [7, 8, 9]], ["B", ["g", "h", "i"]], ["C", [True, False, False]]]
t4 = t1.union t2
expect_column_names ["A", "B"] t4
expect_column_names ["A", "B", "C"] t4
t4.at "A" . to_vector . should_equal [1, 2, 3, 4, 5, 6]
t4.at "B" . to_vector . should_equal ["a", "b", "c", "d", "e", "f"]
t4.at "C" . to_vector . should_equal [True, False, True, False, True, False]
t5 = t3.union [t1, t2]
expect_column_names ["A", "B"] t5
expect_column_names ["A", "B", "C"] t5
t5.at "A" . to_vector . should_equal [7, 8, 9, 1, 2, 3, 4, 5, 6]
t5.at "B" . to_vector . should_equal ["g", "h", "i", "a", "b", "c", "d", "e", "f"]
t5.at "C" . to_vector . should_equal [True, False, False, True, False, True, False, True, False]
Test.specify "should fill unmatched columns (by name matching) with nulls and report a warning by default" <|
t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]]
@ -160,66 +161,100 @@ spec setup =
t8 = t1.union [t2, t5, t6, t7] match_columns=Match_Columns.By_Position
expect_column_names ["Y", "A", "Z"] t8
Test.specify "should allow to merge a table with itself" <|
t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]]
t2 = t1.union [t1, t1]
expect_column_names ["A", "B"] t2
t2.at "A" . to_vector . should_equal [1, 2, 3, 1, 2, 3, 1, 2, 3]
t2.at "B" . to_vector . should_equal ["a", "b", "c", "a", "b", "c", "a", "b", "c"]
Test.specify "should not de-duplicate rows" <|
t1 = table_builder [["A", [1, 1, 3]], ["B", ["a", "a", "c"]]]
t2 = table_builder [["A", [1, 2, 2]], ["B", ["a", "b", "b"]]]
t3 = t1.union t2
expect_column_names ["A", "B"] t3
t3.at "A" . to_vector . should_equal [1, 1, 3, 1, 2, 2]
t3.at "B" . to_vector . should_equal ["a", "a", "c", "a", "b", "b"]
Test.specify "should gracefully handle the case where no tables to union were provided" <|
t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]]
t1.union [] . should_equal t1
t1.union [] match_columns=Match_Columns.By_Position . should_equal t1
check_same table =
expect_column_names ["A", "B"] table
table.at "A" . to_vector . should_equal [1, 2, 3]
table.at "B" . to_vector . should_equal ["a", "b", "c"]
t1.union [] keep_unmatched_columns=False . should_equal t1
t1.union [] match_columns=Match_Columns.By_Position keep_unmatched_columns=False . should_equal t1
check_same <| t1.union []
check_same <| t1.union [] match_columns=Match_Columns.By_Position
t1.union [] keep_unmatched_columns=True . should_equal t1
t1.union [] match_columns=Match_Columns.By_Position keep_unmatched_columns=True . should_equal t1
check_same <| t1.union [] keep_unmatched_columns=False
check_same <| t1.union [] match_columns=Match_Columns.By_Position keep_unmatched_columns=False
check_same <| t1.union [] keep_unmatched_columns=True
check_same <| t1.union [] match_columns=Match_Columns.By_Position keep_unmatched_columns=True
Test.specify "should correctly unify text columns of various lengths" pending=(if setup.test_selection.fixed_length_text_columns.not then "Fixed-length Char columns are not supported by this backend.") <|
t1 = table_builder [["A", ["a", "b", "c"]]] . cast "A" (Value_Type.Char size=1 variable_length=False)
t2 = table_builder [["A", ["xyz", "abc", "def"]]] . cast "A" (Value_Type.Char size=3 variable_length=False)
t1.at "A" . value_type . should_equal (Value_Type.Char size=1 variable_length=False)
t2.at "A" . value_type . should_equal (Value_Type.Char size=3 variable_length=False)
t3 = t1.union t2
expect_column_names ["A"] t3
t3.at "A" . to_vector . should_equal ["a", "b", "c", "xyz", "abc", "def"]
t3.at "A" . value_type . is_text . should_be_true
t3.at "A" . value_type . variable_length . should_be_true
Test.specify "should find a common type that will fit the merged columns" <|
t1 = table_builder [["int+bool", [1, 2, 3]], ["int+float", [0, 1, 2]]]
t2 = table_builder [["int+bool", [True, False, Nothing]], ["int+float", [1.0, 2.0, 2.5]]]
t1.at "int+bool" . value_type . should_equal Value_Type.Integer
t1.at "int+float" . value_type . should_equal Value_Type.Integer
t2.at "int+bool" . value_type . should_equal Value_Type.Boolean
t2.at "int+float" . value_type . should_equal Value_Type.Float
t1.at "int+bool" . value_type . is_integer . should_be_true
t1.at "int+float" . value_type . is_integer . should_be_true
t2.at "int+bool" . value_type . is_boolean . should_be_true
t2.at "int+float" . value_type . is_floating_point . should_be_true
t3 = t1.union t2
expect_column_names ["int+bool", "int+float"] t3
t3.at "int+bool" . value_type . should_equal Value_Type.Integer
t3.at "int+float" . value_type . should_equal Value_Type.Float
t3.at "int+bool" . value_type . is_integer . should_be_true
t3.at "int+float" . value_type . is_floating_point . should_be_true
t3.at "int+bool" . to_vector . should_equal [1, 2, 3, 1, 0, Nothing]
t3.at "int+float" . to_vector . should_equal [0, 1, 2, 1.0, 2.0, 2.5]
t4 = table_builder [["float", [1.0, 2.0, 3.3]]]
t5 = t1.union [t2, t4] match_columns=Match_Columns.By_Position keep_unmatched_columns=False
expect_column_names ["int+bool"] t5
t5.at "int+bool" . value_type . should_equal Value_Type.Float
t5.at "int+bool" . value_type . is_floating_point . should_be_true
t5.at "int+bool" . to_vector . should_equal [1, 2, 3, 1, 0, Nothing, 1.0, 2.0, 3.3]
Test.specify "should resort to Mixed value type only if at least one column is already Mixed" <|
## TODO currently no way to retype a column to Mixed, so we are
using a custom object; this test won't work in DB so it will need
to be adapted once proper type support is implemented
t1 = table_builder [["A", [1, 2, 3]], ["mixed", ["a", My_Type.Value 1 2, Nothing]]]
t2 = table_builder [["A", [4, 5, 6]], ["mixed", [1, 2, 3]]]
t1.at "mixed" . value_type . should_equal Value_Type.Mixed
t2.at "mixed" . value_type . should_equal Value_Type.Integer
# Database backends are not required to support Mixed types.
if setup.is_database.not then
Test.specify "should resort to Mixed value type only if at least one column is already Mixed" <|
## TODO currently no way to retype a column to Mixed, so we are
using a custom object
t1 = table_builder [["A", [1, 2, 3]], ["mixed", ["a", My_Type.Value 1 2, Nothing]]]
t2 = table_builder [["A", [4, 5, 6]], ["mixed", [1, 2, 3]]]
t1.at "mixed" . value_type . should_equal Value_Type.Mixed
t2.at "mixed" . value_type . should_equal Value_Type.Integer
t3 = t1.union t2
Problems.assume_no_problems t3
expect_column_names ["A", "mixed"] t3
t3.at "A" . to_vector . should_equal [1, 2, 3, 4, 5, 6]
t3.at "mixed" . to_vector . should_equal ["a", My_Type.Value 1 2, Nothing, 1, 2, 3]
t3 = t1.union t2
Problems.assume_no_problems t3
expect_column_names ["A", "mixed"] t3
t3.at "A" . to_vector . should_equal [1, 2, 3, 4, 5, 6]
t3.at "mixed" . to_vector . should_equal ["a", My_Type.Value 1 2, Nothing, 1, 2, 3]
t4 = table_builder [["A", [1, 3]], ["mixed", [True, False]]]
t5 = table_builder [["A", [4, 5]], ["mixed", ["X", "y"]]]
t4.at "mixed" . value_type . should_equal Value_Type.Boolean
t5.at "mixed" . value_type . should_equal Value_Type.Char
t4 = table_builder [["A", [1, 3]], ["mixed", [True, False]]]
t5 = table_builder [["A", [4, 5]], ["mixed", ["X", "y"]]]
t4.at "mixed" . value_type . should_equal Value_Type.Boolean
t5.at "mixed" . value_type . should_equal Value_Type.Char
t6 = t5.union [t1, t2, t4]
Problems.assume_no_problems t6
expect_column_names ["A", "mixed"] t6
t6.at "A" . to_vector . should_equal [4, 5, 1, 2, 3, 4, 5, 6, 1, 3]
t6.at "mixed" . to_vector . should_equal ["X", "y", "a", My_Type.Value 1 2, Nothing, 1, 2, 3, True, False]
t6.at "mixed" . value_type . should_equal Value_Type.Mixed
t6 = t5.union [t1, t2, t4]
Problems.assume_no_problems t6
expect_column_names ["A", "mixed"] t6
t6.at "A" . to_vector . should_equal [4, 5, 1, 2, 3, 4, 5, 6, 1, 3]
t6.at "mixed" . to_vector . should_equal ["X", "y", "a", My_Type.Value 1 2, Nothing, 1, 2, 3, True, False]
t6.at "mixed" . value_type . should_equal Value_Type.Mixed
Test.specify "if no common type can be found, should report error and drop the problematic column" <|
t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]], ["C", [True, False, Nothing]]]
@ -236,52 +271,60 @@ spec setup =
t1 = table_builder [["A", [1, 2, 3]]]
t2 = table_builder [["A", [4, 5, 6]], ["B", [1.2, 2.2, 3.1]]]
t2.at "B" . value_type . should_equal Value_Type.Float
t3 = t1.union t2 allow_type_widening=False keep_unmatched_columns=True
within_table t3 <|
Problems.assume_no_problems t3
expect_column_names ["A", "B"] t3
t3.at "A" . to_vector . should_equal [1, 2, 3, 4, 5, 6]
t3.at "A" . value_type . should_equal Value_Type.Integer
t3.at "B" . to_vector . should_equal [Nothing, Nothing, Nothing, 1.2, 2.2, 3.1]
t3.at "B" . value_type . should_equal Value_Type.Float
t3.at "A" . value_type . is_integer . should_be_true
t2.at "B" . value_type . is_floating_point . should_be_true
t3.at "B" . value_type . is_floating_point . should_be_true
Test.specify "if type widening is not allowed and types do not match, should report error and drop the problematic column" <|
t1 = table_builder [["A", [1, 2, 3]], ["B", [1, 2, 3]], ["C", [True, False, Nothing]], ["D", [10, 20, 30]], ["E", [1.1, 2.5, 3.2]]]
t2 = table_builder [["A", [4, 5, 6]], ["B", [1.5, 2.5, 3.5]], ["C", [1, 2, 3]], ["D", [True, True, True]], ["E", [1, 2, 3]]]
t1.at "B" . value_type . should_equal Value_Type.Integer
t1.at "C" . value_type . should_equal Value_Type.Boolean
t1.at "D" . value_type . should_equal Value_Type.Integer
t1.at "E" . value_type . should_equal Value_Type.Float
t1.at "B" . value_type . is_integer . should_be_true
t1.at "C" . value_type . is_boolean . should_be_true
t1.at "D" . value_type . is_integer . should_be_true
t1.at "E" . value_type . is_floating_point . should_be_true
t2.at "B" . value_type . should_equal Value_Type.Float
t2.at "C" . value_type . should_equal Value_Type.Integer
t2.at "D" . value_type . should_equal Value_Type.Boolean
t2.at "E" . value_type . should_equal Value_Type.Integer
t2.at "B" . value_type . is_floating_point . should_be_true
t2.at "C" . value_type . is_integer . should_be_true
t2.at "D" . value_type . is_boolean . should_be_true
t2.at "E" . value_type . is_integer . should_be_true
action = t1.union t2 allow_type_widening=False on_problems=_
tester table =
expect_column_names ["A"] table
table.at "A" . to_vector . should_equal [1, 2, 3, 4, 5, 6]
problems = [Column_Type_Mismatch.Error "B" Value_Type.Integer Value_Type.Float, Column_Type_Mismatch.Error "C" Value_Type.Boolean Value_Type.Integer, Column_Type_Mismatch.Error "D" Value_Type.Integer Value_Type.Boolean, Column_Type_Mismatch.Error "E" Value_Type.Float Value_Type.Integer]
Problems.test_problem_handling action problems tester
Test.specify "even if type widening is not allowed, if the first column is mixed, it should accept any column to be concatenated to it" <|
t1 = table_builder [["X", ["a", 1, Nothing]]]
t2 = table_builder [["X", [1]]]
t3 = table_builder [["X", [1.2, 2.3, 3.4]]]
t4 = table_builder [["X", ["a", "b"]]]
t5 = table_builder [["X", [True, False]]]
problem_checker problem =
problem.should_be_a Column_Type_Mismatch
True
err_checker err =
problem_checker err.catch
warn_checker warnings =
warnings.all problem_checker
Problems.test_advanced_problem_handling action err_checker warn_checker tester
t1.at "X" . value_type . should_equal Value_Type.Mixed
t2.at "X" . value_type . should_equal Value_Type.Integer
# Database backends are not required to support Mixed types.
if setup.is_database.not then
Test.specify "even if type widening is not allowed, if the first column is mixed, it should accept any column to be concatenated to it" <|
t1 = table_builder [["X", ["a", 1, Nothing]]]
t2 = table_builder [["X", [1]]]
t3 = table_builder [["X", [1.2, 2.3, 3.4]]]
t4 = table_builder [["X", ["a", "b"]]]
t5 = table_builder [["X", [True, False]]]
t6 = t1.union [t2, t3, t4, t5] allow_type_widening=False
Problems.assume_no_problems t6
t6.at "X" . value_type . should_equal Value_Type.Mixed
t6.at "X" . to_vector . should_equal ["a", 1, Nothing, 1, 1.2, 2.3, 3.4, "a", "b", True, False]
t1.at "X" . value_type . should_equal Value_Type.Mixed
t2.at "X" . value_type . should_equal Value_Type.Integer
t6 = t1.union [t2, t3, t4, t5] allow_type_widening=False
Problems.assume_no_problems t6
t6.at "X" . value_type . should_equal Value_Type.Mixed
t6.at "X" . to_vector . should_equal ["a", 1, Nothing, 1, 1.2, 2.3, 3.4, "a", "b", True, False]
Test.specify "if type mismatches cause all columns to be dropped, fail with No_Output_Columns" <|
t1 = table_builder [["A", [1, 2, 3]]]

View File

@ -4,6 +4,7 @@ import project.Common_Table_Operations.Aggregate_Spec
import project.Common_Table_Operations.Column_Operations_Spec
import project.Common_Table_Operations.Core_Spec
import project.Common_Table_Operations.Cross_Tab_Spec
import project.Common_Table_Operations.Cast_Spec
import project.Common_Table_Operations.Date_Time_Spec
import project.Common_Table_Operations.Distinct_Spec
import project.Common_Table_Operations.Expression_Spec
@ -88,13 +89,16 @@ type Test_Selection
each group. Guaranteed in the in-memory backend, but may not be
supported by all databases.
- date_time: Specifies if the backend supports date/time operations.
Config supports_case_sensitive_columns=True order_by=True natural_ordering=False case_insensitive_ordering=True order_by_unicode_normalization_by_default=False case_insensitive_ascii_only=False take_drop=True allows_mixed_type_comparisons=True supports_unicode_normalization=False is_nan_and_nothing_distinct=True supports_full_join=True distinct_returns_first_row_from_group_if_ordered=True date_time=True
- fixed_length_text_columns: Specifies if the backend supports fixed
length text columns.
Config supports_case_sensitive_columns=True order_by=True natural_ordering=False case_insensitive_ordering=True order_by_unicode_normalization_by_default=False case_insensitive_ascii_only=False take_drop=True allows_mixed_type_comparisons=True supports_unicode_normalization=False is_nan_and_nothing_distinct=True supports_full_join=True distinct_returns_first_row_from_group_if_ordered=True date_time=True fixed_length_text_columns=False
spec setup =
Core_Spec.spec setup
Select_Columns_Spec.spec setup
Column_Operations_Spec.spec setup
Date_Time_Spec.spec setup
Cast_Spec.spec setup
Aggregate_Spec.spec setup
Filter_Spec.spec setup
Missing_Values_Spec.spec setup

View File

@ -181,7 +181,7 @@ run_tests connection db_name =
Common_Spec.spec prefix connection
postgres_specific_spec connection db_name
common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by_unicode_normalization_by_default=True take_drop=False allows_mixed_type_comparisons=False
common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by_unicode_normalization_by_default=True take_drop=False allows_mixed_type_comparisons=False fixed_length_text_columns=True
aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last_row_order=False aggregation_problems=False
agg_in_memory_table = (enso_project.data / "data.csv") . read
agg_table = connection.upload_table (Name_Generator.random_name "Agg1") agg_in_memory_table

View File

@ -44,8 +44,8 @@ spec connection db_name =
t3.at "b" . value_type . should_equal Value_Type.Decimal
t3.at "c" . value_type . should_equal (Value_Type.Decimal precision=10 scale=2)
t3.at "d" . value_type . should_equal (Value_Type.Decimal precision=20 scale=4)
t3.at "e" . value_type . should_equal (Value_Type.Decimal precision=10)
t3.at "f" . value_type . should_equal (Value_Type.Decimal precision=20)
t3.at "e" . value_type . should_equal (Value_Type.Decimal precision=10 scale=0)
t3.at "f" . value_type . should_equal (Value_Type.Decimal precision=20 scale=0)
Test.specify "text" <|
t = make_table "texts" [["a", "char(10)"], ["b", "varchar"], ["c", "varchar(20)"], ["d", "text"]]

View File

@ -2,6 +2,7 @@ from Standard.Base import all
import Standard.Table.Data.Type.Value_Type.Bits
import Standard.Table.Data.Type.Value_Type.Value_Type
import Standard.Table.Data.Type.Value_Type_Helpers
from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
@ -14,7 +15,7 @@ spec =
(Value_Type.Integer Bits.Bits_16).to_display_text . should_equal "Integer (16 bits)"
Value_Type.Float.to_display_text . should_equal "Float (64 bits)"
Value_Type.Decimal.to_display_text . should_equal "Decimal (precision=Nothing, scale=0)"
Value_Type.Decimal.to_display_text . should_equal "Decimal (precision=Nothing, scale=Nothing)"
Value_Type.Char.to_display_text . should_equal "Char (max_size=Nothing, variable_length=True)"
(Value_Type.Binary 8 False).to_display_text . should_equal "Binary (max_size=8 bytes, variable_length=False)"
@ -27,4 +28,38 @@ spec =
Value_Type.Unsupported_Data_Type.to_display_text . should_equal "Unsupported_Data_Type"
(Value_Type.Unsupported_Data_Type "FOO-BAR").to_display_text . should_equal "Unsupported_Data_Type (FOO-BAR)"
Test.specify "should use correct in-memory logic to reconcile pairs of types for operations like union/iif" <|
Value_Type_Helpers.reconcile_types Value_Type.Boolean Value_Type.Boolean . should_equal Value_Type.Boolean
Value_Type_Helpers.reconcile_types Value_Type.Boolean Value_Type.Integer . should_equal Value_Type.Integer
Value_Type_Helpers.reconcile_types (Value_Type.Integer Bits.Bits_16) (Value_Type.Integer Bits.Bits_32) . should_equal (Value_Type.Integer Bits.Bits_32)
Value_Type_Helpers.reconcile_types (Value_Type.Float Bits.Bits_32) (Value_Type.Float Bits.Bits_32) . should_equal (Value_Type.Float Bits.Bits_32)
Value_Type_Helpers.reconcile_types (Value_Type.Float Bits.Bits_32) (Value_Type.Float Bits.Bits_64) . should_equal (Value_Type.Float Bits.Bits_64)
Value_Type_Helpers.reconcile_types Value_Type.Boolean Value_Type.Byte . should_equal Value_Type.Byte
Value_Type_Helpers.reconcile_types (Value_Type.Integer Bits.Bits_16) Value_Type.Byte . should_equal (Value_Type.Integer Bits.Bits_16)
# 64-bit floats are always used when unifying with integers
Value_Type_Helpers.reconcile_types (Value_Type.Float Bits.Bits_32) Value_Type.Byte . should_equal Value_Type.Float
Value_Type_Helpers.reconcile_types (Value_Type.Float Bits.Bits_32) Value_Type.Boolean . should_equal Value_Type.Float
Value_Type_Helpers.reconcile_types (Value_Type.Char 10 False) (Value_Type.Char 10 False) . should_equal (Value_Type.Char 10 False)
Value_Type_Helpers.reconcile_types (Value_Type.Char 10 False) (Value_Type.Char 10 True) . should_equal (Value_Type.Char 10 True)
Value_Type_Helpers.reconcile_types (Value_Type.Char 100 False) (Value_Type.Char 10 True) . should_equal (Value_Type.Char 100 True)
Value_Type_Helpers.reconcile_types (Value_Type.Char 10 False) (Value_Type.Char 15 False) . should_equal (Value_Type.Char 15 True)
Value_Type_Helpers.reconcile_types Value_Type.Date Value_Type.Date . should_equal Value_Type.Date
Value_Type_Helpers.reconcile_types Value_Type.Time Value_Type.Time . should_equal Value_Type.Time
Value_Type_Helpers.reconcile_types Value_Type.Date_Time Value_Type.Date_Time . should_equal Value_Type.Date_Time
## Mixing date and time leads to mixed, if the user wants to convert date to at-midnight timestamp or
date-time to just date, they need to do it explicitly.
Value_Type_Helpers.reconcile_types Value_Type.Date Value_Type.Date_Time . should_equal Value_Type.Mixed
Value_Type_Helpers.reconcile_types Value_Type.Time Value_Type.Date_Time . should_equal Value_Type.Mixed
Value_Type_Helpers.reconcile_types Value_Type.Float Value_Type.Integer . should_equal Value_Type.Float
Value_Type_Helpers.reconcile_types Value_Type.Char Value_Type.Integer . should_equal Value_Type.Mixed
Value_Type_Helpers.reconcile_types Value_Type.Float Value_Type.Char . should_equal Value_Type.Mixed
Value_Type_Helpers.reconcile_types Value_Type.Float Value_Type.Binary . should_equal Value_Type.Mixed
Value_Type_Helpers.reconcile_types Value_Type.Char Value_Type.Binary . should_equal Value_Type.Mixed
Value_Type_Helpers.reconcile_types Value_Type.Char Value_Type.Boolean . should_equal Value_Type.Mixed
main = Test_Suite.run_main spec

View File

@ -89,7 +89,7 @@ visualization_spec connection =
Test.specify "should visualize value type info" <|
Value_Type.Boolean.to_json . should_equal '{"type":"Value_Type","constructor":"Boolean","_display_text_":"Boolean"}'
Value_Type.Float.to_json . should_equal '{"type":"Value_Type","constructor":"Float","_display_text_":"Float (64 bits)","bits":64}'
Value_Type.Decimal.to_json . should_equal '{"type":"Value_Type","constructor":"Decimal","_display_text_":"Decimal (precision=Nothing, scale=0)","precision":null,"scale":0}'
Value_Type.Decimal.to_json . should_equal '{"type":"Value_Type","constructor":"Decimal","_display_text_":"Decimal (precision=Nothing, scale=Nothing)","precision":null,"scale":null}'
Value_Type.Char.to_json . should_equal '{"type":"Value_Type","constructor":"Char","_display_text_":"Char (max_size=Nothing, variable_length=True)","size":null,"variable_length":true}'
Value_Type.Unsupported_Data_Type.to_json . should_equal '{"type":"Value_Type","constructor":"Unsupported_Data_Type","_display_text_":"Unsupported_Data_Type","type_name":null}'