Merge branch 'develop' into wip/gmt/9674-f-parse

Gregory Travis 2024-10-07 11:42:00 -04:00
commit 78099533a3
83 changed files with 1687 additions and 639 deletions

View File

@ -68,10 +68,12 @@
- [Support for creating Atoms in expressions.][10820]
- [IO.print without new line][10858]
- [Add `Text.to_decimal`.][10874]
- [Added .floor, .ceil, .trunc to the in-memory `Decimal` column.][10887]
- [Added `floor`, `ceil`, `trunc` to the in-memory `Decimal` column.][10887]
- [Added vectorized .round to the in-memory `Decimal` column.][10912]
- [`select_into_database_table` no longer defaults the primary key to the first
column.][11120]
- [Extend the range of `floor`, `ceil`, `trunc` to values outside the `Long`
range.][11135]
- [Added `format` parameter to `Decimal.parse`.][11205]
- [Added `format` parameter to `Float.parse`.][11229]
@ -86,6 +88,7 @@
[10887]: https://github.com/enso-org/enso/pull/10887
[10912]: https://github.com/enso-org/enso/pull/10912
[11120]: https://github.com/enso-org/enso/pull/11120
[11135]: https://github.com/enso-org/enso/pull/11135
[11205]: https://github.com/enso-org/enso/pull/11205
[11229]: https://github.com/enso-org/enso/pull/11229

View File

@ -6,6 +6,7 @@ from Standard.Table import Aggregate_Column, Value_Type
import Standard.Database.Connection.Connection.Connection
import Standard.Database.DB_Column.DB_Column
import Standard.Database.Dialect
import Standard.Database.Feature.Feature
import Standard.Database.Internal.Base_Generator
import Standard.Database.Internal.Column_Fetcher as Column_Fetcher_Module
import Standard.Database.Internal.Column_Fetcher.Column_Fetcher
@ -186,9 +187,14 @@ type Redshift_Dialect
## PRIVATE
Checks if an operation is supported by the dialect.
is_supported : Text -> Boolean
is_supported self operation =
self.dialect_operations.is_supported operation
is_operation_supported self operation:Text -> Boolean =
self.dialect_operations.is_operation_supported operation
## PRIVATE
Checks if a feature is supported by the dialect.
is_feature_supported self feature:Feature -> Boolean =
_ = feature
True
## PRIVATE
The default table types to use when listing tables.
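For illustration only: a minimal sketch of how a backend might implement the new feature check, assuming a hypothetical My_Dialect type (the `Feature` constructors such as `Feature.Cross_Join` are the ones used in the `DB_Table` changes further below):

    ## PRIVATE
       A hypothetical dialect that supports every feature except cross joins.
    type My_Dialect
        is_feature_supported self feature:Feature -> Boolean =
            case feature of
                Feature.Cross_Join -> False
                _ -> True

Splitting `is_operation_supported` (keyed by operation name) from `is_feature_supported` (keyed by the new `Feature` type) lets a dialect like Redshift keep answering `True` for whole features while still vetoing individual SQL operations.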

View File

@ -1,5 +1,6 @@
from Standard.Base import all
import Standard.Base.Errors.Common.Missing_Argument
import Standard.Base.Errors.Common.Type_Error
import Standard.Base.Metadata.Widget.Text_Input
import Standard.Database.Connection.Client_Certificate.Client_Certificate
@ -36,7 +37,7 @@ type Redshift_Details
Attempt to resolve the constructor.
resolve : Function -> Redshift_Details | Nothing
resolve constructor =
Panic.catch Any (constructor:Redshift_Details) _->Nothing
Panic.catch Type_Error (constructor:Redshift_Details) _->Nothing
## PRIVATE
Build the Connection resource.
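The narrowed catch above changes what counts as "not resolvable": the type ascription `(constructor:Redshift_Details)` panics with `Type_Error` when the value is not a `Redshift_Details`, and catching only `Type_Error` (instead of `Any`) lets unrelated panics raised while evaluating the constructor propagate rather than being silently mapped to `Nothing`. A self-contained sketch of the same pattern, using a hypothetical `Point` type:

    from Standard.Base import all
    import Standard.Base.Errors.Common.Type_Error

    type Point
        Value x y

    ## Resolve `constructor` to a `Point`, or `Nothing` if it is not one.
    resolve constructor =
        Panic.catch Type_Error (constructor:Point) _->Nothing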

View File

@ -9,6 +9,7 @@ import project.Data.Vector.Vector
import project.Error.Error
import project.Errors.Common.Incomparable_Values
import project.Errors.Common.Missing_Argument
import project.Errors.Common.Type_Error
import project.Errors.Illegal_Argument.Illegal_Argument
import project.Function.Function
import project.Meta
@ -167,7 +168,7 @@ type Filter_Condition
resolve_auto_scoped filter =
resolve filter:Filter_Condition = filter
case filter of
_ : Function -> Panic.catch Any (resolve filter) _->filter
_ : Function -> Panic.catch Type_Error (resolve filter) _->filter
_ : Filter_Condition -> filter
_ -> Panic.throw (Illegal_Argument.Error "The filter condition can either be a Function or a Filter_Condition, but got: "+filter.to_display_text)
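As a usage note, `resolve_auto_scoped` is what lets an auto-scoped constructor stand in for a fully written `Filter_Condition`; with the narrowed catch, only a genuine `Type_Error` from the ascription falls back to returning the function unchanged. A hedged example, assuming the `Filter_Condition.Equal` constructor used elsewhere in this diff:

    # Resolves the auto-scoped `..Equal 5` to `Filter_Condition.Equal 5`.
    cond = Filter_Condition.resolve_auto_scoped (..Equal 5)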

View File

@ -34,6 +34,7 @@ import project.System.File.File
from project.Data.Boolean import Boolean, False, True
from project.Data.Text.Extensions import all
from project.Enso_Cloud.Public_Utils import get_required_field
from project.Logging import all
polyglot java import org.enso.base.enso_cloud.AuthenticationProvider
@ -67,9 +68,11 @@ type Authentication_Service
## PRIVATE
get_access_token self -> Text =
is_still_valid = self.auth_data.get.expire_at > (Date_Time.now + token_early_refresh_period)
expiry_date = self.auth_data.get.expire_at
is_still_valid = expiry_date > (Date_Time.now + token_early_refresh_period)
if is_still_valid then self.auth_data.get.access_token else
# The token has expired or will expire soon, so we need to refresh it.
Authentication_Service.log_message "Access token expires at "+expiry_date.to_display_text+", so we are refreshing it."
self.force_refresh
self.auth_data.get.access_token
@ -139,9 +142,11 @@ type Refresh_Token_Data
payload = JS_Object.from_pairs [["ClientId", self.client_id], ["AuthFlow", "REFRESH_TOKEN_AUTH"], ["AuthParameters", auth_parameters]]
response = Context.Output.with_enabled <| HTTP.post self.refresh_url body=(Request_Body.Json payload) headers=headers
. catch HTTP_Error error-> case error of
HTTP_Error.Status_Error status _ _ ->
# If the status code is 400-499, then most likely the reason is that the session has expired, so we ask the user to log in again.
if (400 <= status.code) && (status.code < 500) then Panic.throw (Cloud_Session_Expired.Error error) else
HTTP_Error.Status_Error status message _ ->
Authentication_Service.log_message level=..Warning "Refresh token request failed with status "+status.to_text+": "+(message.if_nothing "<no message>")+"."
# As per OAuth specification, an expired refresh token should result in a 401 status code: https://www.rfc-editor.org/rfc/rfc6750.html#section-3.1
if status.code == 401 then Panic.throw (Cloud_Session_Expired.Error error) else
# Otherwise, we fail with the generic error that gives more details.
Panic.throw (Enso_Cloud_Error.Connection_Error error)
_ -> Panic.throw (Enso_Cloud_Error.Connection_Error error)
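The behavioral change above: previously any 4xx response to a token refresh was treated as an expired session, while now, following RFC 6750, only 401 is; other failures surface as a generic connection error alongside the newly logged status and message. The new dispatch, restated as a standalone sketch (names as in the diff above):

    ## Classify a failed token-refresh response, per RFC 6750:
       a 401 means the refresh token itself has expired.
    classify_refresh_failure status error =
        if status.code == 401 then Panic.throw (Cloud_Session_Expired.Error error) else
            Panic.throw (Enso_Cloud_Error.Connection_Error error)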

View File

@ -29,7 +29,8 @@ import project.System.File.File
from project.Data.Boolean import Boolean, False, True
from project.Data.Json.Extensions import all
polyglot java import java.lang.Exception as JException
polyglot java import java.lang.IllegalArgumentException
polyglot java import java.io.IOException
polyglot java import java.net.http.HttpClient
polyglot java import java.net.http.HttpClient.Builder as ClientBuilder
polyglot java import java.net.http.HttpClient.Redirect
@ -112,7 +113,7 @@ type HTTP
handler caught_panic =
exception = caught_panic.payload
Error.throw (Request_Error.Error (Meta.type_of exception . to_text) exception.getMessage)
Panic.catch JException handler=handler
Panic.catch IllegalArgumentException handler=handler <| Panic.catch IOException handler=handler
handle_request_error <| Illegal_Argument.handle_java_exception <| check_output_context <|
headers = resolve_headers req
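The change above replaces a blanket catch of `java.lang.Exception` with two targeted catches. Since `Panic.catch` takes a single panic type, handling several Java exception types with one handler is done by nesting the calls; a minimal sketch of the nesting, with `action` standing in for the guarded request:

    handler caught_panic =
        exception = caught_panic.payload
        Error.throw (Request_Error.Error (Meta.type_of exception . to_text) exception.getMessage)
    # The inner catch handles IOException; the outer, IllegalArgumentException.
    # Any other Java exception now propagates as a panic instead of being
    # converted into a Request_Error.
    Panic.catch IllegalArgumentException handler=handler <|
        Panic.catch IOException handler=handler <|
            action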

View File

@ -7,6 +7,7 @@ import project.Data.Text.Encoding.Encoding
import project.Data.Text.Text
import project.Data.Vector.Vector
import project.Error.Error
import project.Errors.Common.Type_Error
import project.Errors.File_Error.File_Error
import project.Errors.Illegal_Argument.Illegal_Argument
import project.Errors.Problem_Behavior.Problem_Behavior
@ -140,7 +141,7 @@ type Plain_Text_Format
Resolve an unresolved constructor to the actual type.
resolve : Function -> Plain_Text_Format | Nothing
resolve constructor =
Panic.catch Any (constructor:Plain_Text_Format) _->Nothing
Panic.catch Type_Error (constructor:Plain_Text_Format) _->Nothing
## PRIVATE
If the File_Format supports reading from the file, return a configured instance.

View File

@ -0,0 +1 @@
C:\Code\enso\distribution\lib\Standard\Database\0.0.0-dev\src\Feature.enso

View File

@ -1,5 +1,6 @@
from Standard.Base import all
import Standard.Base.Data.Numbers.Number_Parse_Error
import Standard.Base.Errors.Common.Type_Error
import Standard.Base.Errors.Illegal_State.Illegal_State
import project.Connection.Client_Certificate.Client_Certificate
@ -34,7 +35,7 @@ type Postgres
Attempt to resolve the constructor.
resolve : Function -> Postgres | Nothing
resolve constructor =
Panic.catch Any (constructor:Postgres) _->Nothing
Panic.catch Type_Error (constructor:Postgres) _->Nothing
## PRIVATE
Build the Connection resource.

View File

@ -1,5 +1,6 @@
from Standard.Base import all
import Standard.Base.Errors.Common.Missing_Argument
import Standard.Base.Errors.Common.Type_Error
import project.Connection.Connection_Options.Connection_Options
import project.Connection.SQLite_Connection.SQLite_Connection
@ -18,7 +19,7 @@ type SQLite
Attempt to resolve the constructor.
resolve : Function -> SQLite | Nothing
resolve constructor =
Panic.catch Any (constructor:SQLite) _->Nothing
Panic.catch Type_Error (constructor:SQLite) _->Nothing
## PRIVATE
Build the Connection resource.

View File

@ -1,4 +1,5 @@
from Standard.Base import all
import Standard.Base.Errors.Common.Type_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.System.File.Generic.Writable_File.Writable_File
import Standard.Base.System.File_Format_Metadata.File_Format_Metadata
@ -21,7 +22,7 @@ type SQLite_Format
Resolve an unresolved constructor to the actual type.
resolve : Function -> SQLite_Format | Nothing
resolve constructor =
Panic.catch Any (constructor:SQLite_Format) _->Nothing
Panic.catch Type_Error (constructor:SQLite_Format) _->Nothing
## PRIVATE
If the File_Format supports reading from the file, return a configured instance.

View File

@ -327,7 +327,7 @@ type DB_Column
- metadata: Optional metadata for the `SQL_Expression.Operation`.
make_op : Text -> Vector Text -> (Text | Nothing) -> (Any | Nothing) -> DB_Column
make_op self op_kind operands new_name metadata=Nothing =
checked_support = if self.connection.dialect.is_supported op_kind then True else
checked_support = if self.connection.dialect.is_operation_supported op_kind then True else
Error.throw (Unsupported_Database_Operation.Error op_kind)
checked_support.if_not_error <|
type_mapping = self.connection.dialect.get_type_mapping
@ -1047,7 +1047,7 @@ type DB_Column
False -> case precise_value_type == Value_Type.Date_Time of
True ->
op = "date_trunc_to_day"
case self.connection.dialect.is_supported op of
case self.connection.dialect.is_operation_supported op of
True ->
self.make_unary_op op new_name
False ->
@ -1541,7 +1541,7 @@ type DB_Column
Location.Both -> "TRIM"
Location.Left -> "LTRIM"
Location.Right -> "RTRIM"
if self.connection.dialect.is_supported operator then self.make_binary_op operator what new_name else
if self.connection.dialect.is_operation_supported operator then self.make_binary_op operator what new_name else
Error.throw (Unsupported_Database_Operation.Error ("`trim "+where.to_text+"`"))
## GROUP Standard.Base.Text
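The `DB_Column` changes above gate individual SQL operations by name through the renamed `is_operation_supported`, while the `DB_Table` changes below gate whole methods on a `Feature` through `if_supported_else_throw`. A condensed sketch of the two forms, with hypothetical `trimmed` and `first_cell` helpers:

    ## Gate a single SQL operation by its name.
    trimmed column what =
        op = "TRIM"
        if column.connection.dialect.is_operation_supported op then column.make_binary_op op what "trimmed" else
            Error.throw (Unsupported_Database_Operation.Error op)

    ## Gate a whole method on a Feature, throwing for unsupported dialects.
    first_cell table =
        Feature.Column_Operations.if_supported_else_throw table.connection.dialect "first_cell" <|
            table.at 0 . at 0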

View File

@ -47,6 +47,7 @@ from Standard.Table.Table import make_fill_nothing_default_widget
import project.Connection.Connection.Connection
import project.DB_Column.DB_Column
import project.Feature.Feature
import project.Internal.Aggregate_Helper
import project.Internal.Base_Generator
import project.Internal.Common.Database_Join_Helper
@ -137,9 +138,11 @@ type DB_Table
- selector: The name or index of the column to get.
@selector Widget_Helpers.make_column_name_selector
at : Integer | Text -> DB_Column ! No_Such_Column | Index_Out_Of_Bounds
at self (selector:(Integer | Text)=0) = case selector of
_ : Integer -> self.make_column (self.internal_columns.at selector)
_ -> self.get selector (Error.throw (No_Such_Column.Error selector))
at self (selector:(Integer | Text)=0) =
Feature.Column_Operations.if_supported_else_throw self.connection.dialect "at" <|
case selector of
_ : Integer -> self.make_column (self.internal_columns.at selector)
_ -> self.get selector (Error.throw (No_Such_Column.Error selector))
## ICON select_column
Returns the column with the given name or index.
@ -150,10 +153,11 @@ type DB_Table
@selector Widget_Helpers.make_column_name_selector
get : Integer | Text -> Any -> DB_Column | Any
get self (selector:(Integer | Text)=0) ~if_missing=Nothing =
internal_column = case selector of
_ : Integer -> self.internal_columns.get selector if_missing=Nothing
_ : Text -> self.internal_columns.find (p -> p.name == selector) if_missing=Nothing
if internal_column.is_nothing then if_missing else self.make_column internal_column
Feature.Column_Operations.if_supported_else_throw self.connection.dialect "get" <|
internal_column = case selector of
_ : Integer -> self.internal_columns.get selector if_missing=Nothing
_ : Text -> self.internal_columns.find (p -> p.name == selector) if_missing=Nothing
if internal_column.is_nothing then if_missing else self.make_column internal_column
## ALIAS cell value, get cell
GROUP Standard.Base.Selections
@ -168,8 +172,9 @@ type DB_Table
@if_missing (make_any_selector add_text=True add_regex=True add_number=True add_boolean=True add_named_pattern=True add_date=True add_time=True add_date_time=True add_nothing=True)
get_value : Text | Integer -> Integer -> Any -> Any
get_value self selector:(Text | Integer)=0 index:Integer=0 ~if_missing=Nothing =
col = self.get selector if_missing=Nothing
if Nothing == col then if_missing else col.get index if_missing
Feature.Column_Operations.if_supported_else_throw self.connection.dialect "get_value" <|
col = self.get selector if_missing=Nothing
if Nothing == col then if_missing else col.get index if_missing
## ALIAS row
GROUP Standard.Base.Selections
@ -183,38 +188,47 @@ type DB_Table
- if_missing: The value to use if the selector isn't present.
get_row : Integer -> Any -> Any
get_row self index:Integer=0 ~if_missing=Nothing =
if index == -1 then self.last_row else
real_row = (if index < 0 then index + self.row_count else index)
if real_row < 0 then if_missing else
self.rows real_row+1 . get real_row if_missing
Feature.Sample.if_supported_else_throw self.connection.dialect "get_row" <|
if index == -1 then self.last_row else
real_row = (if index < 0 then index + self.row_count else index)
if real_row < 0 then if_missing else
self.rows real_row+1 . get real_row if_missing
## ALIAS first cell
GROUP Standard.Base.Selections
ICON local_scope4
Gets the top left value from the table.
first_value : Any ! Index_Out_Of_Bounds
first_value self = self.at 0 . at 0
first_value self =
Feature.Column_Operations.if_supported_else_throw self.connection.dialect "first_value" <|
self.at 0 . at 0
## ALIAS last cell
GROUP Standard.Base.Selections
ICON local_scope4
Gets the bottom right value from the table.
last_value : Any ! Index_Out_Of_Bounds
last_value self = self.last_row . at -1
last_value self =
Feature.Column_Operations.if_supported_else_throw self.connection.dialect "last_value" <|
self.last_row . at -1
## ALIAS first field
GROUP Standard.Base.Selections
ICON select_column
Gets the first column.
first_column : DB_Column ! Index_Out_Of_Bounds
first_column self = self.at 0
first_column self =
Feature.Column_Operations.if_supported_else_throw self.connection.dialect "first_column" <|
self.at 0
## ALIAS last field
GROUP Standard.Base.Selections
ICON select_column
Gets the last column.
last_column : DB_Column ! Index_Out_Of_Bounds
last_column self = self.at -1
last_column self =
Feature.Column_Operations.if_supported_else_throw self.connection.dialect "last_column" <|
self.at -1
## ALIAS field count
GROUP Standard.Base.Metadata
@ -284,8 +298,9 @@ type DB_Table
@columns (Widget_Helpers.make_column_name_multi_selector add_regex=True add_by_type=True)
select_columns : Vector (Integer | Text | Regex | By_Type) | Text | Integer | Regex | By_Type -> Boolean -> Case_Sensitivity -> Boolean -> Problem_Behavior -> DB_Table ! No_Output_Columns | Missing_Input_Columns
select_columns self (columns : (Vector | Text | Integer | Regex | By_Type) = [self.columns.first.name]) (reorder:Boolean=False) (case_sensitivity:Case_Sensitivity=..Default) (error_on_missing_columns:Boolean=True) (on_problems:Problem_Behavior=..Report_Warning) =
new_columns = self.columns_helper.select_columns columns case_sensitivity reorder error_on_missing_columns on_problems
self.updated_columns (new_columns.map _.as_internal)
Feature.Select_Columns.if_supported_else_throw self.connection.dialect "select_columns" <|
new_columns = self.columns_helper.select_columns columns case_sensitivity reorder error_on_missing_columns on_problems
self.updated_columns (new_columns.map _.as_internal)
## PRIVATE
ALIAS select fields by type
@ -302,9 +317,10 @@ type DB_Table
@types Widget_Helpers.make_value_type_vector_selector
select_columns_by_type : Vector Value_Type -> Boolean -> Table
select_columns_by_type self types:Vector strict:Boolean=False =
new_columns = self.columns_helper.select_by_type types strict
result = self.updated_columns (new_columns.map _.as_internal)
Warning.attach (Deprecated.Warning "Standard.Database.DB_Table.DB_Table" "select_columns_by_type" "Deprecated: use `select_columns` with a `By_Type` instead.") result
Feature.Select_Columns.if_supported_else_throw self.connection.dialect "select_columns_by_type" <|
new_columns = self.columns_helper.select_by_type types strict
result = self.updated_columns (new_columns.map _.as_internal)
Warning.attach (Deprecated.Warning "Standard.Database.DB_Table.DB_Table" "select_columns_by_type" "Deprecated: use `select_columns` with a `By_Type` instead.") result
## ALIAS drop fields, drop_columns, remove fields, select columns, select fields
GROUP Standard.Base.Selections
@ -362,8 +378,9 @@ type DB_Table
@columns (Widget_Helpers.make_column_name_multi_selector add_regex=True add_by_type=True)
remove_columns : Vector (Integer | Text | Regex | By_Type) | Text | Integer | Regex | By_Type -> Case_Sensitivity -> Boolean -> Problem_Behavior -> DB_Table ! No_Output_Columns | Missing_Input_Columns
remove_columns self (columns : (Vector | Text | Integer | Regex | By_Type) = [self.columns.first.name]) (case_sensitivity:Case_Sensitivity=..Default) (error_on_missing_columns:Boolean=False) (on_problems:Problem_Behavior=..Report_Warning) =
new_columns = self.columns_helper.remove_columns columns case_sensitivity error_on_missing_columns=error_on_missing_columns on_problems=on_problems
self.updated_columns (new_columns.map _.as_internal)
Feature.Select_Columns.if_supported_else_throw self.connection.dialect "remove_columns" <|
new_columns = self.columns_helper.remove_columns columns case_sensitivity error_on_missing_columns=error_on_missing_columns on_problems=on_problems
self.updated_columns (new_columns.map _.as_internal)
## PRIVATE
ALIAS remove fields by type, select columns by type, select fields by type
@ -380,9 +397,10 @@ type DB_Table
@types Widget_Helpers.make_value_type_vector_selector
remove_columns_by_type : Vector Value_Type -> Boolean -> Table
remove_columns_by_type self types:Vector strict:Boolean=False =
new_columns = self.columns_helper.remove_by_type types strict
result = self.updated_columns (new_columns.map _.as_internal)
Warning.attach (Deprecated.Warning "Standard.Database.DB_Table.DB_Table" "remove_columns_by_type" "Deprecated: use `remove_columns` with a `By_Type` instead.") result
Feature.Select_Columns.if_supported_else_throw self.connection.dialect "remove_columns_by_type" <|
new_columns = self.columns_helper.remove_by_type types strict
result = self.updated_columns (new_columns.map _.as_internal)
Warning.attach (Deprecated.Warning "Standard.Database.DB_Table.DB_Table" "remove_columns_by_type" "Deprecated: use `remove_columns` with a `By_Type` instead.") result
## ALIAS select_blank_fields, select_missing_columns, select_na
GROUP Standard.Base.Selections
@ -407,8 +425,9 @@ type DB_Table
table.select_blank_columns
select_blank_columns : Blank_Selector -> Boolean -> DB_Table
select_blank_columns self (when : Blank_Selector = ..All_Cells) treat_nans_as_blank:Boolean=False =
new_columns = self.columns_helper.select_blank_columns_helper when treat_nans_as_blank
self.updated_columns new_columns
Feature.Select_Columns.if_supported_else_throw self.connection.dialect "select_blank_columns" <|
new_columns = self.columns_helper.select_blank_columns_helper when treat_nans_as_blank
self.updated_columns new_columns
## ALIAS drop_missing_columns, drop_na, select_blank_columns, select_blank_fields, select_missing_columns, select_na
GROUP Standard.Base.Selections
@ -433,8 +452,9 @@ type DB_Table
table.remove_blank_columns
remove_blank_columns : Blank_Selector -> Boolean -> DB_Table
remove_blank_columns self (when : Blank_Selector = ..All_Cells) treat_nans_as_blank:Boolean=False =
new_columns = self.columns_helper.select_blank_columns_helper when treat_nans_as_blank invert_selection=True
self.updated_columns new_columns
Feature.Select_Columns.if_supported_else_throw self.connection.dialect "remove_blank_columns" <|
new_columns = self.columns_helper.select_blank_columns_helper when treat_nans_as_blank invert_selection=True
self.updated_columns new_columns
## GROUP Standard.Base.Selections
ICON select_column
@ -488,8 +508,9 @@ type DB_Table
@columns (Widget_Helpers.make_column_name_multi_selector add_regex=True add_by_type=True)
reorder_columns : Vector (Integer | Text | Regex | By_Type) | Text | Integer | Regex | By_Type -> Position -> Case_Sensitivity -> Boolean -> Problem_Behavior -> DB_Table ! Missing_Input_Columns
reorder_columns self (columns : (Vector | Text | Integer | Regex | By_Type) = [self.columns.first.name]) (position:Position=..Before_Other_Columns) (case_sensitivity:Case_Sensitivity=..Default) (error_on_missing_columns:Boolean=False) (on_problems:Problem_Behavior=..Report_Warning) =
new_columns = self.columns_helper.reorder_columns columns position case_sensitivity error_on_missing_columns on_problems
self.updated_columns (new_columns.map _.as_internal)
Feature.Select_Columns.if_supported_else_throw self.connection.dialect "reorder_columns" <|
new_columns = self.columns_helper.reorder_columns columns position case_sensitivity error_on_missing_columns on_problems
self.updated_columns (new_columns.map _.as_internal)
## GROUP Standard.Base.Selections
ICON select_column
@ -517,8 +538,9 @@ type DB_Table
table.reorder_columns Sort_Direction.Descending
sort_columns : Sort_Direction -> Text_Ordering -> DB_Table
sort_columns self order:Sort_Direction=..Ascending text_ordering:Text_Ordering=..Default =
new_columns = Table_Helpers.sort_columns internal_columns=self.internal_columns order text_ordering
self.updated_columns new_columns
Feature.Select_Columns.if_supported_else_throw self.connection.dialect "sort_columns" <|
new_columns = Table_Helpers.sort_columns internal_columns=self.internal_columns order text_ordering
self.updated_columns new_columns
## GROUP Standard.Base.Metadata
ICON table_edit
@ -587,14 +609,16 @@ type DB_Table
table.rename_columns (Dictionary.from_vector [["name=(.*)".to_regex, "key:$1"]])
@column_map Widget_Helpers.make_rename_name_vector_selector
rename_columns : Table | Dictionary (Text | Integer | Regex) Text | Vector Text | Vector Vector -> Case_Sensitivity -> Boolean -> Problem_Behavior -> DB_Table ! Missing_Input_Columns | Ambiguous_Column_Rename | Too_Many_Column_Names_Provided | Invalid_Column_Names | Duplicate_Output_Column_Names
rename_columns self (column_map:(Table | Dictionary | Vector)=["Column"]) (case_sensitivity:Case_Sensitivity=..Default) (error_on_missing_columns:Boolean=True) (on_problems:Problem_Behavior=..Report_Warning) = case column_map of
_ : Table ->
resolved = Table_Helpers.read_name_mapping_from_table column_map
self.rename_columns resolved case_sensitivity error_on_missing_columns on_problems
_ ->
new_names = Table_Helpers.rename_columns self.column_naming_helper self.internal_columns column_map case_sensitivity error_on_missing_columns on_problems
Warning.with_suspended new_names names->
self.updated_columns (self.internal_columns.map c-> c.rename (names.at c.name))
rename_columns self (column_map:(Table | Dictionary | Vector)=["Column"]) (case_sensitivity:Case_Sensitivity=..Default) (error_on_missing_columns:Boolean=True) (on_problems:Problem_Behavior=..Report_Warning) =
Feature.Select_Columns.if_supported_else_throw self.connection.dialect "rename_columns" <|
case column_map of
_ : Table ->
resolved = Table_Helpers.read_name_mapping_from_table column_map
self.rename_columns resolved case_sensitivity error_on_missing_columns on_problems
_ ->
new_names = Table_Helpers.rename_columns self.column_naming_helper self.internal_columns column_map case_sensitivity error_on_missing_columns on_problems
Warning.with_suspended new_names names->
self.updated_columns (self.internal_columns.map c-> c.rename (names.at c.name))
## ALIAS rename, header
GROUP Standard.Base.Metadata
@ -631,11 +655,13 @@ type DB_Table
If instead of a name, a column is provided, it is returned as-is as long
as it comes from the same context.
resolve : Text | DB_Column -> DB_Column
resolve self column = case column of
_ : Text -> Panic.rethrow (self.at column)
_ ->
if Helpers.check_integrity self column then column else
Panic.throw (Integrity_Error.Error "DB_Column "+column.name)
resolve self column =
Feature.Select_Columns.if_supported_else_throw self.connection.dialect "resolve" <|
case column of
_ : Text -> Panic.rethrow (self.at column)
_ ->
if Helpers.check_integrity self column then column else
Panic.throw (Integrity_Error.Error "DB_Column "+column.name)
## ALIAS filter rows, where
GROUP Standard.Base.Selections
@ -689,27 +715,29 @@ type DB_Table
@column (Widget_Helpers.make_column_name_selector add_expression=True)
@filter Widget_Helpers.make_filter_condition_selector
filter : (DB_Column | Text | Integer) -> (Filter_Condition | (Any -> Boolean)) -> Problem_Behavior -> DB_Table ! No_Such_Column | Index_Out_Of_Bounds | Invalid_Value_Type
filter self column (filter : Filter_Condition | (Any -> Boolean) = Filter_Condition.Equal True) on_problems:Problem_Behavior=..Report_Warning = case column of
_ : DB_Column ->
mask filter_column = case Helpers.check_integrity self filter_column of
False ->
Error.throw (Integrity_Error.Error "DB_Column "+filter_column.name)
True ->
new_filters = self.context.where_filters + [filter_column.expression]
new_ctx = self.context.set_where_filters new_filters
self.updated_context new_ctx
filter self column (filter : Filter_Condition | (Any -> Boolean) = Filter_Condition.Equal True) on_problems:Problem_Behavior=..Report_Warning =
Feature.Filter.if_supported_else_throw self.connection.dialect "filter" <|
case column of
_ : DB_Column ->
mask filter_column = case Helpers.check_integrity self filter_column of
False ->
Error.throw (Integrity_Error.Error "DB_Column "+filter_column.name)
True ->
new_filters = self.context.where_filters + [filter_column.expression]
new_ctx = self.context.set_where_filters new_filters
self.updated_context new_ctx
filter_condition = Filter_Condition.resolve_auto_scoped filter
case filter_condition of
_ : Filter_Condition ->
resolved = (self:Table_Ref).resolve_condition filter_condition
mask (make_filter_column column resolved on_problems)
_ : Function ->
Error.throw (Unsupported_Database_Operation.Error "Filtering with a custom predicate")
_ : Expression -> self.filter (self.evaluate_expression column on_problems) filter on_problems
_ ->
table_at = self.at column
self.filter table_at filter on_problems
filter_condition = Filter_Condition.resolve_auto_scoped filter
case filter_condition of
_ : Filter_Condition ->
resolved = (self:Table_Ref).resolve_condition filter_condition
mask (make_filter_column column resolved on_problems)
_ : Function ->
Error.throw (Unsupported_Database_Operation.Error "Filtering with a custom predicate")
_ : Expression -> self.filter (self.evaluate_expression column on_problems) filter on_problems
_ ->
table_at = self.at column
self.filter table_at filter on_problems
## PRIVATE
ALIAS filter rows
@ -747,9 +775,10 @@ type DB_Table
people.filter_by_expression "[age] % 10 == 0"
filter_by_expression : Text -> Problem_Behavior -> DB_Table ! No_Such_Column | Invalid_Value_Type | Expression_Error
filter_by_expression self expression:Text on_problems:Problem_Behavior=..Report_Warning =
column = self.evaluate_expression (Expression.Value expression) on_problems
result = self.filter column Filter_Condition.Is_True
Warning.attach (Deprecated.Warning "Standard.Database.DB_Table.DB_Table" "filter_by_expression" "Deprecated: use `filter` with an `Expression` instead.") result
Feature.Filter.if_supported_else_throw self.connection.dialect "filter_by_expression" <|
column = self.evaluate_expression (Expression.Value expression) on_problems
result = self.filter column Filter_Condition.Is_True
Warning.attach (Deprecated.Warning "Standard.Database.DB_Table.DB_Table" "filter_by_expression" "Deprecated: use `filter` with an `Expression` instead.") result
## ALIAS first, head, keep, last, limit, sample, slice, tail, top
GROUP Standard.Base.Selections
@ -781,7 +810,8 @@ type DB_Table
@range Index_Sub_Range.default_widget
take : (Index_Sub_Range | Range | Integer) -> DB_Table
take self range:(Index_Sub_Range | Range | Integer)=..First =
Take_Drop_Helpers.take_drop_helper Take_Drop.Take self range
Feature.Sample.if_supported_else_throw self.connection.dialect "take" <|
Take_Drop_Helpers.take_drop_helper Take_Drop.Take self range
## ALIAS remove, skip
GROUP Standard.Base.Selections
@ -813,12 +843,15 @@ type DB_Table
@range Index_Sub_Range.default_widget
drop : (Index_Sub_Range | Range | Integer) -> DB_Table
drop self range:(Index_Sub_Range | Range | Integer)=..First =
Take_Drop_Helpers.take_drop_helper Take_Drop.Drop self range
Feature.Sample.if_supported_else_throw self.connection.dialect "drop" <|
Take_Drop_Helpers.take_drop_helper Take_Drop.Drop self range
## PRIVATE
Filter out all rows.
remove_all_rows : DB_Table
remove_all_rows self = self.filter (Expression.Value "0==1")
remove_all_rows self =
Feature.Filter.if_supported_else_throw self.connection.dialect "remove_all_rows" <|
self.filter (Expression.Value "0==1")
## ALIAS add index column, rank, record id
GROUP Standard.Base.Values
@ -858,37 +891,38 @@ type DB_Table
@order_by (Widget_Helpers.make_order_by_selector display=..When_Modified)
add_row_number : Text -> Integer -> Integer -> Vector (Text | Integer | Regex) | Text | Integer | Regex -> Vector (Text | Sort_Column) | Text -> Problem_Behavior -> DB_Table
add_row_number self (name:Text="Row") (from:Integer=0) (step:Integer=1) (group_by:(Vector | Text | Integer | Regex)=[]) (order_by:(Vector | Text)=[]) (on_problems:Problem_Behavior=..Report_Warning) =
problem_builder = Problem_Builder.new error_on_missing_columns=True
grouping_columns = self.columns_helper.select_columns_helper group_by Case_Sensitivity.Default True problem_builder
grouping_columns.each column->
if column.value_type.is_floating_point then
problem_builder.report_other_warning (Floating_Point_Equality.Error column.name)
ordering = Table_Helpers.resolve_order_by self.columns order_by problem_builder
problem_builder.attach_problems_before on_problems <|
order_descriptors = case ordering.is_empty of
False -> ordering.map element->
column = element.column
associated_selector = element.associated_selector
self.connection.dialect.prepare_order_descriptor column associated_selector.direction text_ordering=Nothing
True -> case self.default_ordering of
Nothing -> Error.throw (Illegal_Argument.Error "No `order_by` is specified and the table has no existing ordering (e.g. from an `order_by` operation or a primary key). Some ordering is required for `add_row_number` in Database tables.")
descriptors -> descriptors
grouping_expressions = (grouping_columns.map _.as_internal).map .expression
Feature.Add_Row_Number.if_supported_else_throw self.connection.dialect "add_row_number" <|
problem_builder = Problem_Builder.new error_on_missing_columns=True
grouping_columns = self.columns_helper.select_columns_helper group_by Case_Sensitivity.Default True problem_builder
grouping_columns.each column->
if column.value_type.is_floating_point then
problem_builder.report_other_warning (Floating_Point_Equality.Error column.name)
ordering = Table_Helpers.resolve_order_by self.columns order_by problem_builder
problem_builder.attach_problems_before on_problems <|
order_descriptors = case ordering.is_empty of
False -> ordering.map element->
column = element.column
associated_selector = element.associated_selector
self.connection.dialect.prepare_order_descriptor column associated_selector.direction text_ordering=Nothing
True -> case self.default_ordering of
Nothing -> Error.throw (Illegal_Argument.Error "No `order_by` is specified and the table has no existing ordering (e.g. from an `order_by` operation or a primary key). Some ordering is required for `add_row_number` in Database tables.")
descriptors -> descriptors
grouping_expressions = (grouping_columns.map _.as_internal).map .expression
new_expr = Row_Number_Helpers.make_row_number from step order_descriptors grouping_expressions
new_expr = Row_Number_Helpers.make_row_number from step order_descriptors grouping_expressions
type_mapping = self.connection.dialect.get_type_mapping
infer_from_database_callback expression =
SQL_Type_Reference.new self.connection self.context expression
new_type_ref = type_mapping.infer_return_type infer_from_database_callback "ROW_NUMBER" [] new_expr
type_mapping = self.connection.dialect.get_type_mapping
infer_from_database_callback expression =
SQL_Type_Reference.new self.connection self.context expression
new_type_ref = type_mapping.infer_return_type infer_from_database_callback "ROW_NUMBER" [] new_expr
new_column = Internal_Column.Value name new_type_ref new_expr
new_column = Internal_Column.Value name new_type_ref new_expr
rebuild_table columns =
self.updated_columns (columns.map .as_internal)
renamed_table = Add_Row_Number.rename_columns_if_needed self name on_problems rebuild_table
updated_table = renamed_table.updated_columns (renamed_table.internal_columns + [new_column])
updated_table.as_subquery
rebuild_table columns =
self.updated_columns (columns.map .as_internal)
renamed_table = Add_Row_Number.rename_columns_if_needed self name on_problems rebuild_table
updated_table = renamed_table.updated_columns (renamed_table.internal_columns + [new_column])
updated_table.as_subquery
## ALIAS order_by
@ -926,8 +960,9 @@ type DB_Table
t2.read
limit : Integer -> DB_Table
limit self max_rows:Integer=1000 =
new_ctx = self.context.set_limit max_rows
self.updated_context new_ctx
Feature.Sample.if_supported_else_throw self.connection.dialect "limit" <|
new_ctx = self.context.set_limit max_rows
self.updated_context new_ctx
## ALIAS add column, expression, formula, new column, update column
GROUP Standard.Base.Values
@ -975,45 +1010,46 @@ type DB_Table
@value Simple_Expression.default_widget
set : DB_Column | Text | Expression | Array | Vector | Range | Date_Range | Constant_Column | Simple_Expression -> Text -> Set_Mode -> Problem_Behavior -> DB_Table ! Existing_Column | Missing_Column | No_Such_Column | Expression_Error
set self value:(DB_Column | Text | Expression | Array | Vector | Range | Date_Range | Constant_Column | Simple_Expression) (as : Text = "") (set_mode : Set_Mode = ..Add_Or_Update) (on_problems : Problem_Behavior = ..Report_Warning) =
problem_builder = Problem_Builder.new
unique = self.column_naming_helper.create_unique_name_strategy
unique.mark_used self.column_names
Feature.Set.if_supported_else_throw self.connection.dialect "set" <|
problem_builder = Problem_Builder.new
unique = self.column_naming_helper.create_unique_name_strategy
unique.mark_used self.column_names
resolved = case value of
_ : Text -> self.make_constant_column value
_ : Expression -> self.evaluate_expression value on_problems
_ : DB_Column ->
if Helpers.check_integrity self value then value else
Error.throw (Integrity_Error.Error "Column "+value.name)
_ : Constant_Column -> self.make_constant_column value
_ : Simple_Expression -> value.evaluate self (set_mode==Set_Mode.Update && as=="") on_problems
_ : Vector -> Error.throw (Unsupported_Database_Operation.Error "`Vector` for `set`")
_ : Array -> Error.throw (Unsupported_Database_Operation.Error "`Array` for `set`")
_ : Range -> Error.throw (Unsupported_Database_Operation.Error "`Range` for `set`")
_ : Date_Range -> Error.throw (Unsupported_Database_Operation.Error "`Date_Range` for `set`")
_ -> Error.throw (Illegal_Argument.Error "Unsupported type for `DB_Table.set`.")
resolved = case value of
_ : Text -> self.make_constant_column value
_ : Expression -> self.evaluate_expression value on_problems
_ : DB_Column ->
if Helpers.check_integrity self value then value else
Error.throw (Integrity_Error.Error "Column "+value.name)
_ : Constant_Column -> self.make_constant_column value
_ : Simple_Expression -> value.evaluate self (set_mode==Set_Mode.Update && as=="") on_problems
_ : Vector -> Error.throw (Unsupported_Database_Operation.Error "`Vector` for `set`")
_ : Array -> Error.throw (Unsupported_Database_Operation.Error "`Array` for `set`")
_ : Range -> Error.throw (Unsupported_Database_Operation.Error "`Range` for `set`")
_ : Date_Range -> Error.throw (Unsupported_Database_Operation.Error "`Date_Range` for `set`")
_ -> Error.throw (Illegal_Argument.Error "Unsupported type for `DB_Table.set`.")
## If `as` was specified, use that. Otherwise, if `value` is a
`DB_Column`, use its name. In these two cases, do not make it unique.
Otherwise, make it unique. If set_mode is Update, however, do not
make it unique.
new_column_name = if as != "" then as else
if value.is_a DB_Column || set_mode==Set_Mode.Update || set_mode==Set_Mode.Add_Or_Update then resolved.name else unique.make_unique resolved.name
renamed = resolved.rename new_column_name
renamed.if_not_error <| self.column_naming_helper.check_ambiguity self.column_names renamed.name <|
index = self.internal_columns.index_of (c -> c.name == renamed.name)
check_add = case set_mode of
Set_Mode.Add_Or_Update -> True
Set_Mode.Add -> if index.is_nothing then True else Error.throw (Existing_Column.Error renamed.name)
Set_Mode.Update -> if index.is_nothing then Error.throw (Missing_Column.Error renamed.name) else True
new_table = check_add.if_not_error <|
new_col = renamed.as_internal
new_cols = if index.is_nothing then self.internal_columns + [new_col] else
Vector.new self.column_count i-> if i == index then new_col else self.internal_columns.at i
self.updated_columns new_cols
## If `as` was specified, use that. Otherwise, if `value` is a
`DB_Column`, use its name. In these two cases, do not make it unique.
Otherwise, make it unique. If set_mode is Update, however, do not
make it unique.
new_column_name = if as != "" then as else
if value.is_a DB_Column || set_mode==Set_Mode.Update || set_mode==Set_Mode.Add_Or_Update then resolved.name else unique.make_unique resolved.name
renamed = resolved.rename new_column_name
renamed.if_not_error <| self.column_naming_helper.check_ambiguity self.column_names renamed.name <|
index = self.internal_columns.index_of (c -> c.name == renamed.name)
check_add = case set_mode of
Set_Mode.Add_Or_Update -> True
Set_Mode.Add -> if index.is_nothing then True else Error.throw (Existing_Column.Error renamed.name)
Set_Mode.Update -> if index.is_nothing then Error.throw (Missing_Column.Error renamed.name) else True
new_table = check_add.if_not_error <|
new_col = renamed.as_internal
new_cols = if index.is_nothing then self.internal_columns + [new_col] else
Vector.new self.column_count i-> if i == index then new_col else self.internal_columns.at i
self.updated_columns new_cols
problem_builder.report_unique_name_strategy unique
problem_builder.attach_problems_after on_problems new_table
problem_builder.report_unique_name_strategy unique
problem_builder.attach_problems_after on_problems new_table
## PRIVATE
Given an expression, create a derived column where each value is the
@ -1039,16 +1075,18 @@ type DB_Table
- If more than 10 rows encounter computation issues,
an `Additional_Warnings`.
evaluate_expression : Text | Expression -> Problem_Behavior -> DB_Column ! No_Such_Column | Invalid_Value_Type | Expression_Error
evaluate_expression self expression:(Text | Expression) on_problems:Problem_Behavior=..Report_Warning = if expression.is_a Text then self.evaluate_expression (Expression.Value expression) on_problems else
get_column name = self.at name
make_constant_column value = case value of
_ : DB_Column -> value
_ -> self.make_constant_column value
new_column = Expression.evaluate expression get_column make_constant_column "Standard.Database.DB_Column" "DB_Column" DB_Column.var_args_functions
problems = Warning.get_all new_column . map .value
result = new_column.rename (self.connection.base_connection.column_naming_helper.sanitize_name expression.expression)
on_problems.attach_problems_before problems <|
Warning.set result []
evaluate_expression self expression:(Text | Expression) on_problems:Problem_Behavior=..Report_Warning =
Feature.Set.if_supported_else_throw self.connection.dialect "evaluate_expression" <|
if expression.is_a Text then self.evaluate_expression (Expression.Value expression) on_problems else
get_column name = self.at name
make_constant_column value = case value of
_ : DB_Column -> value
_ -> self.make_constant_column value
new_column = Expression.evaluate expression get_column make_constant_column "Standard.Database.DB_Column" "DB_Column" DB_Column.var_args_functions
problems = Warning.get_all new_column . map .value
result = new_column.rename (self.connection.base_connection.column_naming_helper.sanitize_name expression.expression)
on_problems.attach_problems_before problems <|
Warning.set result []
## PRIVATE
A helper that creates a two-column table from a Dictionary.
@ -1066,12 +1104,13 @@ type DB_Table
- value_column_name: The name to use for the second column.
make_table_from_dictionary : Dictionary Any Any -> Text -> Text -> Table
make_table_from_dictionary self dict key_column_name value_column_name =
total_size = dict.size * 2
Feature.Make_Table_From.if_supported_else_throw self.connection.dialect "make_table_from_dictionary" <|
total_size = dict.size * 2
if dict.is_empty then Error.throw (Illegal_Argument.Error "Dictionary cannot be empty") else
if total_size > MAX_LITERAL_ELEMENT_COUNT then Error.throw (Illegal_Argument.Error "Dictionary is too large ("+dict.size.to_text+" entries): materialize a table into the database instead") else
keys_and_values = dict.to_vector
self.make_table_from_vectors [keys_and_values.map .first, keys_and_values.map .second] [key_column_name, value_column_name]
if dict.is_empty then Error.throw (Illegal_Argument.Error "Dictionary cannot be empty") else
if total_size > MAX_LITERAL_ELEMENT_COUNT then Error.throw (Illegal_Argument.Error "Dictionary is too large ("+dict.size.to_text+" entries): materialize a table into the database instead") else
keys_and_values = dict.to_vector
self.make_table_from_vectors [keys_and_values.map .first, keys_and_values.map .second] [key_column_name, value_column_name]
## PRIVATE
A helper that creates a literal table from `Vector`s.
@ -1085,29 +1124,31 @@ type DB_Table
- column_names: The names of the columns of the new table.
make_table_from_vectors : Vector (Vector Any) -> Vector Text -> DB_Table
make_table_from_vectors self column_vectors column_names =
literal_table_name = self.connection.base_connection.table_naming_helper.generate_random_table_name "enso-literal-"
make_literal_table self.connection column_vectors column_names literal_table_name
Feature.Make_Table_From.if_supported_else_throw self.connection.dialect "make_table_from_vectors" <|
literal_table_name = self.connection.base_connection.table_naming_helper.generate_random_table_name "enso-literal-"
make_literal_table self.connection column_vectors column_names literal_table_name
## PRIVATE
Create a constant column from a value.
make_constant_column : Any -> DB_Column ! Illegal_Argument
make_constant_column self value =
if Table_Helpers.is_column value then Error.throw (Illegal_Argument.Error "A constant value may only be created from a scalar, not a DB_Column") else
type_mapping = self.connection.dialect.get_type_mapping
argument_value_type = Value_Type_Helpers.find_argument_type value
sql_type = case argument_value_type of
Nothing -> SQL_Type.null
_ -> type_mapping.value_type_to_sql argument_value_type Problem_Behavior.Ignore
expr = SQL_Expression.Constant value
new_type_ref = SQL_Type_Reference.from_constant sql_type
base_column = Internal_Column.Value value.pretty new_type_ref expr
needs_cast = argument_value_type.is_nothing.not && self.connection.dialect.needs_literal_table_cast argument_value_type
result_internal_column = if needs_cast.not then base_column else
infer_type_from_database new_expression =
SQL_Type_Reference.new self.connection self.context new_expression
self.connection.dialect.make_cast base_column sql_type infer_type_from_database
self.make_column result_internal_column
Feature.Column_Operations.if_supported_else_throw self.connection.dialect "make_constant_column" <|
if Table_Helpers.is_column value then Error.throw (Illegal_Argument.Error "A constant value may only be created from a scalar, not a DB_Column") else
type_mapping = self.connection.dialect.get_type_mapping
argument_value_type = Value_Type_Helpers.find_argument_type value
sql_type = case argument_value_type of
Nothing -> SQL_Type.null
_ -> type_mapping.value_type_to_sql argument_value_type Problem_Behavior.Ignore
expr = SQL_Expression.Constant value
new_type_ref = SQL_Type_Reference.from_constant sql_type
base_column = Internal_Column.Value value.pretty new_type_ref expr
needs_cast = argument_value_type.is_nothing.not && self.connection.dialect.needs_literal_table_cast argument_value_type
result_internal_column = if needs_cast.not then base_column else
infer_type_from_database new_expression =
SQL_Type_Reference.new self.connection self.context new_expression
self.connection.dialect.make_cast base_column sql_type infer_type_from_database
self.make_column result_internal_column
## PRIVATE
Create a unique temporary column name.
@ -1134,9 +1175,11 @@ type DB_Table
## ICON convert
Returns the vector of columns contained in this table.
columns : Vector DB_Column
columns self = Vector.from_polyglot_array <|
Array_Proxy.new self.internal_columns.length i->
self.make_column (self.internal_columns.at i)
columns self =
Feature.Column_Operations.if_supported_else_throw self.connection.dialect "columns" <|
Vector.from_polyglot_array <|
Array_Proxy.new self.internal_columns.length i->
self.make_column (self.internal_columns.at i)
## GROUP Standard.Base.Metadata
ICON metadata
@ -1246,16 +1289,17 @@ type DB_Table
@columns Widget_Helpers.make_order_by_selector
sort : Vector (Text | Sort_Column) | Text -> Text_Ordering -> Boolean -> Problem_Behavior -> DB_Table ! Incomparable_Values | No_Input_Columns_Selected | Missing_Input_Columns
sort self (columns = ([(Sort_Column.Name (self.columns.at 0 . name))])) text_ordering:Text_Ordering=..Default error_on_missing_columns:Boolean=True on_problems:Problem_Behavior=..Report_Warning =
problem_builder = Problem_Builder.new error_on_missing_columns=error_on_missing_columns types_to_always_throw=[No_Input_Columns_Selected]
columns_for_ordering = Table_Helpers.prepare_order_by self.columns columns problem_builder
problem_builder.attach_problems_before on_problems <|
new_order_descriptors = columns_for_ordering.map selected_column->
column = selected_column.column
associated_selector = selected_column.associated_selector
effective_text_ordering = if column.value_type.is_text then text_ordering else Nothing
self.connection.dialect.prepare_order_descriptor column associated_selector.direction effective_text_ordering
new_ctx = self.context.add_orders new_order_descriptors
self.updated_context new_ctx
Feature.Sort.if_supported_else_throw self.connection.dialect "sort" <|
problem_builder = Problem_Builder.new error_on_missing_columns=error_on_missing_columns types_to_always_throw=[No_Input_Columns_Selected]
columns_for_ordering = Table_Helpers.prepare_order_by self.columns columns problem_builder
problem_builder.attach_problems_before on_problems <|
new_order_descriptors = columns_for_ordering.map selected_column->
column = selected_column.column
associated_selector = selected_column.associated_selector
effective_text_ordering = if column.value_type.is_text then text_ordering else Nothing
self.connection.dialect.prepare_order_descriptor column associated_selector.direction effective_text_ordering
new_ctx = self.context.add_orders new_order_descriptors
self.updated_context new_ctx
## PRIVATE
GROUP Standard.Base.Selections
@ -1336,12 +1380,13 @@ type DB_Table
@columns Widget_Helpers.make_column_name_multi_selector
distinct : Vector (Integer | Text | Regex) | Text | Integer | Regex -> Case_Sensitivity -> Problem_Behavior -> DB_Table ! No_Output_Columns | Missing_Input_Columns | No_Input_Columns_Selected | Floating_Point_Equality
distinct self columns=self.column_names case_sensitivity:Case_Sensitivity=..Default on_problems:Problem_Behavior=..Report_Warning =
key_columns = self.columns_helper.select_columns columns Case_Sensitivity.Default reorder=True error_on_missing_columns=True on_problems=on_problems . catch No_Output_Columns _->
Error.throw No_Input_Columns_Selected
key_columns.if_not_error <|
problem_builder = Problem_Builder.new
new_table = self.connection.dialect.prepare_distinct self key_columns case_sensitivity problem_builder
problem_builder.attach_problems_before on_problems new_table
Feature.Distinct.if_supported_else_throw self.connection.dialect "distinct" <|
key_columns = self.columns_helper.select_columns columns Case_Sensitivity.Default reorder=True error_on_missing_columns=True on_problems=on_problems . catch No_Output_Columns _->
Error.throw No_Input_Columns_Selected
key_columns.if_not_error <|
problem_builder = Problem_Builder.new
new_table = self.connection.dialect.prepare_distinct self key_columns case_sensitivity problem_builder
problem_builder.attach_problems_before on_problems new_table
## GROUP Standard.Base.Selections
ICON preparation
@ -1454,7 +1499,8 @@ type DB_Table
@on Widget_Helpers.make_join_condition_selector
join : DB_Table -> Join_Kind -> Join_Condition | Text | Vector (Join_Condition | Text) -> Text -> Problem_Behavior -> DB_Table
join self right (join_kind : Join_Kind = ..Left_Outer) (on : Join_Condition | Text | Vector (Join_Condition | Text) = (default_join_condition self join_kind)) (right_prefix:Text="Right ") (on_problems:Problem_Behavior=..Report_Warning) =
self.join_or_cross_join right join_kind on right_prefix on_problems
Feature.Join.if_supported_else_throw self.connection.dialect "join" <|
self.join_or_cross_join right join_kind on right_prefix on_problems
## PRIVATE
Implementation of both `join` and `cross_join`.
@ -1559,12 +1605,13 @@ type DB_Table
The ordering of rows in the resulting table is not specified.
cross_join : DB_Table -> Integer | Nothing -> Text -> Problem_Behavior -> DB_Table
cross_join self right:DB_Table right_row_limit=100 right_prefix:Text="Right " on_problems:Problem_Behavior=..Report_Warning =
limit_problems = case right_row_limit.is_nothing.not && (right.row_count > right_row_limit) of
True ->
[Cross_Join_Row_Limit_Exceeded.Error right_row_limit right.row_count]
False -> []
on_problems.attach_problems_before limit_problems <|
self.join_or_cross_join right join_kind=Join_Kind_Cross on=[] right_prefix on_problems
Feature.Cross_Join.if_supported_else_throw self.connection.dialect "cross_join" <|
limit_problems = case right_row_limit.is_nothing.not && (right.row_count > right_row_limit) of
True ->
[Cross_Join_Row_Limit_Exceeded.Error right_row_limit right.row_count]
False -> []
on_problems.attach_problems_before limit_problems <|
self.join_or_cross_join right join_kind=Join_Kind_Cross on=[] right_prefix on_problems
## ALIAS lookup
GROUP Standard.Base.Calculations
@ -1624,8 +1671,9 @@ type DB_Table
@key_columns Widget_Helpers.make_column_name_multi_selector
merge : DB_Table -> (Vector (Integer | Text | Regex) | Text | Integer | Regex) -> Boolean -> Boolean -> Problem_Behavior -> DB_Table ! Missing_Input_Columns | Non_Unique_Key | Unmatched_Rows_In_Lookup
merge self lookup_table:DB_Table key_columns:(Vector (Integer | Text | Regex) | Text | Integer | Regex) add_new_columns:Boolean=False allow_unmatched_rows:Boolean=True on_problems:Problem_Behavior=..Report_Warning =
Helpers.ensure_same_connection "table" [self, lookup_table] <|
Lookup_Query_Helper.build_lookup_query self lookup_table key_columns add_new_columns allow_unmatched_rows on_problems
Feature.Merge.if_supported_else_throw self.connection.dialect "merge" <|
Helpers.ensure_same_connection "table" [self, lookup_table] <|
Lookup_Query_Helper.build_lookup_query self lookup_table key_columns add_new_columns allow_unmatched_rows on_problems
## ALIAS find replace
GROUP Standard.Base.Text
@ -1708,7 +1756,8 @@ type DB_Table
@to_column Widget.Text_Input
replace : (DB_Table | Dictionary) -> Vector (Integer | Text | Regex | By_Type) | Text | Integer | Regex | By_Type -> (Text | Integer | Nothing) -> (Text | Integer | Nothing) -> Boolean -> Problem_Behavior -> DB_Table ! Missing_Input_Columns | Non_Unique_Key | Unmatched_Rows_In_Lookup
replace self lookup_table:(DB_Table | Dictionary) columns:(Vector (Integer | Text | Regex | By_Type) | Text | Integer | Regex | By_Type) from_column:(Text | Integer | Nothing)=Nothing to_column:(Text | Integer | Nothing)=Nothing allow_unmatched_rows:Boolean=True on_problems:Problem_Behavior=..Report_Warning =
Replace_Helpers.replace self lookup_table columns from_column to_column allow_unmatched_rows on_problems
Feature.Replace.if_supported_else_throw self.connection.dialect "replace" <|
Replace_Helpers.replace self lookup_table columns from_column to_column allow_unmatched_rows on_problems
## ALIAS join by row position
GROUP Standard.Base.Calculations
@ -1842,86 +1891,87 @@ type DB_Table
@columns_to_keep Columns_To_Keep.default_widget
union : (DB_Table | Vector DB_Table) -> Columns_To_Keep -> Match_Columns -> Problem_Behavior -> DB_Table
union self tables:(DB_Table | Vector) (columns_to_keep : Columns_To_Keep = ..In_Any_Warn_On_Missing) (match_columns : Match_Columns = ..By_Name) (on_problems : Problem_Behavior = ..Report_Warning) =
all_tables = case tables of
v : Vector -> [self] + (v.map t-> DB_Table.from t)
single_table -> [self, single_table]
Helpers.ensure_same_connection "table" all_tables <|
## We keep separate problem builders, because if we are reporting `No_Output_Columns`,
we only want to add a cause coming from unification; matching reports problems that would not fit this error.
problem_builder_for_matching = Problem_Builder.new
problem_builder_for_unification = Problem_Builder.new
matched_column_sets = Match_Columns_Helpers.match_columns all_tables match_columns columns_to_keep problem_builder_for_matching
dialect = self.connection.dialect
type_mapping = dialect.get_type_mapping
merged_columns = matched_column_sets.map column_set->
sql_type_from_value_type value_type =
type_mapping.value_type_to_sql value_type Problem_Behavior.Report_Error . catch Inexact_Type_Coercion error->
Panic.throw <|
Illegal_State.Error "Unexpected inexact type coercion in Union. The union logic should only operate in types supported by the given backend. This is a bug in the Database library. The coercion was: "+error.to_display_text cause=error
case Table_Helpers.unify_result_type_for_union column_set all_tables problem_builder_for_unification of
Union_Result_Type.Common_Type common_type ->
[column_set, sql_type_from_value_type common_type, common_type]
Union_Result_Type.Fallback_To_Text ->
[column_set, sql_type_from_value_type Value_Type.Char, Value_Type.Char]
Union_Result_Type.No_Types_To_Unify ->
## If the column is all nulls, we still need to give it some type.
For DB `Mixed` is not available, so a portable type to use is `Char`.
[column_set, SQL_Type.null, Value_Type.Char]
Feature.Union.if_supported_else_throw self.connection.dialect "union" <|
all_tables = case tables of
v : Vector -> [self] + (v.map t-> DB_Table.from t)
single_table -> [self, single_table]
Helpers.ensure_same_connection "table" all_tables <|
## We keep separate problem builders, because if we are reporting `No_Output_Columns`,
we only want to add a cause coming from unification; matching reports problems that would not fit this error.
problem_builder_for_matching = Problem_Builder.new
problem_builder_for_unification = Problem_Builder.new
matched_column_sets = Match_Columns_Helpers.match_columns all_tables match_columns columns_to_keep problem_builder_for_matching
dialect = self.connection.dialect
type_mapping = dialect.get_type_mapping
merged_columns = matched_column_sets.map column_set->
sql_type_from_value_type value_type =
type_mapping.value_type_to_sql value_type Problem_Behavior.Report_Error . catch Inexact_Type_Coercion error->
Panic.throw <|
Illegal_State.Error "Unexpected inexact type coercion in Union. The union logic should only operate in types supported by the given backend. This is a bug in the Database library. The coercion was: "+error.to_display_text cause=error
case Table_Helpers.unify_result_type_for_union column_set all_tables problem_builder_for_unification of
Union_Result_Type.Common_Type common_type ->
[column_set, sql_type_from_value_type common_type, common_type]
Union_Result_Type.Fallback_To_Text ->
[column_set, sql_type_from_value_type Value_Type.Char, Value_Type.Char]
Union_Result_Type.No_Types_To_Unify ->
## If the column is all nulls, we still need to give it some type.
In the DB backend `Mixed` is not available, so a portable type to use is `Char`.
[column_set, SQL_Type.null, Value_Type.Char]
problem_builder_for_matching.attach_problems_before on_problems <| problem_builder_for_unification.attach_problems_before on_problems <|
if merged_columns.is_empty then problem_builder_for_unification.raise_no_output_columns_with_cause else
queries = all_tables.map_with_index i-> t->
columns_to_select = merged_columns.map description->
column_set = description.at 0
sql_type = description.at 1
problem_builder_for_matching.attach_problems_before on_problems <| problem_builder_for_unification.attach_problems_before on_problems <|
if merged_columns.is_empty then problem_builder_for_unification.raise_no_output_columns_with_cause else
queries = all_tables.map_with_index i-> t->
columns_to_select = merged_columns.map description->
column_set = description.at 0
sql_type = description.at 1
result_type = description.at 2
column_name = column_set.name
## We assume that the type for this expression will never be queried - it is
just used internally to build the Union operation and never exposed externally.
infer_return_type _ = SQL_Type_Reference.null
case column_set.column_indices.at i of
corresponding_column_index : Integer ->
column = t.at corresponding_column_index
internal_named_column = column.as_internal.rename column_name
## We cast if the result type is different.
This is a bit on the safe side. In some cases the cast is not needed
(for example, most databases will allow a union of int2 and int4 without casts, and SQLite does not need casts at all).
However, we do this for simplicity, as determining exactly when the cast is needed would add a lot of complication.
This is a possible future improvement to make queries lighter, but the benefit is unlikely to be worth it.
needs_cast = column.value_type != result_type
if needs_cast.not then internal_named_column else
dialect.make_cast internal_named_column sql_type infer_return_type
Nothing ->
typ = SQL_Type_Reference.from_constant SQL_Type.null
expr = SQL_Expression.Literal "NULL"
null_column = Internal_Column.Value column_name typ expr
if sql_type == SQL_Type.null then null_column else
dialect.make_cast null_column sql_type infer_return_type
pairs = columns_to_select.map c->
[c.name, c.expression]
Query.Select pairs t.context
table_name_deduplicator = self.connection.base_connection.table_naming_helper.create_unique_name_strategy
table_name_deduplicator.mark_used (all_tables.map .name)
union_alias = table_name_deduplicator.make_unique <|
all_tables.map .name . join "_"
new_from = From_Spec.Union queries union_alias
new_ctx = Context.for_subquery new_from
## TODO [RW] The result type is currently fetched
independently for each column; instead, we should fetch it
for all columns at once.
See #6118.
infer_return_type expression =
SQL_Type_Reference.new self.connection new_ctx expression
new_columns = merged_columns.map description->
column_set = description.first
result_type = description.at 2
column_name = column_set.name
## We assume that the type for this expression will never be queried - it is
just used internally to build the Union operation and never exposed externally.
infer_return_type _ = SQL_Type_Reference.null
case column_set.column_indices.at i of
corresponding_column_index : Integer ->
column = t.at corresponding_column_index
internal_named_column = column.as_internal.rename column_name
## We cast if the result type is different.
This is a bit on the safe side. In some cases the cast is not needed
(for example, most databases will allow a union of int2 and int4 without casts, and SQLite does not need casts at all).
However, we do this for simplicity, as determining exactly when the cast is needed would add a lot of complication.
This is a possible future improvement to make queries lighter, but the benefit is unlikely to be worth it.
needs_cast = column.value_type != result_type
if needs_cast.not then internal_named_column else
dialect.make_cast internal_named_column sql_type infer_return_type
Nothing ->
typ = SQL_Type_Reference.from_constant SQL_Type.null
expr = SQL_Expression.Literal "NULL"
null_column = Internal_Column.Value column_name typ expr
if sql_type == SQL_Type.null then null_column else
dialect.make_cast null_column sql_type infer_return_type
pairs = columns_to_select.map c->
[c.name, c.expression]
Query.Select pairs t.context
name = column_set.name
expression = SQL_Expression.Column union_alias name
input_column = Internal_Column.Value name (infer_return_type expression) expression
dialect.adapt_unified_column input_column result_type infer_return_type
table_name_deduplicator = self.connection.base_connection.table_naming_helper.create_unique_name_strategy
table_name_deduplicator.mark_used (all_tables.map .name)
union_alias = table_name_deduplicator.make_unique <|
all_tables.map .name . join "_"
new_from = From_Spec.Union queries union_alias
new_ctx = Context.for_subquery new_from
## TODO [RW] The result type is currently fetched
independently for each column; instead, we should fetch it
for all columns at once.
See #6118.
infer_return_type expression =
SQL_Type_Reference.new self.connection new_ctx expression
new_columns = merged_columns.map description->
column_set = description.first
result_type = description.at 2
name = column_set.name
expression = SQL_Expression.Column union_alias name
input_column = Internal_Column.Value name (infer_return_type expression) expression
dialect.adapt_unified_column input_column result_type infer_return_type
DB_Table.Value union_alias self.connection new_columns new_ctx
DB_Table.Value union_alias self.connection new_columns new_ctx
## ALIAS group by, summarize, count, count distinct, sum, average, mean, median, percentile, mode, standard deviation, variance, minimum, maximum, first, last, shortest, longest
GROUP Standard.Base.Calculations
@ -1995,7 +2045,8 @@ type DB_Table
@columns Widget_Helpers.make_aggregate_column_vector_selector
aggregate : Vector (Integer | Text | Regex | Aggregate_Column) | Text | Integer | Regex -> Vector Aggregate_Column -> Boolean -> Problem_Behavior -> DB_Table ! No_Output_Columns | Invalid_Aggregate_Column | Invalid_Column_Names | Duplicate_Output_Column_Names | Floating_Point_Equality | Invalid_Aggregation | Unquoted_Delimiter | Additional_Warnings
aggregate self (group_by : Vector | Text | Integer | Regex = []) (columns : Vector = []) (error_on_missing_columns : Boolean = False) (on_problems : Problem_Behavior = ..Report_Warning) =
Aggregate_Helper.aggregate self group_by columns error_on_missing_columns on_problems
Feature.Aggregate.if_supported_else_throw self.connection.dialect "aggregate" <|
Aggregate_Helper.aggregate self group_by columns error_on_missing_columns on_problems
## ALIAS pivot, unpivot
GROUP Standard.Base.Calculations
@ -2921,8 +2972,9 @@ type DB_Table
@remove make_data_cleanse_vector_selector
text_cleanse : Vector (Integer | Text | Regex | By_Type) -> Vector Named_Pattern -> DB_Table
text_cleanse self from:(Vector (Integer | Text | Regex | By_Type)) remove =
transformer col = col.text_cleanse remove
Table_Helpers.replace_columns_with_transformed_columns self from transformer
Feature.Text_Cleanse.if_supported_else_throw self.connection.dialect "text_cleanse" <|
transformer col = col.text_cleanse remove
Table_Helpers.replace_columns_with_transformed_columns self from transformer
## ALIAS cumulative, count, sum, total, minimum, maximum, mean, product, variance, standard deviation
GROUP Standard.Base.Values

View File

@ -28,6 +28,7 @@ import project.SQL_Statement.SQL_Statement
import project.SQL_Type.SQL_Type
from project.Dialect_Flags import all
from project.Errors import SQL_Error, Unsupported_Database_Operation
from project.Feature import Feature
from project.Internal.JDBC_Connection import JDBC_Connection
from project.Internal.Result_Set import result_set_to_table
@ -201,11 +202,16 @@ type Dialect
## PRIVATE
Checks if an operation is supported by the dialect.
is_supported : Text -> Boolean
is_supported self operation =
is_operation_supported self operation:Text -> Boolean =
_ = operation
Unimplemented.throw "This is an interface only."
## PRIVATE
Checks if a feature is supported by the dialect.
is_feature_supported self feature:Feature -> Boolean =
_ = feature
Unimplemented.throw "This is an interface only."
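A concrete dialect then overrides both checks; a minimal sketch (the
`My_Dialect` type is hypothetical, and the method bodies mirror the Postgres
and SQLite implementations shown further below):

    # Sketch: a dialect that delegates operation checks to its
    # Dialect_Operations and claims support for every feature.
    type My_Dialect
        Value dialect_operations

        is_operation_supported self operation:Text -> Boolean =
            self.dialect_operations.is_operation_supported operation

        is_feature_supported self feature:Feature -> Boolean =
            _ = feature
            True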
## PRIVATE
Returns a helper for mapping database-specific SQL errors into our common
error types.

View File

@ -0,0 +1,63 @@
from Standard.Base import all
from project.Errors import Unsupported_Database_Operation
## PRIVATE
type Feature
## PRIVATE
Select, remove, reorder, and rename columns.
Select_Columns
## PRIVATE
Filter rows in a table.
Filter
## PRIVATE
Aggregate values in a table.
Aggregate
## PRIVATE
Sort rows in a table.
Sort
## PRIVATE
Join tables.
Join
## PRIVATE
Combine the results of two queries.
Union
## PRIVATE
Remove duplicate rows from a table.
Distinct
## PRIVATE
Cleanse text data.
Text_Cleanse
## PRIVATE
Catch-all for tests that haven't yet been categorized correctly or that use multiple features.
Integration_Tests
## PRIVATE
Add a row number column to a table.
Add_Row_Number
## PRIVATE
Create a table from a dictionary or vectors.
Make_Table_From
## PRIVATE
Currently blocks getting a DB_Column from a DB_Table, but will soon be refined to individual operations on DB_Column.
Column_Operations
## PRIVATE
Set operations on tables.
Set
## PRIVATE
Row sampling operations: get_row, take, drop, limit.
Sample
## PRIVATE
Replace values in a table.
Replace
## PRIVATE
Merge two tables.
Merge
## PRIVATE
Cross-join two tables.
Cross_Join
## PRIVATE
Check if a feature is supported by a dialect, and throw an error if it is not.
if_supported_else_throw self dialect:Any error_name:Text ~action =
if dialect.is_feature_supported self then action else
Error.throw (Unsupported_Database_Operation.Error error_name)
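As a usage sketch (the `example_distinct` helper is illustrative; the call
shape mirrors the `union`, `aggregate` and `text_cleanse` changes above), an
operation wraps its body in the feature guard:

    # Sketch: guard a DB_Table operation on a dialect feature check.
    example_distinct table =
        Feature.Distinct.if_supported_else_throw table.connection.dialect "distinct" <|
            table.distinct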

View File

@ -43,8 +43,7 @@ type Dialect_Operations
## PRIVATE
Checks if an operation is supported by the dialect.
is_supported : Text -> Boolean
is_supported self operation =
is_operation_supported self operation:Text -> Boolean =
self.operations_dict.contains_key operation
## PRIVATE

View File

@ -122,6 +122,9 @@ type From_Spec_Comparator
From_Spec.Table other_table_name _ _ ->
if table_name == other_table_name then Ordering.Equal else Nothing
_ -> Nothing
From_Spec.Literal_Values _ _ _ -> case y of
From_Spec.Literal_Values _ _ _ ->
Nothing
_ -> Ordering.compare x y
## PRIVATE

View File

@ -41,6 +41,7 @@ import project.SQL_Statement.SQL_Statement
import project.SQL_Type.SQL_Type
from project.Dialect import Temp_Table_Style
from project.Errors import SQL_Error, Unsupported_Database_Operation
from project.Feature import Feature
from project.Internal.IR.Operation_Metadata import Date_Period_Metadata
from project.Internal.JDBC_Connection import JDBC_Connection
@ -261,9 +262,14 @@ type Postgres_Dialect
## PRIVATE
Checks if an operation is supported by the dialect.
is_supported : Text -> Boolean
is_supported self operation =
self.dialect_operations.is_supported operation
is_operation_supported self operation:Text -> Boolean =
self.dialect_operations.is_operation_supported operation
## PRIVATE
Checks if a feature is supported by the dialect.
is_feature_supported self feature:Feature -> Boolean =
_ = feature
True
## PRIVATE
The default table types to use when listing tables.

View File

@ -38,6 +38,7 @@ import project.SQL_Statement.SQL_Statement
import project.SQL_Type.SQL_Type
from project.Dialect import Temp_Table_Style
from project.Errors import SQL_Error, Unsupported_Database_Operation
from project.Feature import Feature
from project.Internal.JDBC_Connection import JDBC_Connection
## PRIVATE
@ -272,9 +273,14 @@ type SQLite_Dialect
## PRIVATE
Checks if an operation is supported by the dialect.
is_supported : Text -> Boolean
is_supported self operation =
self.dialect_operations.is_supported operation
is_operation_supported self operation:Text -> Boolean =
self.dialect_operations.is_operation_supported operation
## PRIVATE
Checks if a feature is supported by the dialect.
is_feature_supported self feature:Feature -> Boolean =
_ = feature
True
## PRIVATE
The default table types to use when listing tables.

View File

@ -1,4 +1,5 @@
from Standard.Base import all
import Standard.Base.Errors.Common.Type_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.System.File.Generic.Writable_File.Writable_File
import Standard.Base.System.File_Format_Metadata.File_Format_Metadata
@ -20,7 +21,7 @@ type Image_File_Format
Resolve an unresolved constructor to the actual type.
resolve : Function -> Image_File_Format | Nothing
resolve constructor =
Panic.catch Any (constructor:Image_File_Format) _->Nothing
Panic.catch Type_Error (constructor:Image_File_Format) _->Nothing
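Catching `Type_Error` instead of `Any` narrows the handler: only a failed type
ascription resolves to `Nothing`, while unrelated panics keep propagating. A
minimal sketch of the pattern (the `My_Format` type is hypothetical):

    # Sketch: resolve an unresolved constructor to a concrete type.
    # Only a Type_Error raised by the ascription yields Nothing.
    resolve_sketch constructor =
        Panic.catch Type_Error (constructor:My_Format) _->Nothing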
## PRIVATE
If the File_Format supports reading from the file, return a configured instance.

View File

@ -1,6 +1,7 @@
from Standard.Base import all
import Standard.Base.Data.Numbers.Number_Parse_Error
import Standard.Base.Errors.Common.Missing_Argument
import Standard.Base.Errors.Common.Type_Error
import Standard.Base.Errors.Illegal_State.Illegal_State
import Standard.Base.Metadata.Widget.Text_Input
@ -25,7 +26,7 @@ type SQLServer_Details
Attempt to resolve the constructor.
resolve : Function -> SQLServer_Details | Nothing
resolve constructor =
Panic.catch Any (constructor:SQLServer_Details) _->Nothing
Panic.catch Type_Error (constructor:SQLServer_Details) _->Nothing
## PRIVATE
Build the Connection resource.

View File

@ -16,6 +16,7 @@ import Standard.Database.Connection.Connection.Connection
import Standard.Database.DB_Column.DB_Column
import Standard.Database.DB_Table.DB_Table
import Standard.Database.Dialect
import Standard.Database.Feature.Feature
import Standard.Database.Internal.Base_Generator
import Standard.Database.Internal.Common.Database_Distinct_Helper
import Standard.Database.Internal.Common.Database_Join_Helper
@ -41,7 +42,7 @@ import Standard.Database.SQL_Type.SQL_Type
from Standard.Database.Dialect import Temp_Table_Style
from Standard.Database.Dialect_Flags import all
from Standard.Database.Errors import SQL_Error, Unsupported_Database_Operation
from Standard.Database.Internal.Base_Generator import lift_binary_op
from Standard.Database.Internal.IR.Operation_Metadata import Date_Period_Metadata
from Standard.Database.Internal.JDBC_Connection import JDBC_Connection
from Standard.Database.Internal.Statement_Setter import fill_hole_default
@ -54,13 +55,13 @@ polyglot java import org.enso.database.JDBCUtils
## PRIVATE
The dialect of SQL Server databases.
sqlserver : SQLSever_Dialect
sqlserver : SQLServer_Dialect
sqlserver =
SQLSever_Dialect.Value make_dialect_operations
SQLServer_Dialect.Value make_dialect_operations
## PRIVATE
The dialect of SQL Server databases.
type SQLSever_Dialect
type SQLServer_Dialect
## PRIVATE
The dialect of SQL Server databases.
Value dialect_operations
@ -235,9 +236,14 @@ type SQLSever_Dialect
## PRIVATE
Checks if an operation is supported by the dialect.
is_supported : Text -> Boolean
is_supported self operation =
self.dialect_operations.is_supported operation
is_operation_supported self operation:Text -> Boolean =
self.dialect_operations.is_operation_supported operation
## PRIVATE
Checks if a feature is supported by the dialect.
is_feature_supported self feature:Feature -> Boolean =
case feature of
_ -> False
## PRIVATE
The default table types to use when listing tables.

View File

@ -1,6 +1,7 @@
from Standard.Base import all
import Standard.Base.Data.Numbers.Number_Parse_Error
import Standard.Base.Errors.Common.Missing_Argument
import Standard.Base.Errors.Common.Type_Error
import Standard.Base.Errors.Illegal_State.Illegal_State
import Standard.Base.Metadata.Widget.Text_Input
@ -28,7 +29,7 @@ type Snowflake_Details
Attempt to resolve the constructor.
resolve : Function -> Snowflake_Details | Nothing
resolve constructor =
Panic.catch Any (constructor:Snowflake_Details) _->Nothing
Panic.catch Type_Error (constructor:Snowflake_Details) _->Nothing
## PRIVATE
Build the Connection resource.

View File

@ -17,6 +17,7 @@ import Standard.Database.Connection.Connection.Connection
import Standard.Database.DB_Column.DB_Column
import Standard.Database.DB_Table.DB_Table
import Standard.Database.Dialect
import Standard.Database.Feature.Feature
import Standard.Database.Internal.Aggregate_Helper
import Standard.Database.Internal.Aggregate_Helper.Aggregate_With_Helper_Expressions
import Standard.Database.Internal.Base_Generator
@ -249,9 +250,14 @@ type Snowflake_Dialect
## PRIVATE
Checks if an operation is supported by the dialect.
is_supported : Text -> Boolean
is_supported self operation =
self.dialect_operations.is_supported operation
is_operation_supported self operation:Text -> Boolean =
self.dialect_operations.is_operation_supported operation
## PRIVATE
Checks if a feature is supported by the dialect.
is_feature_supported self feature:Feature -> Boolean =
_ = feature
True
## PRIVATE
The default table types to use when listing tables.

View File

@ -46,7 +46,6 @@ polyglot java import org.enso.table.data.column.operation.unary.IsNothingOperati
polyglot java import org.enso.table.data.column.operation.unary.NotOperation
polyglot java import org.enso.table.data.column.operation.unary.TextLengthOperation
polyglot java import org.enso.table.data.column.operation.unary.TruncatedTimePartOperation
polyglot java import org.enso.table.data.column.operation.unary.UnaryDecimalRoundOperation
polyglot java import org.enso.table.data.column.operation.unary.UnaryRoundOperation
polyglot java import org.enso.table.data.column.operation.UnaryOperation
polyglot java import org.enso.table.data.column.storage.Storage as Java_Storage
@ -930,11 +929,8 @@ type Column
case precise_value_type.is_integer of
True ->
self.rename new_name
False -> case precise_value_type.is_decimal of
True ->
apply_unary_operation self UnaryDecimalRoundOperation.TRUNCATE_INSTANCE
False ->
apply_unary_operation self UnaryRoundOperation.TRUNCATE_INSTANCE
False ->
apply_unary_operation self UnaryRoundOperation.TRUNCATE_INSTANCE
False -> case precise_value_type == Value_Type.Date_Time of
True ->
fun = _.date
@ -959,11 +955,7 @@ type Column
new_name = naming_helper.function_name "ceil" [self]
self.rename new_name
False ->
case self.inferred_precise_value_type.is_decimal of
True ->
apply_unary_operation self UnaryDecimalRoundOperation.CEIL_INSTANCE
False ->
apply_unary_operation self UnaryRoundOperation.CEIL_INSTANCE
apply_unary_operation self UnaryRoundOperation.CEIL_INSTANCE
## GROUP Standard.Base.Rounding
ICON math
@ -983,11 +975,7 @@ type Column
new_name = naming_helper.function_name "floor" [self]
self.rename new_name
False ->
case self.inferred_precise_value_type.is_decimal of
True ->
apply_unary_operation self UnaryDecimalRoundOperation.FLOOR_INSTANCE
False ->
apply_unary_operation self UnaryRoundOperation.FLOOR_INSTANCE
apply_unary_operation self UnaryRoundOperation.FLOOR_INSTANCE
## GROUP Standard.Base.Logical
ICON operators
@ -1007,8 +995,8 @@ type Column
coalesce self values =
vec = Vector.unify_vector_or_element values
new_name = self.naming_helper.function_name "coalesce" [self]+vec
result = if values.is_empty then self else
values.fold self acc-> v-> acc.fill_nothing v
result = if vec.is_empty then self else
vec.fold self acc-> v-> acc.fill_nothing v
result.rename new_name
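The fix folds over the unified `vec` rather than the raw `values` argument, so
a single scalar argument now behaves like a one-element vector. A usage sketch
(`column` is an assumed in-memory Column):

    # Sketch: after the fix, both calls fill Nothing values with 0.
    filled_a = column.coalesce 0
    filled_b = column.coalesce [0]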
## GROUP Standard.Base.Math

View File

@ -13,7 +13,7 @@ import project.Value_Type.Auto
import project.Value_Type.Bits
import project.Value_Type.Value_Type
polyglot java import java.lang.Exception as Java_Exception
polyglot java import java.lang.Exception as JException
polyglot java import java.lang.IllegalArgumentException
polyglot java import org.enso.table.formatting.AnyObjectFormatter
polyglot java import org.enso.table.formatting.BooleanFormatter
@ -215,17 +215,17 @@ type Data_Formatter
## PRIVATE
make_date_parser self = self.wrap_base_parser <|
Panic.catch Java_Exception handler=(caught_panic-> Error.throw (Illegal_Argument.Error caught_panic.payload.getMessage)) <|
Panic.catch JException handler=(caught_panic-> Error.throw (Illegal_Argument.Error caught_panic.payload.getMessage)) <|
DateParser.new (self.date_formats.map on_problems=No_Wrap .get_java_formatter_for_parsing)
## PRIVATE
make_date_time_parser self = self.wrap_base_parser <|
Panic.catch Java_Exception handler=(caught_panic-> Error.throw (Illegal_Argument.Error caught_panic.payload.getMessage)) <|
Panic.catch JException handler=(caught_panic-> Error.throw (Illegal_Argument.Error caught_panic.payload.getMessage)) <|
DateTimeParser.new (self.datetime_formats.map on_problems=No_Wrap .get_java_formatter_for_parsing)
## PRIVATE
make_time_of_day_parser self = self.wrap_base_parser <|
Panic.catch Java_Exception handler=(caught_panic-> Error.throw (Illegal_Argument.Error caught_panic.payload.getMessage)) <|
Panic.catch JException handler=(caught_panic-> Error.throw (Illegal_Argument.Error caught_panic.payload.getMessage)) <|
TimeOfDayParser.new (self.time_formats.map on_problems=No_Wrap .get_java_formatter_for_parsing)
## PRIVATE
@ -289,19 +289,19 @@ type Data_Formatter
## PRIVATE
make_date_formatter self =
if self.date_formats.is_empty then Error.throw (Illegal_Argument.Error "Formatting dates requires at least one entry in the `date_formats` parameter") else
Panic.catch Java_Exception handler=(caught_panic-> Error.throw (Illegal_Argument.Error caught_panic.payload.getMessage)) <|
Panic.catch JException handler=(caught_panic-> Error.throw (Illegal_Argument.Error caught_panic.payload.getMessage)) <|
DateFormatter.new self.date_formats.first.underlying
## PRIVATE
make_time_of_day_formatter self =
if self.time_formats.is_empty then Error.throw (Illegal_Argument.Error "Formatting times requires at least one entry in the `time_formats` parameter") else
Panic.catch Java_Exception handler=(caught_panic-> Error.throw (Illegal_Argument.Error caught_panic.payload.getMessage)) <|
Panic.catch JException handler=(caught_panic-> Error.throw (Illegal_Argument.Error caught_panic.payload.getMessage)) <|
TimeFormatter.new self.time_formats.first.underlying
## PRIVATE
make_date_time_formatter self =
if self.datetime_formats.is_empty then Error.throw (Illegal_Argument.Error "Formatting date-times requires at least one entry in the `datetime_formats` parameter") else
Panic.catch Java_Exception handler=(caught_panic-> Error.throw (Illegal_Argument.Error caught_panic.payload.getMessage)) <|
Panic.catch JException handler=(caught_panic-> Error.throw (Illegal_Argument.Error caught_panic.payload.getMessage)) <|
DateTimeFormatter.new self.datetime_formats.first.underlying
## PRIVATE

View File

@ -1,4 +1,5 @@
from Standard.Base import all
import Standard.Base.Errors.Common.Type_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Network.HTTP.Response.Response
import Standard.Base.System.File.Generic.Writable_File.Writable_File
@ -65,7 +66,7 @@ type Delimited_Format
Resolve an unresolved constructor to the actual type.
resolve : Function -> Delimited_Format | Nothing
resolve constructor =
Panic.catch Any (constructor:Delimited_Format) _->Nothing
Panic.catch Type_Error (constructor:Delimited_Format) _->Nothing
## PRIVATE
ADVANCED

View File

@ -1,5 +1,6 @@
from Standard.Base import all
import Standard.Base.Errors.Common.Missing_Argument
import Standard.Base.Errors.Common.Type_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Metadata.Display
import Standard.Base.System.File.Generic.Writable_File.Writable_File
@ -82,7 +83,7 @@ type Excel_Format
Resolve an unresolved constructor to the actual type.
resolve : Function -> Excel_Format | Nothing
resolve constructor =
Panic.catch Any (constructor:Excel_Format) _->Nothing
Panic.catch Type_Error (constructor:Excel_Format) _->Nothing
## PRIVATE
ADVANCED

View File

@ -1,4 +1,5 @@
from Standard.Base import all
import Standard.Base.Errors.Common.Type_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.System.File.Generic.Writable_File.Writable_File
import Standard.Base.System.File_Format_Metadata.File_Format_Metadata
@ -19,7 +20,7 @@ type Tableau_Format
Resolve an unresolved constructor to the actual type.
resolve : Function -> Tableau_Format | Nothing
resolve constructor =
Panic.catch Any (constructor:Tableau_Format) _->Nothing
Panic.catch Type_Error (constructor:Tableau_Format) _->Nothing
## PRIVATE
ADVANCED

View File

@ -1,5 +1,6 @@
from Standard.Base import all
polyglot java import java.math.BigInteger
polyglot java import java.util.Random as Java_Random
polyglot java import org.enso.base.Text_Utils
@ -112,6 +113,38 @@ type Faker
integer self minimum=0 maximum=100 =
minimum + (self.generator.nextInt (maximum - minimum))
## GROUP Standard.Base.Random
ICON random
Create a random large Integer value (represented internally as a Java
`BigInteger`).
The values provided by this method are selected from a sparse set within
the specified range. For example, with `bit_length=4`, the possible range
is -16 to 16, but the actual values only include 9, 11, 13, and 15.
Arguments:
- bit_length: specifies the range of values to select from. The values
will be between -2^bit_length and 2^bit_length.
large_integer : Integer -> Integer
large_integer self bit_length =
BigInteger.new bit_length 0 self.generator
## GROUP Standard.Base.Random
ICON random
Create a random Decimal value (represented internally as a Java
`BigDecimal`).
This generator uses `large_integer` to generate an `Integer`, and then
adds a random `Float`. See `large_integer` for a description of the range
of values that this can return.
Arguments:
- bit_length: specifies the range of values to select from. The values
will be between -2^bit_length and 2^bit_length.
decimal : Integer -> Decimal
decimal self bit_length =
(self.large_integer bit_length) + self.float
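A brief usage sketch of the two new generators (the zero-argument `Faker.new`
call is an assumption; see the benchmark changes further below for real usage):

    # Sketch: generate values far outside the Long range.
    faker = Faker.new
    big_int = faker.large_integer 60    # an Integer of up to 60 bits
    big_dec = faker.decimal 60          # that magnitude plus a random Float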
## GROUP Standard.Base.Random
ICON random
Create a random Float value

View File

@ -742,23 +742,30 @@ public class Main {
factory.checkForWarnings(mainFile.getName().replace(".enso", "") + ".main");
}
var context = new PolyglotContext(factory.build());
if (projectMode) {
var result = PackageManager$.MODULE$.Default().loadPackage(file);
if (result.isSuccess()) {
var s = (scala.util.Success) result;
@SuppressWarnings("unchecked")
var pkg = (org.enso.pkg.Package<java.io.File>) s.get();
var mainModuleName = pkg.moduleNameForFile(pkg.mainFile()).toString();
runPackage(context, mainModuleName, file, additionalArgs);
try {
if (projectMode) {
var result = PackageManager$.MODULE$.Default().loadPackage(file);
if (result.isSuccess()) {
var s = (scala.util.Success) result;
@SuppressWarnings("unchecked")
var pkg = (org.enso.pkg.Package<java.io.File>) s.get();
var mainModuleName = pkg.moduleNameForFile(pkg.mainFile()).toString();
runPackage(context, mainModuleName, file, additionalArgs);
} else {
println(((scala.util.Failure) result).exception().getMessage());
throw exitFail();
}
} else {
println(((scala.util.Failure) result).exception().getMessage());
throw exitFail();
runSingleFile(context, file, additionalArgs);
}
} else {
runSingleFile(context, file, additionalArgs);
} catch (RuntimeException e) {
// Force computation of the exception message before the context is closed.
// This should work around the issues seen in #11127.
logger.debug("Execution failed with " + e.getMessage());
throw e;
} finally {
context.context().close();
}
context.context().close();
throw exitSuccess();
}
@ -1215,7 +1222,7 @@ public class Main {
try {
return main.call();
} catch (IOException ex) {
} catch (IOException | RuntimeException ex) {
throw ex;
} catch (Exception ex) {
throw new IOException(ex);

View File

@ -43,9 +43,13 @@ public final class VisualizationResult {
}
}
public static boolean isInterruptedException(Throwable ex) {
var iop = InteropLibrary.getUncached();
return isInterruptedException(ex, iop);
public static boolean isInterruptedException(Object object) {
if (object instanceof Throwable ex) {
var iop = InteropLibrary.getUncached();
return isInterruptedException(ex, iop);
} else {
return false;
}
}
private static boolean isInterruptedException(Object ex, InteropLibrary iop) {

View File

@ -190,7 +190,7 @@ final class JobExecutionEngine(
logger.log(
Level.FINE,
"Aborting {0} jobs because {1}: {2}",
Array(cancellableJobs.length, reason, cancellableJobs.map(_.id))
Array[Any](cancellableJobs.length, reason, cancellableJobs.map(_.id))
)
cancellableJobs.foreach { runningJob =>
runningJob.future.cancel(runningJob.job.mayInterruptIfRunning)
@ -215,7 +215,7 @@ final class JobExecutionEngine(
logger.log(
Level.FINE,
"Aborting job {0} because {1}",
Array(runningJob.id, reason)
Array[Any](runningJob.id, reason)
)
runningJob.future.cancel(runningJob.job.mayInterruptIfRunning)
}
@ -237,7 +237,7 @@ final class JobExecutionEngine(
logger.log(
Level.FINE,
"Aborting job {0} because {1}",
Array(runningJob.id, reason)
Array[Any](runningJob.id, reason)
)
runningJob.future.cancel(runningJob.job.mayInterruptIfRunning)
}
@ -260,7 +260,7 @@ final class JobExecutionEngine(
logger.log(
Level.FINE,
"Aborting {0} background jobs because {1}: {2}",
Array(cancellableJobs.length, reason, cancellableJobs.map(_.id))
Array[Any](cancellableJobs.length, reason, cancellableJobs.map(_.id))
)
cancellableJobs.foreach { runningJob =>
runningJob.future.cancel(runningJob.job.mayInterruptIfRunning)

View File

@ -213,7 +213,7 @@ class ReentrantLocking(logger: TruffleLogger) extends Locking {
contextLock.lock,
"context lock",
where
) //acquireContextLock(contextId)
)
callable.call()
} catch {
case _: InterruptedException =>

View File

@ -545,7 +545,7 @@ object ProgramExecutionSupport {
} else {
runtimeCache.getAnyValue(visualization.expressionId)
}
if (v != null) {
if (v != null && !VisualizationResult.isInterruptedException(v)) {
executeAndSendVisualizationUpdate(
contextId,
runtimeCache,

View File

@ -6,7 +6,11 @@ import org.enso.common.RuntimeOptions
import org.enso.interpreter.runtime.`type`.ConstantsGen
import org.enso.polyglot.RuntimeServerInfo
import org.enso.polyglot.runtime.Runtime.Api
import org.enso.polyglot.runtime.Runtime.Api.{MethodCall, MethodPointer}
import org.enso.polyglot.runtime.Runtime.Api.{
InvalidatedExpressions,
MethodCall,
MethodPointer
}
import org.enso.text.{ContentVersion, Sha3_224VersionCalculator}
import org.graalvm.polyglot.Context
import org.scalatest.BeforeAndAfterEach
@ -28,6 +32,19 @@ class RuntimeAsyncCommandsTest
var context: TestContext = _
object Visualization {
val metadata = new Metadata
val code =
metadata.appendToCode(
"""
|encode x = (x + 1).to_text
|""".stripMargin.linesIterator.mkString("\n")
)
}
class TestContext(packageName: String)
extends InstrumentTestContext(packageName) {
val out: ByteArrayOutputStream = new ByteArrayOutputStream()
@ -245,7 +262,7 @@ class RuntimeAsyncCommandsTest
diagnostic.stack should not be empty
}
it should "interrupt running execution context without raising Panic" in {
it should "interrupt running execution context without sending Panic in expression updates" in {
val moduleName = "Enso_Test.Test.Main"
val contextId = UUID.randomUUID()
val requestId = UUID.randomUUID()
@ -303,7 +320,7 @@ class RuntimeAsyncCommandsTest
var iteration = 0
while (!isProgramStarted && iteration < 100) {
val out = context.consumeOut
Thread.sleep(200)
Thread.sleep(100)
isProgramStarted = out == List("started")
iteration += 1
}
@ -335,4 +352,175 @@ class RuntimeAsyncCommandsTest
context.executionComplete(contextId)
)
}
it should "interrupt running execution context without sending Panic in visualization updates" in {
val contextId = UUID.randomUUID()
val requestId = UUID.randomUUID()
val visualizationId = UUID.randomUUID()
val moduleName = "Enso_Test.Test.Main"
val metadata = new Metadata("import Standard.Base.Data.Numbers\n\n")
val visualizationFile =
context.writeInSrcDir("Visualization", Visualization.code)
val idOp1 = metadata.addItem(203, 7)
val idOp2 = metadata.addItem(227, 13)
val code =
"""from Standard.Base import all
|
|polyglot java import java.lang.Thread
|
|loop n s=0 =
| if (s > n) then s else
| Thread.sleep 200
| loop n s+1
|
|main =
| IO.println "started"
| operator1 = loop 50
| operator2 = operator1 + 1
| operator2
|
|fun1 x = x.to_text
|""".stripMargin.linesIterator.mkString("\n")
val contents = metadata.appendToCode(code)
val mainFile = context.writeMain(contents)
// create context
context.send(Api.Request(requestId, Api.CreateContextRequest(contextId)))
context.receive shouldEqual Some(
Api.Response(requestId, Api.CreateContextResponse(contextId))
)
// Open visualizations
context.send(
Api.Request(
requestId,
Api.OpenFileRequest(
visualizationFile,
Visualization.code
)
)
)
context.receive shouldEqual Some(
Api.Response(Some(requestId), Api.OpenFileResponse)
)
// Open the new file
context.send(
Api.Request(requestId, Api.OpenFileRequest(mainFile, contents))
)
context.receive shouldEqual Some(
Api.Response(Some(requestId), Api.OpenFileResponse)
)
// push main
val item1 = Api.StackItem.ExplicitCall(
Api.MethodPointer(moduleName, moduleName, "main"),
None,
Vector()
)
context.send(
Api.Request(requestId, Api.PushContextRequest(contextId, item1))
)
// attach visualizations to both expressions
context.send(
Api.Request(
requestId,
Api.AttachVisualization(
visualizationId,
idOp2,
Api.VisualizationConfiguration(
contextId,
Api.VisualizationExpression.Text(
"Enso_Test.Test.Visualization",
"x -> encode x",
Vector()
),
"Enso_Test.Test.Visualization"
)
)
)
)
context.send(
Api.Request(
requestId,
Api.AttachVisualization(
visualizationId,
idOp1,
Api.VisualizationConfiguration(
contextId,
Api.VisualizationExpression.Text(
"Enso_Test.Test.Visualization",
"x -> encode x",
Vector()
),
"Enso_Test.Test.Visualization"
)
)
)
)
val response1 = context.receiveNIgnoreExpressionUpdates(
6,
timeoutSeconds = 20
)
response1 should contain allOf (
Api.Response(requestId, Api.PushContextResponse(contextId)),
Api.Response(requestId, Api.VisualizationAttached()),
context.executionComplete(contextId)
)
context.consumeOut
response1
.map(_.payload)
.count(_.isInstanceOf[Api.VisualizationAttached]) should be(2)
response1
.map(_.payload)
.count(_.isInstanceOf[Api.VisualizationUpdate]) should be(2)
context.send(
Api.Request(
requestId,
Api.RecomputeContextRequest(
contextId,
Some(InvalidatedExpressions.Expressions(Vector(idOp1, idOp2))),
None
)
)
)
var isProgramStarted = false
var iteration = 0
while (!isProgramStarted && iteration < 100) {
val out = context.consumeOut
Thread.sleep(100)
isProgramStarted = out == List("started")
iteration += 1
}
if (!isProgramStarted) {
fail("Program start timed out")
}
// Trigger interruption
context.send(
Api.Request(requestId, Api.InterruptContextRequest(contextId))
)
val response2 = context.receiveNIgnoreExpressionUpdates(
5,
timeoutSeconds = 20
)
response2 should contain allOf (
Api.Response(requestId, Api.RecomputeContextResponse(contextId)),
Api.Response(requestId, Api.InterruptContextResponse(contextId)),
context.executionComplete(contextId)
)
val failure = response2.collectFirst({
case Api.Response(None, Api.VisualizationEvaluationFailed(_, msg, _)) =>
msg
})
failure should be(Symbol("empty"))
}
}

View File

@ -49,6 +49,54 @@ class PanicsTest extends InterpreterTest {
consumeOut shouldEqual List("(Error: MyError)")
}
"message should be computed before leaving the context" in {
val code =
"""from Standard.Base import all
|
|type LM
| V txt
|
| to_display_text self =
| res = self.txt + " from exception"
| Panic.throw res
|
|main = Panic.throw (LM.V "Hi")
|""".stripMargin
try {
eval(code)
fail("Should raise an InterpreterException");
} catch {
case ex: InterpreterException =>
ex.getMessage() shouldEqual "Hi from exception"
case any: Throwable => throw any
}
}
"panic causing stack overflow in to_display_text should still generate some error message" in {
val code =
"""from Standard.Base import all
|
|type LM
| V txt
|
| to_display_text self =
| res = LM.V (self.txt + "Ex")
| res.to_display_text
|
|main = Panic.throw (LM.V "Hi")
|""".stripMargin
try {
eval(code)
fail("Should raise an InterpreterException");
} catch {
case ex: InterpreterException =>
ex.getMessage() shouldEqual "LM"
case any: Throwable => throw any
}
}
"catch polyglot errors" in {
val code =
"""from Standard.Base import all

View File

@ -17,6 +17,7 @@ import com.oracle.truffle.api.library.ExportLibrary;
import com.oracle.truffle.api.library.ExportMessage;
import com.oracle.truffle.api.nodes.Node;
import com.oracle.truffle.api.nodes.RootNode;
import com.oracle.truffle.api.profiles.InlinedBranchProfile;
import com.oracle.truffle.api.source.SourceSection;
import org.enso.common.MethodNames;
import org.enso.interpreter.node.callable.InteropApplicationNode;
@ -29,9 +30,11 @@ import org.enso.interpreter.runtime.callable.function.FunctionSchema.CallerFrame
import org.enso.interpreter.runtime.data.EnsoObject;
import org.enso.interpreter.runtime.data.Type;
import org.enso.interpreter.runtime.data.vector.ArrayLikeHelpers;
import org.enso.interpreter.runtime.error.PanicException;
import org.enso.interpreter.runtime.library.dispatch.TypesLibrary;
import org.enso.interpreter.runtime.state.State;
import org.enso.interpreter.runtime.type.Types;
import org.slf4j.LoggerFactory;
/** A runtime representation of a function object in Enso. */
@ExportLibrary(InteropLibrary.class)
@ -201,9 +204,28 @@ public final class Function implements EnsoObject {
Function function,
Object[] arguments,
@Cached InteropApplicationNode interopApplicationNode,
@CachedLibrary("function") InteropLibrary thisLib) {
return interopApplicationNode.execute(
function, EnsoContext.get(thisLib).emptyState(), arguments);
@CachedLibrary("function") InteropLibrary thisLib,
@Cached InlinedBranchProfile panicProfile) {
try {
return interopApplicationNode.execute(
function, EnsoContext.get(thisLib).emptyState(), arguments);
} catch (StackOverflowError err) {
CompilerDirectives.transferToInterpreter();
var asserts = false;
assert asserts = true;
var logger = LoggerFactory.getLogger(Function.class);
if (asserts) {
logger.error("StackOverflowError detected", err);
} else {
logger.debug("StackOverflowError detected", err);
}
throw err;
} catch (PanicException ex) {
panicProfile.enter(thisLib);
// materialize the exception message
ex.getMessage();
throw ex;
}
}
}

View File

@ -13,7 +13,6 @@ import com.oracle.truffle.api.library.ExportLibrary;
import com.oracle.truffle.api.library.ExportMessage;
import com.oracle.truffle.api.nodes.Node;
import com.oracle.truffle.api.source.SourceSection;
import java.util.logging.Level;
import org.enso.interpreter.node.BaseNode.TailStatus;
import org.enso.interpreter.node.callable.IndirectInvokeMethodNode;
import org.enso.interpreter.node.callable.InvokeCallableNode.ArgumentsExecutionMode;
@ -27,6 +26,8 @@ import org.enso.interpreter.runtime.data.Type;
import org.enso.interpreter.runtime.data.text.Text;
import org.enso.interpreter.runtime.library.dispatch.TypesLibrary;
import org.enso.interpreter.runtime.state.State;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** An exception type for user thrown panic exceptions. */
@ExportLibrary(value = InteropLibrary.class, delegateTo = "payload")
@ -81,21 +82,13 @@ public final class PanicException extends AbstractTruffleException implements En
@CompilerDirectives.TruffleBoundary
private String computeMessage() {
String msg;
InteropLibrary library = InteropLibrary.getUncached();
Object info = null;
var library = InteropLibrary.getUncached();
try {
info = library.getExceptionMessage(this);
var info = library.getExceptionMessage(this);
msg = library.asString(info);
} catch (AssertionError | UnsupportedMessageException e) {
try {
var ctx = EnsoContext.get(null);
ctx.getLogger().log(Level.WARNING, "Cannot convert " + info + " to string", e);
msg = TypeToDisplayTextNode.getUncached().execute(payload);
} catch (AssertionError assertionError) {
throw new AssertionError(
"Failed to log failed conversion of " + info + " to string and payload " + payload,
assertionError);
}
} catch (StackOverflowError | AssertionError | UnsupportedMessageException e) {
logger().atError().log("Cannot compute message for " + payload, e);
msg = TypeToDisplayTextNode.getUncached().execute(payload);
}
cacheMessage = msg;
return msg;
@ -165,7 +158,7 @@ public final class PanicException extends AbstractTruffleException implements En
return Text.create(strings.asString(text));
} catch (UnsupportedMessageException e) {
CompilerDirectives.transferToInterpreter();
ctx.getLogger().log(Level.WARNING, "Cannot convert " + text + " to string", e);
logger().error("Cannot convert " + text + " to string", e);
return Text.create(typeToDisplayTextNode.execute(payload));
}
}
@ -218,4 +211,8 @@ public final class PanicException extends AbstractTruffleException implements En
}
return getLocation().getEncapsulatingSourceSection();
}
private static Logger logger() {
return LoggerFactory.getLogger(PanicException.class);
}
}

View File

@ -59,7 +59,7 @@ public final class EnsoSecretHelper extends SecretValueResolver {
Builder builder,
URIWithSecrets uri,
List<Pair<String, HideableValue>> headers)
throws IOException, InterruptedException {
throws IllegalArgumentException, IOException, InterruptedException {
// Build a new URI with the query arguments.
URI resolvedURI = resolveURI(uri);

View File

@ -0,0 +1,132 @@
package org.enso.table.data.column.builder;
import java.math.BigInteger;
import org.enso.base.polyglot.NumericConverter;
import org.enso.table.data.column.storage.Storage;
import org.enso.table.data.column.storage.type.BigIntegerType;
import org.enso.table.data.column.storage.type.IntegerType;
import org.enso.table.data.column.storage.type.StorageType;
import org.enso.table.problems.ProblemAggregator;
/**
* A builder for storing Enso Integers, which might be Longs or BigIntegers.
*
* <p>This builder starts off delegating to LongBuilder, but if it receives a BigInteger, it retypes
* the LongBuilder to a BigIntegerBuilder.
*/
public class InferredIntegerBuilder extends Builder {
private LongBuilder longBuilder = null;
private TypedBuilder bigIntegerBuilder = null;
private int currentSize = 0;
private final int initialSize;
private final ProblemAggregator problemAggregator;
/** Creates a new instance of this builder, with the given known result length. */
public InferredIntegerBuilder(int initialSize, ProblemAggregator problemAggregator) {
this.initialSize = initialSize;
this.problemAggregator = problemAggregator;
longBuilder =
NumericBuilder.createLongBuilder(this.initialSize, IntegerType.INT_64, problemAggregator);
}
@Override
public void appendNoGrow(Object o) {
if (o == null) {
appendNulls(1);
} else if (o instanceof BigInteger bi) {
retypeToBigIntegerMaybe();
bigIntegerBuilder.appendNoGrow(bi);
} else {
Long lng = NumericConverter.tryConvertingToLong(o);
if (lng == null) {
throw new IllegalStateException(
"Unexpected value added to InferredIntegerBuilder "
+ o.getClass()
+ ". This is a bug in the Table library.");
} else {
if (bigIntegerBuilder != null) {
bigIntegerBuilder.appendNoGrow(BigInteger.valueOf(lng));
} else {
longBuilder.appendNoGrow(lng);
}
}
}
currentSize++;
}
@Override
public void append(Object o) {
if (o == null) {
appendNulls(1);
} else if (o instanceof BigInteger bi) {
retypeToBigIntegerMaybe();
bigIntegerBuilder.append(bi);
} else {
Long lng = NumericConverter.tryConvertingToLong(o);
if (lng == null) {
throw new IllegalStateException(
"Unexpected value added to InferredIntegerBuilder "
+ o.getClass()
+ ". This is a bug in the Table library.");
} else {
if (bigIntegerBuilder != null) {
bigIntegerBuilder.append(BigInteger.valueOf(lng));
} else {
longBuilder.append(lng);
}
}
}
currentSize++;
}
@Override
public void appendNulls(int count) {
if (bigIntegerBuilder != null) {
bigIntegerBuilder.appendNulls(count);
} else {
longBuilder.appendNulls(count);
}
currentSize += count;
}
@Override
public void appendBulkStorage(Storage<?> storage) {
for (int i = 0; i < storage.size(); i++) {
append(storage.getItemBoxed(i));
}
}
@Override
public int getCurrentSize() {
return currentSize;
}
@Override
public Storage<?> seal() {
if (bigIntegerBuilder != null) {
return bigIntegerBuilder.seal();
} else {
return longBuilder.seal();
}
}
@Override
public StorageType getType() {
if (bigIntegerBuilder != null) {
return BigIntegerType.INSTANCE;
} else {
return IntegerType.INT_64;
}
}
// Retype the LongBuilder to a BigIntegerBuilder, if we haven't already
// done so.
private void retypeToBigIntegerMaybe() {
if (bigIntegerBuilder != null) {
return;
}
bigIntegerBuilder = longBuilder.retypeTo(BigIntegerType.INSTANCE);
longBuilder = null;
}
}

View File

@ -1,53 +0,0 @@
package org.enso.table.data.column.operation.unary;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.function.Function;
import org.enso.base.numeric.Decimal_Utils;
import org.enso.table.data.column.builder.Builder;
import org.enso.table.data.column.builder.InferredBuilder;
import org.enso.table.data.column.operation.UnaryOperation;
import org.enso.table.data.column.operation.map.MapOperationProblemAggregator;
import org.enso.table.data.column.storage.ColumnStorage;
import org.enso.table.data.column.storage.type.BigDecimalType;
public class UnaryDecimalRoundOperation extends AbstractUnaryOperation {
public static final String CEIL = "ceil";
public static final UnaryOperation CEIL_INSTANCE =
new UnaryDecimalRoundOperation(CEIL, Decimal_Utils::ceil);
public static final String FLOOR = "floor";
public static final UnaryOperation FLOOR_INSTANCE =
new UnaryDecimalRoundOperation(FLOOR, Decimal_Utils::floor);
public static String TRUNCATE = "truncate";
public static final UnaryOperation TRUNCATE_INSTANCE =
new UnaryDecimalRoundOperation(TRUNCATE, Decimal_Utils::truncate);
private final Function<BigDecimal, BigInteger> function;
private UnaryDecimalRoundOperation(String name, Function<BigDecimal, BigInteger> function) {
super(name, true);
this.function = function;
}
@Override
public boolean canApply(ColumnStorage storage) {
return storage.getType() instanceof BigDecimalType;
}
@Override
protected final void applyObjectRow(
Object value, Builder builder, MapOperationProblemAggregator problemAggregator) {
applyObjectRow(value, (InferredBuilder) builder, problemAggregator);
}
protected void applyObjectRow(
Object value, InferredBuilder builder, MapOperationProblemAggregator problemAggregator) {
switch (value) {
case BigDecimal d -> builder.append(function.apply(d));
default -> throw new IllegalArgumentException(
"Unsupported type: " + value.getClass() + " (expected decimal).");
}
}
}

View File

@ -1,31 +1,58 @@
package org.enso.table.data.column.operation.unary;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.function.DoubleToLongFunction;
import org.enso.table.data.column.builder.LongBuilder;
import java.util.function.Function;
import org.enso.base.numeric.Decimal_Utils;
import org.enso.table.data.column.builder.Builder;
import org.enso.table.data.column.builder.InferredIntegerBuilder;
import org.enso.table.data.column.operation.UnaryOperation;
import org.enso.table.data.column.operation.map.MapOperationProblemAggregator;
import org.enso.table.data.column.storage.ColumnLongStorage;
import org.enso.table.data.column.storage.ColumnStorage;
import org.enso.table.data.column.storage.type.IntegerType;
import org.enso.table.data.column.storage.numeric.BigIntegerStorage;
public class UnaryRoundOperation extends AbstractUnaryOperation {
// Used to determine whether we should use Double or BigDecimal operations.
// Values outside this range are promoted to the BigDecimal operation,
// because representing their rounded value as a Long might overflow.
public static final double USE_DOUBLE_LIMIT_POSITIVE = 9223372036854775000.0;
public static final double USE_DOUBLE_LIMIT_NEGATIVE = -9223372036854775000.0;
public class UnaryRoundOperation extends AbstractUnaryLongOperation {
public static final String CEIL = "ceil";
public static final UnaryOperation CEIL_INSTANCE =
new UnaryRoundOperation(CEIL, d -> (long) Math.ceil(d));
new UnaryRoundOperation(CEIL, d -> (long) Math.ceil(d), Decimal_Utils::ceil);
public static final String FLOOR = "floor";
public static final UnaryOperation FLOOR_INSTANCE =
new UnaryRoundOperation(FLOOR, d -> (long) Math.floor(d));
new UnaryRoundOperation(FLOOR, d -> (long) Math.floor(d), Decimal_Utils::floor);
public static String TRUNCATE = "truncate";
public static final UnaryOperation TRUNCATE_INSTANCE =
new UnaryRoundOperation(TRUNCATE, d -> (long) d);
new UnaryRoundOperation(TRUNCATE, d -> (long) d, Decimal_Utils::truncate);
private final DoubleToLongFunction function;
private final DoubleToLongFunction doubleFunction;
private final Function<BigDecimal, BigInteger> bigDecimalFunction;
private UnaryRoundOperation(String name, DoubleToLongFunction function) {
super(name, true, IntegerType.INT_64);
this.function = function;
private UnaryRoundOperation(
String name,
DoubleToLongFunction doubleFunction,
Function<BigDecimal, BigInteger> bigDecimalFunction) {
super(name, true);
this.doubleFunction = doubleFunction;
this.bigDecimalFunction = bigDecimalFunction;
}
protected Builder createBuilder(
ColumnStorage storage, MapOperationProblemAggregator problemAggregator) {
if (storage.getSize() > Integer.MAX_VALUE) {
throw new IllegalArgumentException(
"Cannot currently operate on columns larger than " + Integer.MAX_VALUE + ".");
}
return new InferredIntegerBuilder((int) storage.getSize(), problemAggregator);
}
@Override
@ -36,9 +63,9 @@ public class UnaryRoundOperation extends AbstractUnaryLongOperation {
@Override
public ColumnStorage apply(
ColumnStorage storage, MapOperationProblemAggregator problemAggregator) {
if (storage instanceof ColumnLongStorage longStorage) {
// For a long storage, the operation is an identity operation.
return longStorage;
if (storage instanceof ColumnLongStorage || storage instanceof BigIntegerStorage) {
// For an integral type storage, the operation is an identity operation.
return storage;
}
return super.apply(storage, problemAggregator);
@ -46,7 +73,7 @@ public class UnaryRoundOperation extends AbstractUnaryLongOperation {
@Override
protected void applyObjectRow(
Object value, LongBuilder builder, MapOperationProblemAggregator problemAggregator) {
Object value, Builder builder, MapOperationProblemAggregator problemAggregator) {
// Null handled by base class
switch (value) {
case Double d -> {
@ -54,11 +81,16 @@ public class UnaryRoundOperation extends AbstractUnaryLongOperation {
String msg = "Value is " + d;
problemAggregator.reportArithmeticError(msg, builder.getCurrentSize());
builder.appendNulls(1);
} else if (d > USE_DOUBLE_LIMIT_POSITIVE || d < USE_DOUBLE_LIMIT_NEGATIVE) {
builder.append(bigDecimalFunction.apply(BigDecimal.valueOf(d)));
} else {
builder.appendLong(function.applyAsLong(d));
builder.append(doubleFunction.applyAsLong(d));
}
}
case Float f -> applyObjectRow((double) f, builder, problemAggregator);
case BigDecimal bd -> {
builder.append(bigDecimalFunction.apply(bd));
}
case Number n -> applyObjectRow(n.doubleValue(), builder, problemAggregator);
default -> throw new IllegalArgumentException(
"Unsupported type: " + value.getClass() + " (expected numeric type).");

View File

@ -273,7 +273,7 @@ public class ExcelConnectionPool {
// If the initialization succeeds, the POIFSFileSystem will be closed by the
// HSSFWorkbook::close.
yield new HSSFWorkbook(fs);
} catch (Exception e) {
} catch (IOException e) {
fs.close();
throw e;
}

View File

@ -8,6 +8,7 @@ import java.net.URI;
import java.net.URISyntaxException;
import java.nio.channels.Channels;
import java.nio.file.Files;
import java.nio.file.InvalidPathException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
@ -46,7 +47,7 @@ public class HyperReader {
if (!Files.exists(HYPER_PATH)) {
try {
Files.createDirectories(HYPER_PATH);
} catch (Exception e) {
} catch (IOException | UnsupportedOperationException | SecurityException e) {
throw new IOException("Failed to create Hyper directory: " + HYPER_PATH, e);
}
}
@ -75,7 +76,11 @@ public class HyperReader {
"Unsupported platform: " + OSPlatform.CurrentPlatform);
}
}
} catch (Exception e) {
} catch (IOException
| URISyntaxException
| InvalidPathException
| UnsupportedOperationException
| SecurityException e) {
throw new IOException("Failed to download hyperd.", e);
}
@ -100,7 +105,11 @@ public class HyperReader {
}
private static void downloadHyper(String uri, String fileName, boolean setExecutable)
throws IOException, URISyntaxException {
throws IOException,
URISyntaxException,
InvalidPathException,
UnsupportedOperationException,
SecurityException {
LOGGER.log(Level.INFO, "Downloading Hyper from: " + uri);
var hyperdFile = HYPER_PATH.resolve(fileName).toFile();
var url = new URI(uri);

View File

@ -4,6 +4,7 @@ import Standard.Base.Runtime.Ref.Ref
from Standard.Table import Table, Value_Type, Bits
from Standard.Database import all
import Standard.Database.Feature.Feature
from Standard.AWS import Redshift_Details, AWS_Credential
@ -73,8 +74,6 @@ add_database_specs suite_builder create_connection_fn =
default_connection.get.base_connection.create_literal_table (Table.new columns) "literal_table"
materialize = .read
Common_Spec.add_specs suite_builder prefix create_connection_fn default_connection
common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by_unicode_normalization_by_default=True allows_mixed_type_comparisons=False supports_decimal_type=True run_advanced_edge_case_tests_by_default=False
aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last_row_order=False aggregation_problems=False date_support=False
agg_in_memory_table = (enso_project.data / "data.csv") . read
@ -84,7 +83,11 @@ add_database_specs suite_builder create_connection_fn =
empty_agg_table_fn = _->
(agg_in_memory_table.take (..First 0)).select_into_database_table default_connection.get (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True
setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_fn light_table_builder=light_table_builder
is_feature_supported_fn feature:Feature = default_connection.get.dialect.is_feature_supported feature
setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_fn light_table_builder=light_table_builder is_feature_supported=is_feature_supported_fn
Common_Spec.add_specs suite_builder prefix create_connection_fn default_connection setup
add_redshift_specific_specs suite_builder create_connection_fn setup
Common_Table_Operations.Main.add_specs suite_builder setup
IR_Spec.add_specs suite_builder setup prefix default_connection.get

View File

@ -8,10 +8,10 @@ options = Bench.options
type Data
Value ~ints ~floats
Value ~ints ~floats ~large_ints ~large_floats ~decimals
create vector_size faker =
Data.Value (create_ints vector_size faker) (create_floats vector_size faker)
Data.Value (create_ints vector_size faker) (create_floats vector_size faker) (create_large_ints vector_size faker) (create_large_floats vector_size faker) (create_decimals vector_size faker)
create_ints vector_size faker =
@ -24,6 +24,21 @@ create_floats vector_size faker =
Column.from_vector "floats" floats_vec
create_large_ints vector_size faker =
ints_vec = Vector.new vector_size _->(faker.large_integer 60)
Column.from_vector "large_ints" ints_vec
create_large_floats vector_size faker =
floats_vec = Vector.new vector_size _->(faker.float -1000000000000000000000.0 1000000000000000000000.0)
Column.from_vector "large_floats" floats_vec
create_decimals vector_size faker =
decimals_vec = Vector.new vector_size _->(faker.decimal 60)
Column.from_vector "decimals" decimals_vec
collect_benches = Bench.build builder->
vector_size = 5 * 1000 * 1000
## No specific significance to this constant, just fixed to make generated set deterministic
@ -33,18 +48,6 @@ collect_benches = Bench.build builder->
data = Data.create vector_size faker
builder.group "Column_Numeric" options group_builder->
group_builder.specify "round_floats" <|
data.floats.round
group_builder.specify "truncate_floats" <|
data.floats.truncate
group_builder.specify "ceil_floats" <|
data.floats.ceil
group_builder.specify "floor_floats" <|
data.floats.floor
group_builder.specify "round_ints" <|
data.ints.round
@@ -57,16 +60,76 @@ collect_benches = Bench.build builder->
group_builder.specify "floor_ints" <|
data.ints.floor
group_builder.specify "round_floats" <|
data.floats.round
group_builder.specify "truncate_floats" <|
data.floats.truncate
group_builder.specify "ceil_floats" <|
data.floats.ceil
group_builder.specify "floor_floats" <|
data.floats.floor
group_builder.specify "round_large_ints" <|
data.large_ints.round
group_builder.specify "truncate_large_ints" <|
data.large_ints.truncate
group_builder.specify "ceil_large_ints" <|
data.large_ints.ceil
group_builder.specify "floor_large_ints" <|
data.large_ints.floor
## Re-enable when https://github.com/enso-org/enso/issues/11132 is done.
group_builder.specify "round_large_floats" <|
data.large_floats.round
group_builder.specify "truncate_large_floats" <|
data.large_floats.truncate
group_builder.specify "ceil_large_floats" <|
data.large_floats.ceil
group_builder.specify "floor_large_floats" <|
data.large_floats.floor
## Re-enable when https://github.com/enso-org/enso/issues/11132 is done.
group_builder.specify "round_decimals" <|
data.decimals.round
group_builder.specify "truncate_decimals" <|
data.decimals.truncate
group_builder.specify "ceil_decimals" <|
data.decimals.ceil
group_builder.specify "floor_decimals" <|
data.decimals.floor
[True, False].each use_bankers->
[0, -2, 2].map decimal_places->
name = create_name "round_decimal_places_" decimal_places use_bankers
fun x = x.round decimal_places use_bankers
group_builder.specify ("ints_" + name) <|
fun data.ints
group_builder.specify ("floats_" + name) <|
fun data.floats
group_builder.specify ("ints_" + name) <|
fun data.ints
## Re-enable when https://github.com/enso-org/enso/issues/11132 is done.
group_builder.specify ("large_ints_" + name) <|
fun data.large_ints
group_builder.specify ("large_floats_" + name) <|
fun data.large_floats
group_builder.specify ("decimals_" + name) <|
fun data.decimals
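## A hedged aside on the `use_bankers` flag exercised by this loop:
   banker's rounding sends half-way values to the nearest even neighbour
   instead of always away from zero. A tiny sketch assuming the
   `round decimal_places use_bankers` signature used above
   (`bankers_rounding_example` is a hypothetical name, not part of the
   benchmark):
bankers_rounding_example =
    # [3, 2, 4] - only with use_bankers=True do ties go to the even neighbour.
    [2.5.round 0 False, 2.5.round 0 True, 3.5.round 0 True]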
## Creates a valid name for the benchmark

View File

@@ -8,6 +8,7 @@ from Standard.Table.Errors import Invalid_Column_Names, Inexact_Type_Coercion, D
import Standard.Database.DB_Column.DB_Column
import Standard.Database.DB_Table.DB_Table
import Standard.Database.Feature.Feature
import Standard.Database.SQL_Type.SQL_Type
import Standard.Database.Internal.Replace_Params.Replace_Params
from Standard.Database import all
@@ -85,73 +86,113 @@ add_specs suite_builder =
suite_builder.group "[SQLServer] Database tests" pending=message (_-> Nothing)
connection_builder ->
add_sqlserver_specs suite_builder connection_builder
suite_builder.group "[SQLServer] Info" group_builder->
default_connection = Database.connect get_configured_connection_details
data = SQLServer_Info_Data.setup default_connection
default_connection = Database.connect get_configured_connection_details
if default_connection.dialect.is_feature_supported Feature.Integration_Tests then
suite_builder.group "[SQLServer] Info" group_builder->
data = SQLServer_Info_Data.setup default_connection
group_builder.teardown <|
data.teardown
group_builder.teardown <|
data.teardown
group_builder.specify "should return Table information" <|
i = data.t.column_info
i.at "Column" . to_vector . should_equal ["strs", "ints", "bools", "reals", "floats", "tinyints", "smallints", "bigints", "times", "dates", "datetimes", "smalldatetimes", "datetime2s", "datetimeoffsets"]
i.at "Items Count" . to_vector . should_equal [3, 1, 2, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2]
i.at "Value Type" . to_vector . should_equal [Value_Type.Char 255, Value_Type.Integer ..Bits_32, Value_Type.Boolean, Value_Type.Float ..Bits_32, Value_Type.Float, Value_Type.Integer Bits.Bits_16, Value_Type.Integer Bits.Bits_16, Value_Type.Integer Bits.Bits_64, Value_Type.Time, Value_Type.Date, Value_Type.Date_Time False, Value_Type.Date_Time False, Value_Type.Date_Time False, Value_Type.Date_Time True]
group_builder.specify "should return Table information" <|
i = data.t.column_info
i.at "Column" . to_vector . should_equal ["strs", "ints", "bools", "reals", "floats", "tinyints", "smallints", "bigints", "times", "dates", "datetimes", "smalldatetimes", "datetime2s", "datetimeoffsets"]
i.at "Items Count" . to_vector . should_equal [3, 1, 2, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2]
i.at "Value Type" . to_vector . should_equal [Value_Type.Char 255, Value_Type.Integer ..Bits_32, Value_Type.Boolean, Value_Type.Float ..Bits_32, Value_Type.Float, Value_Type.Integer Bits.Bits_16, Value_Type.Integer Bits.Bits_16, Value_Type.Integer Bits.Bits_64, Value_Type.Time, Value_Type.Date, Value_Type.Date_Time False, Value_Type.Date_Time False, Value_Type.Date_Time False, Value_Type.Date_Time True]
group_builder.specify "should return Table information, also for aggregated results" <|
i = data.t.aggregate columns=[Aggregate_Column.Sum "ints", Aggregate_Column.Count_Distinct "bools"] . column_info
i.at "Column" . to_vector . should_equal ["Sum ints", "Count Distinct bools"]
i.at "Items Count" . to_vector . should_equal [1, 1]
i.at "Value Type" . to_vector . should_equal [Value_Type.Integer ..Bits_32, Value_Type.Integer ..Bits_32]
group_builder.specify "should return Table information, also for aggregated results" <|
i = data.t.aggregate columns=[Aggregate_Column.Sum "ints", Aggregate_Column.Count_Distinct "bools"] . column_info
i.at "Column" . to_vector . should_equal ["Sum ints", "Count Distinct bools"]
i.at "Items Count" . to_vector . should_equal [1, 1]
i.at "Value Type" . to_vector . should_equal [Value_Type.Integer ..Bits_32, Value_Type.Integer ..Bits_32]
group_builder.specify "should infer standard types correctly" <|
data.t.at "strs" . value_type . is_text . should_be_true
data.t.at "ints" . value_type . is_integer . should_be_true
data.t.at "bools" . value_type . is_boolean . should_be_true
data.t.at "floats" . value_type . is_floating_point . should_be_true
group_builder.specify "should infer standard types correctly" <|
data.t.at "strs" . value_type . is_text . should_be_true
data.t.at "ints" . value_type . is_integer . should_be_true
data.t.at "bools" . value_type . is_boolean . should_be_true
data.t.at "floats" . value_type . is_floating_point . should_be_true
group_builder.specify "should preserve SQLServer types when table is materialized, where possible" pending="TODO" <|
name = Name_Generator.random_name "types-test"
Problems.assume_no_problems <|
data.connection.execute_update 'CREATE TABLE "#'+name+'" ("int4" int4, "int2" int2, "txt-limited" varchar(10), "txt-fixed" char(3))'
t1 = data.connection.query (SQL_Query.Table_Name name)
t1.at "int4" . value_type . should_equal (Value_Type.Integer Bits.Bits_32)
t1.at "int2" . value_type . should_equal (Value_Type.Integer Bits.Bits_16)
t1.at "txt-limited" . value_type . should_equal (Value_Type.Char size=10 variable_length=True)
t1.at "txt-fixed" . value_type . should_equal (Value_Type.Char size=3 variable_length=False)
group_builder.specify "should preserve SQLServer types when table is materialized, where possible" pending="TODO" <|
name = Name_Generator.random_name "types-test"
Problems.assume_no_problems <|
data.connection.execute_update 'CREATE TABLE "#'+name+'" ("int4" int4, "int2" int2, "txt-limited" varchar(10), "txt-fixed" char(3))'
t1 = data.connection.query (SQL_Query.Table_Name name)
t1.at "int4" . value_type . should_equal (Value_Type.Integer Bits.Bits_32)
t1.at "int2" . value_type . should_equal (Value_Type.Integer Bits.Bits_16)
t1.at "txt-limited" . value_type . should_equal (Value_Type.Char size=10 variable_length=True)
t1.at "txt-fixed" . value_type . should_equal (Value_Type.Char size=3 variable_length=False)
in_memory = t1.read
in_memory.at "int4" . value_type . should_equal (Value_Type.Integer Bits.Bits_32)
in_memory.at "int2" . value_type . should_equal (Value_Type.Integer Bits.Bits_16)
in_memory.at "txt-limited" . value_type . should_equal (Value_Type.Char size=10 variable_length=True)
in_memory.at "txt-fixed" . value_type . should_equal (Value_Type.Char size=3 variable_length=False)
in_memory = t1.read
in_memory.at "int4" . value_type . should_equal (Value_Type.Integer Bits.Bits_32)
in_memory.at "int2" . value_type . should_equal (Value_Type.Integer Bits.Bits_16)
in_memory.at "txt-limited" . value_type . should_equal (Value_Type.Char size=10 variable_length=True)
in_memory.at "txt-fixed" . value_type . should_equal (Value_Type.Char size=3 variable_length=False)
group_builder.specify "test datetime2 precision round trip" <|
name = "#" + (Name_Generator.random_name "datetime2-test")
Problems.assume_no_problems <|
data.connection.execute 'CREATE TABLE "'+name+'" ("dt2" DATETIME2)'
t = data.connection.query (SQL_Query.Table_Name name)
row1 = [Date_Time.new 2021 1 1 12 13 14 500 1 1]
row2 = [Date_Time.new 2021 1 1 9 12 12 987 654 321]
row3 = [Nothing]
source_table = Table.from_rows ["dt2"] [row1, row2, row3]
t.update_rows source_table update_action=Update_Action.Insert
## SQLServer only supports precision to 100 nanoseconds
expected_row1 = [Date_Time.new 2021 1 1 12 13 14 500 1 0]
expected_row2 = [Date_Time.new 2021 1 1 9 12 12 987 654 300]
expected_row3 = [Nothing]
expected_table = Table.from_rows ["dt2"] [expected_row1, expected_row2, expected_row3]
returned_table = t.read
returned_table.should_equal expected_table
data.connection.execute 'DROP TABLE "'+name+'"'
group_builder.specify "test datetime2 precision round trip" <|
name = "#" + (Name_Generator.random_name "datetime2-test")
Problems.assume_no_problems <|
data.connection.execute 'CREATE TABLE "'+name+'" ("dt2" DATETIME2)'
t = data.connection.query (SQL_Query.Table_Name name)
row1 = [Date_Time.new 2021 1 1 12 13 14 500 1 1]
row2 = [Date_Time.new 2021 1 1 9 12 12 987 654 321]
row3 = [Nothing]
source_table = Table.from_rows ["dt2"] [row1, row2, row3]
t.update_rows source_table update_action=Update_Action.Insert
## SQLServer only supports precision to 100 nanoseconds
expected_row1 = [Date_Time.new 2021 1 1 12 13 14 500 1 0]
expected_row2 = [Date_Time.new 2021 1 1 9 12 12 987 654 300]
expected_row3 = [Nothing]
expected_table = Table.from_rows ["dt2"] [expected_row1, expected_row2, expected_row3]
returned_table = t.read
returned_table.should_equal expected_table
data.connection.execute 'DROP TABLE "'+name+'"'
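## A hedged aside: the expected values above follow from plain integer
   arithmetic, since DATETIME2 stores time in 100 ns ticks
   (`datetime2_tick_example` is a hypothetical name, not part of the suite):
datetime2_tick_example =
    ns = 987654321
    # Truncate to whole 100 ns ticks: 987654321 -> 987654300 (expected_row2).
    (ns.div 100) * 100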
type Lazy_Ref
Value ~get
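## A hedged note on this helper: the `~` marks `get` as a suspended
   constructor argument, so `Lazy_Ref.Value (create_connection_fn Nothing)`
   defers creating the connection until `.get` is first accessed.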
## PRIVATE
supported_replace_params : Hashset Replace_Params
supported_replace_params =
e0 = [Replace_Params.Value Text Case_Sensitivity.Default False, Replace_Params.Value Text Case_Sensitivity.Default True, Replace_Params.Value Text Case_Sensitivity.Sensitive False]
e1 = [Replace_Params.Value Text Case_Sensitivity.Sensitive True, Replace_Params.Value Text Case_Sensitivity.Insensitive False, Replace_Params.Value Text Case_Sensitivity.Insensitive True]
e2 = [Replace_Params.Value Regex Case_Sensitivity.Default False, Replace_Params.Value Regex Case_Sensitivity.Default True, Replace_Params.Value Regex Case_Sensitivity.Sensitive False]
e3 = [Replace_Params.Value Regex Case_Sensitivity.Sensitive True, Replace_Params.Value Regex Case_Sensitivity.Insensitive False, Replace_Params.Value Regex Case_Sensitivity.Insensitive True]
e4 = [Replace_Params.Value DB_Column Case_Sensitivity.Default False, Replace_Params.Value DB_Column Case_Sensitivity.Sensitive False]
Hashset.from_vector <| e0 + e1 + e2 + e3 + e4
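## A hedged reading of the sets above: each `Replace_Params.Value` triple
   appears to pair a pattern type (Text, Regex or DB_Column) with a
   Case_Sensitivity mode and a trailing Boolean (presumably an
   only-first/replace-all toggle); combinations outside this Hashset are
   the ones the replace tests expect the backend to reject.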
add_sqlserver_specs suite_builder create_connection_fn =
prefix = "[SQLServer] "
name_counter = Ref.new 0
default_connection = Lazy_Ref.Value (create_connection_fn Nothing)
Common_Spec.add_specs suite_builder prefix create_connection_fn default_connection
table_builder columns connection=Nothing =
ix = name_counter.get
name_counter . put ix+1
name = Name_Generator.random_name "table_"+ix.to_text
in_mem_table = Table.new columns
in_mem_table.select_into_database_table (connection.if_nothing default_connection.get) name primary_key=Nothing temporary=True
light_table_builder columns =
default_connection.get.base_connection.create_literal_table (Table.new columns) "literal_table"
materialize = .read
common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by_unicode_normalization_by_default=True allows_mixed_type_comparisons=False text_length_limited_columns=True fixed_length_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=True char_max_size_after_substring=..Reset supports_decimal_type=True supported_replace_params=supported_replace_params run_advanced_edge_case_tests_by_default=True supports_date_time_without_timezone=False date_time=False is_nan_comparable=True
aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last_row_order=False aggregation_problems=False
agg_in_memory_table = (enso_project.data / "data.csv") . read
agg_table_fn = _->
agg_in_memory_table.select_into_database_table default_connection.get (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True
empty_agg_table_fn = _->
(agg_in_memory_table.take (..First 0)).select_into_database_table default_connection.get (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True
is_feature_supported_fn feature:Feature = default_connection.get.dialect.is_feature_supported feature
setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_fn light_table_builder=light_table_builder is_feature_supported=is_feature_supported_fn
Common_Spec.add_specs suite_builder prefix create_connection_fn default_connection setup
Common_Table_Operations.Main.add_specs suite_builder setup
main filter=Nothing =
suite = Test.build suite_builder->

View File

@@ -8,6 +8,7 @@ from Standard.Table.Errors import Invalid_Column_Names, Inexact_Type_Coercion, D
import Standard.Database.DB_Column.DB_Column
import Standard.Database.DB_Table.DB_Table
import Standard.Database.Feature.Feature
import Standard.Database.SQL_Type.SQL_Type
import Standard.Database.Internal.Replace_Params.Replace_Params
from Standard.Database import all
@@ -530,8 +531,6 @@ add_snowflake_specs suite_builder create_connection_fn db_name =
default_connection.get.base_connection.create_literal_table (Table.new columns) "literal_table"
materialize = .read
Common_Spec.add_specs suite_builder prefix create_connection_fn default_connection
common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by_unicode_normalization_by_default=True allows_mixed_type_comparisons=False text_length_limited_columns=True fixed_length_text_columns=False different_size_integer_types=False removes_trailing_whitespace_casting_from_char_to_varchar=False supports_decimal_type=True supported_replace_params=supported_replace_params run_advanced_edge_case_tests_by_default=False supports_date_time_without_timezone=True supports_nanoseconds_in_time=True is_nan_comparable=True distinct_returns_first_row_from_group_if_ordered=False
aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last_row_order=False aggregation_problems=False text_concat=False
agg_in_memory_table = ((Project_Description.new enso_dev.Table_Tests).data / "data.csv") . read
@@ -542,8 +541,11 @@ add_snowflake_specs suite_builder create_connection_fn db_name =
empty_agg_table_fn = _->
(agg_in_memory_table.take (..First 0)).select_into_database_table default_connection.get (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True
setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_fn light_table_builder=light_table_builder is_integer_type=is_snowflake_integer
is_feature_supported_fn feature:Feature = default_connection.get.dialect.is_feature_supported feature
setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_fn light_table_builder=light_table_builder is_integer_type=is_snowflake_integer is_feature_supported=is_feature_supported_fn
Common_Spec.add_specs suite_builder prefix create_connection_fn default_connection setup
snowflake_specific_spec suite_builder default_connection db_name setup
Common_Table_Operations.Main.add_specs suite_builder setup
Upload_Spec.add_specs suite_builder setup create_connection_fn

View File

@@ -3,6 +3,8 @@ import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Database.Extensions.Upload_Database_Table
import Standard.Database.Extensions.Upload_In_Memory_Table
import Standard.Database.Feature.Feature
from Standard.Database.Errors import all
from Standard.Table import Sort_Column, Aggregate_Column
from Standard.Table.Errors import Missing_Input_Columns, Duplicate_Output_Column_Names, Floating_Point_Equality
@@ -27,8 +29,16 @@ type Data
teardown self =
self.connection.close
add_specs suite_builder setup =
if setup.is_feature_supported Feature.Filter then (add_row_number_specs suite_builder setup) else
suite_builder.group setup.prefix+"Table.add_row_number" group_builder->
group_builder.specify "add_row_number should report unsupported" <|
table_builder = setup.light_table_builder
t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [100, 3, Nothing, 4, 12]], ["Y", [100, 4, 2, Nothing, 11]]]
t2 = t.add_row_number
t2.should_fail_with (Unsupported_Database_Operation.Error "add_row_number")
add_row_number_specs suite_builder setup =
prefix = setup.prefix
materialize = setup.materialize
create_connection_fn = setup.create_connection_func

View File

@@ -5,6 +5,7 @@ from Standard.Table.Aggregate_Column.Aggregate_Column import all
import Standard.Table.Expression.Expression_Error
from Standard.Table.Errors import all
import Standard.Database.Feature.Feature
from Standard.Database.Errors import Unsupported_Database_Operation, Aggregagtion_Requires_Order
from Standard.Test import all
@@ -35,8 +36,17 @@ type Data
type Lazy_Ref
Value ~get
## Runs the common aggregate tests.
add_specs suite_builder setup =
if setup.is_feature_supported Feature.Aggregate then (add_aggregate_specs suite_builder setup) else
suite_builder.group setup.prefix+"Table.select_columns" group_builder->
group_builder.specify "aggregate should report unsupported" <|
table_builder = setup.light_table_builder
t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [100, 3, Nothing, 4, 12]], ["Y", [100, 4, 2, Nothing, 11]]]
grouped = t.aggregate columns=[Count]
grouped.should_fail_with (Unsupported_Database_Operation.Error "aggregate")
## Runs the common aggregate tests.
add_aggregate_specs suite_builder setup =
prefix = setup.prefix
table_fn = setup.table_fn
empty_table_fn = setup.empty_table_fn

View File

@@ -0,0 +1,84 @@
from Standard.Base import all
from Standard.Table import all hiding Table
from Standard.Table.Errors import No_Common_Type
from Standard.Database.Errors import Integrity_Error
from Standard.Test import all
from project.Common_Table_Operations.Util import run_default_backend
main filter=Nothing = run_default_backend add_specs filter
add_specs suite_builder setup =
if setup.is_feature_supported ..Column_Operations then (add_coalesce_specs suite_builder setup)
add_coalesce_specs suite_builder setup =
prefix = setup.prefix
table_builder = setup.table_builder
suite_builder.group prefix+"Table.coalesce" group_builder->
group_builder.specify "2 columns" <|
t = table_builder [["A", [1, 2, Nothing, Nothing]], ["B", [3, Nothing, 4, Nothing]]]
colA = t.get "A"
colB = t.get "B"
result = colA.coalesce colB
result.to_vector . should_equal [1, 2, 4, Nothing]
group_builder.specify "2 columns passing second as vector" <|
t = table_builder [["A", [1, 2, Nothing, Nothing]], ["B", [3, Nothing, 4, Nothing]]]
colA = t.get "A"
colB = t.get "B"
result = colA.coalesce [colB]
result.to_vector . should_equal [1, 2, 4, Nothing]
group_builder.specify "2 columns passing second and third as vector" <|
t = table_builder [["A", [1, 2, Nothing, Nothing]], ["B", [3, Nothing, 4, Nothing]], ["C", [5, 6, 7, 8]]]
colA = t.get "A"
colB = t.get "B"
colC = t.get "C"
result = colA.coalesce [colB, colC]
result.to_vector . should_equal [1, 2, 4, 8]
group_builder.specify "column and constant" <|
t = table_builder [["A", [1, 2, Nothing, Nothing]]]
colA = t.get "A"
result = colA.coalesce 42
result.to_vector . should_equal [1, 2, 42, 42]
group_builder.specify "column and constant and column" <|
t = table_builder [["A", [1, 2, Nothing, Nothing]], ["B", [3, Nothing, 4, Nothing]]]
colA = t.get "A"
colB = t.get "B"
result = colA.coalesce [42, colB]
result.to_vector . should_equal [1, 2, 42, 42]
group_builder.specify "2 columns of diffferent types" <|
t = table_builder [["A", [1, 2, Nothing, Nothing]], ["B", ["3", Nothing, "4", Nothing]]]
colA = t.get "A"
colB = t.get "B"
result = colA.coalesce colB
result.should_fail_with No_Common_Type
group_builder.specify "2 columns from different tables only works In-Memory" <|
t1 = table_builder [["A", [1, 2, Nothing, Nothing]], ["B", [Nothing, Nothing, Nothing, 99]]]
t2 = table_builder [["A", [99, Nothing, Nothing, Nothing]], ["B", [3, Nothing, 4, Nothing]]]
colA = t1.get "A"
colB = t2.get "B"
result = colA.coalesce colB
result2 = colB.coalesce colA
case setup.is_database of
True ->
result.should_fail_with Integrity_Error
result2.should_fail_with Integrity_Error
False ->
result.to_vector . should_equal [1, 2, 4, Nothing]
result2.to_vector . should_equal [3, 2, 4, Nothing]
group_builder.specify "2 columns from different length tables only works In-Memory" <|
t1 = table_builder [["A", [1, 2, Nothing, Nothing, 3]], ["B", [Nothing, Nothing, Nothing, 99, 99]]]
t2 = table_builder [["A", [99, Nothing, Nothing, Nothing]], ["B", [3, Nothing, 4, Nothing]]]
colA = t1.get "A"
colB = t2.get "B"
result = colA.coalesce colB
result2 = colB.coalesce colA
case setup.is_database of
True ->
result.should_fail_with Integrity_Error
result2.should_fail_with Integrity_Error
False ->
result.to_vector . should_equal [1, 2, 4, Nothing, 3]
result2.to_vector . should_equal [3, 2, 4, Nothing]
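## Since this spec file is new in this diff, a minimal in-memory sketch of
   the semantics it pins down: `coalesce` picks, row by row, the first
   non-Nothing value among the column and its arguments
   (`coalesce_example` is a hypothetical name; values mirror the specs):
coalesce_example =
    a = Column.from_vector "A" [1, 2, Nothing, Nothing]
    b = Column.from_vector "B" [3, Nothing, 4, Nothing]
    # Falls back to "B" where "A" is Nothing; stays Nothing if both are.
    a.coalesce b . to_vector . should_equal [1, 2, 4, Nothing]
    # A constant fallback fills every remaining hole.
    a.coalesce 42 . to_vector . should_equal [1, 2, 42, 42]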

View File

@@ -5,6 +5,7 @@ from Standard.Table import all hiding Table
from Standard.Table.Errors import Clashing_Column_Name, Duplicate_Output_Column_Names
from Standard.Database.Errors import Unsupported_Database_Operation
import Standard.Database.Feature.Feature
from Standard.Test import all
@@ -21,8 +22,10 @@ type Data
teardown self =
self.connection.close
add_specs suite_builder setup =
if setup.is_feature_supported Feature.Integration_Tests then (add_column_name_edge_cases_specs suite_builder setup)
add_column_name_edge_cases_specs suite_builder setup =
materialize = setup.materialize
create_connection_fn = setup.create_connection_func
is_case_sensitive = setup.test_selection.supports_case_sensitive_columns

View File

@@ -12,6 +12,7 @@ from Standard.Table import Bits, Value_Type, Column_Ref, Column
from Standard.Table.Errors import all
from Standard.Database.Errors import all
import Standard.Database.Feature.Feature
import Standard.Database.DB_Table.DB_Table
from Standard.Test import all
@@ -145,6 +146,20 @@ type Connection_Data
self.connection.close
add_specs suite_builder setup =
if setup.is_feature_supported Feature.Column_Operations then (add_column_operation_specs suite_builder setup) else
suite_builder.group setup.prefix+"(Column_Operations_Spec)" group_builder->
group_builder.specify "at should report unsupported" <|
table_builder = setup.light_table_builder
t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [100, 3, Nothing, 4, 12]], ["Y", [100, 4, 2, Nothing, 11]]]
t2 = t.at "X"
t2.should_fail_with (Unsupported_Database_Operation.Error "at")
group_builder.specify "get should report unsupported" <|
table_builder = setup.light_table_builder
t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [100, 3, Nothing, 4, 12]], ["Y", [100, 4, 2, Nothing, 11]]]
t2 = t.get "X"
t2.should_fail_with (Unsupported_Database_Operation.Error "get")
add_column_operation_specs suite_builder setup =
prefix = setup.prefix
table_builder = setup.table_builder
create_connection_fn = setup.create_connection_func
@@ -1138,6 +1153,27 @@ add_specs suite_builder setup =
table = table_builder [["x", [0, 3, -3, 1, -2]]]
table.at "x" . round 16 . should_fail_with Illegal_Argument
if setup.test_selection.supports_decimal_type then
group_builder.specify "ceil, floor and truncate should work correctly on Integers outside the java Long range" <|
positive_values = [9223372036854775806, 9223372036854775807, 9223372036854775808, 9223372036854775809, 9223372036854775807000000]
values = positive_values + positive_values.map .negate
values.map x->
c = table_builder [["x", [x, -x]]] . at "x"
c.ceil . to_vector . should_equal [x, -x]
c.floor . to_vector . should_equal [x, -x]
c.truncate . to_vector . should_equal [x, -x]
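## A hedged aside: 9223372036854775807 is 2^63 - 1 (`Long.MAX_VALUE`), so
   the vector above deliberately straddles the 64-bit boundary where values
   are promoted to big integers; on integers `ceil`, `floor` and `truncate`
   are identities, which is exactly what these round-trip assertions check.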
if setup.is_database.not then
group_builder.specify "ceil, floor and truncate should work correctly on Floats outside the java Long range" <|
positive_values = [9223372036854775000.0, 9223372036854776000.0, 9223372036854775807000000.0]
values = positive_values + positive_values.map .negate
values.map x->
x_int = x.truncate
c = table_builder [["x", [x, -x]]] . at "x"
c.ceil . to_vector . should_equal [x_int, -x_int]
c.floor . to_vector . should_equal [x_int, -x_int]
c.truncate . to_vector . should_equal [x_int, -x_int]
if setup.test_selection.supports_decimal_type then
group_builder.specify "ceil, floor and truncate should work correctly on Decimals" <|
c = table_builder [["X", [Decimal.new "123492233720368547758075678.25", Decimal.new "179243023788662739454197523.625", Decimal.new "-123492233720368547758075678.25", Decimal.new "-179243023788662739454197523.625"]]] . at "X"

View File

@@ -6,6 +6,8 @@ from Standard.Table.Errors import Loss_Of_Decimal_Precision, Loss_Of_Integer_Pre
from Standard.Database.Errors import Unsupported_Database_Operation, Unsupported_Database_Type
import Standard.Database.Feature.Feature
from Standard.Test import all
from project.Common_Table_Operations.Util import run_default_backend, build_sorted_table
@@ -24,6 +26,9 @@ type Lazy_Ref
Value ~get
add_specs suite_builder setup =
if setup.is_feature_supported Feature.Column_Operations then (add_conversion_specs suite_builder setup)
add_conversion_specs suite_builder setup =
prefix = setup.prefix
materialize = setup.materialize
supports_dates = setup.test_selection.date_time

View File

@@ -9,6 +9,7 @@ import Standard.Table.Expression.Expression_Error
from Standard.Database import all
import Standard.Database.Internal.Internals_Access
from Standard.Database.Errors import Integrity_Error
import Standard.Database.Feature.Feature
from Standard.Test import all
@@ -49,6 +50,9 @@ type Read_Data
[t_big, t_small]
add_specs suite_builder setup =
if setup.is_feature_supported Feature.Integration_Tests then (add_core_specs suite_builder setup)
add_core_specs suite_builder setup =
prefix = setup.prefix
build_sorted_table = Util.build_sorted_table setup
data = Data.setup build_sorted_table

View File

@@ -522,12 +522,12 @@ add_specs suite_builder setup =
if setup.test_selection.date_time.not then
suite_builder.group prefix+"partial Date-Time support" group_builder->
group_builder.specify "will fail when uploading a Table containing Dates" <|
group_builder.specify "will fail when uploading a Table containing Dates" pending="TODO" <|
d = Date.new 2020 10 24
table = table_builder [["A", [d]], ["X", [123]]]
table.should_fail_with Unsupported_Database_Type
group_builder.specify "should report a type error when date operations are invoked on a non-date column" <|
group_builder.specify "should report a type error when date operations are invoked on a non-date column" pending="TODO" <|
t = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]], ["C", [True, False, True]]]
r1 = t.at "A" . year
r1.should_fail_with Invalid_Value_Type

View File

@@ -6,6 +6,9 @@ import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
from Standard.Table import all
from Standard.Table.Errors import all
from Standard.Database.Errors import all
import Standard.Database.Feature.Feature
from Standard.Test import all
@@ -14,6 +17,15 @@ from project.Common_Table_Operations.Util import all
main filter=Nothing = run_default_backend add_specs filter
add_specs suite_builder setup =
if setup.is_feature_supported Feature.Set then (add_derived_columns_specs suite_builder setup) else
suite_builder.group setup.prefix+"(Derived_Columns_Spec) Table.set" group_builder->
group_builder.specify "set should report unsupported" <|
table_builder = setup.light_table_builder
t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]]
t2 = t1.set (Simple_Expression.Simple_Expr (Column_Ref.Name "A") Simple_Calculation.Copy) "C"
t2.should_fail_with (Unsupported_Database_Operation.Error "set")
add_derived_columns_specs suite_builder setup =
prefix = setup.prefix
table_builder = setup.light_table_builder
pending_datetime = if setup.test_selection.date_time.not then "Date/Time operations are not supported by this backend."

View File

@@ -5,6 +5,8 @@ from Standard.Table.Errors import all
from Standard.Test import all
from Standard.Database.Errors import all
import Standard.Database.Feature.Feature
from project.Common_Table_Operations.Util import run_default_backend
@@ -19,8 +21,16 @@ type Data
teardown self =
self.connection.close
add_specs suite_builder setup =
if setup.is_feature_supported Feature.Distinct then (add_distinct_specs suite_builder setup) else
suite_builder.group setup.prefix+"Table.distinct" group_builder->
group_builder.specify "distinct should report unsupported" <|
table_builder = setup.light_table_builder
t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]]
t2 = t1.distinct ["X"]
t2.should_fail_with (Unsupported_Database_Operation.Error "distinct")
add_distinct_specs suite_builder setup =
materialize = setup.materialize
create_connection_fn = setup.create_connection_func
suite_builder.group setup.prefix+"Table.distinct" group_builder->

View File

@@ -8,6 +8,7 @@ from Standard.Table.Errors import all
import Standard.Table.Expression.Expression_Error
from Standard.Database.Errors import SQL_Error
import Standard.Database.Feature.Feature
from Standard.Test import all
@@ -20,6 +21,9 @@ type Lazy_Ref
Value ~get
add_specs suite_builder detailed setup =
if setup.is_feature_supported Feature.Column_Operations then (add_expression_specs suite_builder detailed setup)
add_expression_specs suite_builder detailed setup =
prefix = setup.prefix
table_builder = build_sorted_table setup
column_a_description = ["A", [1, 2, 3, 4, 5]]

View File

@@ -11,7 +11,8 @@ from Standard.Table.Errors import all
import Standard.Table.Expression.Expression_Error
import Standard.Database.Internal.IR.SQL_Expression.SQL_Expression
from Standard.Database.Errors import SQL_Error
import Standard.Database.Feature.Feature
from Standard.Database.Errors import all
from Standard.Test import all
@@ -21,12 +22,20 @@ from project.Common_Table_Operations.Util import run_default_backend
main filter=Nothing = run_default_backend add_specs filter
add_specs suite_builder setup =
if setup.is_feature_supported Feature.Filter then (add_filter_specs suite_builder setup) else
suite_builder.group setup.prefix+"Table.filter" group_builder->
group_builder.specify "filter should report unsupported" <|
table_builder = setup.light_table_builder
t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [100, 3, Nothing, 4, 12]], ["Y", [100, 4, 2, Nothing, 11]]]
t2 = t.filter "X" (Filter_Condition.Less than=10)
t2.should_fail_with (Unsupported_Database_Operation.Error "filter")
## Currently these tests rely on filtering preserving the insertion ordering
within tables. This is not necessarily guaranteed by an RDBMS, so we may adapt
this in the future. For now we implicitly assume the ordering is preserved,
as that seems to be the case.
add_specs suite_builder setup =
add_filter_specs suite_builder setup =
prefix = setup.prefix
test_selection = setup.test_selection
table_builder = setup.light_table_builder

View File

@@ -5,7 +5,7 @@ from Standard.Table import all hiding Table
from Standard.Test import all
import Standard.Database.Feature.Feature
from project.Common_Table_Operations.Util import run_default_backend
from project.Common_Table_Operations.Core_Spec import weird_names
@@ -21,8 +21,10 @@ type Data
teardown self =
self.connection.close
add_specs suite_builder setup =
if setup.is_feature_supported Feature.Integration_Tests then (add_integration_specs suite_builder setup)
add_integration_specs suite_builder setup =
create_connection_fn = setup.create_connection_func
materialize = setup.materialize
suite_builder.group setup.prefix+" Interactions Between various operations" group_builder->

View File

@@ -8,7 +8,8 @@ from Standard.Table.Errors import all
import Standard.Table.Internal.Join_Kind_Cross.Join_Kind_Cross
from Standard.Database import all
from Standard.Database.Errors import Unsupported_Database_Operation, Integrity_Error
import Standard.Database.Feature.Feature
from Standard.Database.Errors import all
from Standard.Test import all
@@ -18,6 +19,16 @@ type Lazy_Ref
Value ~get
add_specs suite_builder setup =
if setup.is_feature_supported Feature.Cross_Join then (add_cross_join_specs suite_builder setup) else
suite_builder.group setup.prefix+"Table.cross_join" group_builder->
group_builder.specify "cross_join should report unsupported" <|
table_builder = setup.light_table_builder
t1 = table_builder [['x', [1, 2, 3, 4, 2]], ['y', ['a', 'b', 'c', 'd', 'e']]]
t2 = table_builder [['x', [2, 1, 4, 3]], ['z', [20, 10, 40, 30]]]
t3 = t1.cross_join t2
t3.should_fail_with (Unsupported_Database_Operation.Error "cross_join")
add_cross_join_specs suite_builder setup =
prefix = setup.prefix
materialize = setup.materialize
table_builder = setup.table_builder

View File

@@ -6,7 +6,8 @@ from Standard.Table import all
from Standard.Table.Errors import all
from Standard.Database import all
from Standard.Database.Errors import Unsupported_Database_Operation, Integrity_Error
from Standard.Database.Errors import all
import Standard.Database.Feature.Feature
from Standard.Test import all
@@ -41,6 +42,16 @@ type Data
[t1, t2, t3, t4]
add_specs suite_builder setup =
if setup.is_feature_supported Feature.Join then (add_join_specs suite_builder setup) else
suite_builder.group setup.prefix+"Table.join" group_builder->
group_builder.specify "join should report unsupported" <|
table_builder = setup.light_table_builder
t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]]
t2 = table_builder [["Z", [2, 3, 2, 4]], ["W", [4, 5, 6, 7]]]
t3 = t1.join t2
t3.should_fail_with (Unsupported_Database_Operation.Error "join")
add_join_specs suite_builder setup =
prefix = setup.prefix
table_builder = setup.table_builder
materialize = setup.materialize

View File

@@ -5,7 +5,8 @@ from Standard.Table import all
from Standard.Table.Errors import all
from Standard.Database import all
from Standard.Database.Errors import Unsupported_Database_Operation, Invariant_Violation, Integrity_Error
import Standard.Database.Feature.Feature
from Standard.Database.Errors import all
from Standard.Test import all
@@ -14,8 +15,17 @@ import project.Util
main filter=Nothing = run_default_backend add_specs filter
add_specs suite_builder setup =
if setup.is_feature_supported Feature.Merge then (add_merge_specs suite_builder setup) else
suite_builder.group setup.prefix+"Table.merge" group_builder->
group_builder.specify "merge should report unsupported" <|
table_builder = setup.light_table_builder
t1 = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]]
t2 = table_builder [["Y", ["A", "B", "A"]], ["X", [1, 2, 3]]]
t3 = t1.merge t2 key_columns="X"
t3.should_fail_with (Unsupported_Database_Operation.Error "merge")
add_merge_specs suite_builder setup =
prefix = setup.prefix
materialize = setup.materialize
table_builder = setup.table_builder

View File

@@ -6,6 +6,8 @@ from Standard.Table import all
from Standard.Table.Errors import all
from Standard.Database import all
import Standard.Database.Feature.Feature
from Standard.Database.Errors import all
from Standard.Test import all
@@ -18,6 +20,16 @@ type Lazy_Ref
Value ~get
add_specs suite_builder setup =
if setup.is_feature_supported Feature.Replace then (add_replace_specs suite_builder setup) else
suite_builder.group setup.prefix+"Table.replace" group_builder->
group_builder.specify "replace should report unsupported" <|
table_builder = setup.light_table_builder
t1 = table_builder [['x', [1, 2, 3, 4, 2]], ['y', ['a', 'b', 'c', 'd', 'e']]]
t2 = table_builder [['x', [2, 1, 4, 3]], ['z', [20, 10, 40, 30]]]
t3 = t1.replace t2 'x'
t3.should_fail_with (Unsupported_Database_Operation.Error "replace")
add_replace_specs suite_builder setup =
prefix = setup.prefix
suite_builder.group prefix+"Table.replace" group_builder->
table_builder = build_sorted_table setup

View File

@@ -6,7 +6,8 @@ from Standard.Table import all
from Standard.Table.Errors import all
from Standard.Database import all
from Standard.Database.Errors import Unsupported_Database_Operation, Integrity_Error
from Standard.Database.Errors import all
import Standard.Database.Feature.Feature
from Standard.Test import all
@@ -31,6 +32,16 @@ call_member_union tables =
first.union rest ...
add_specs suite_builder setup =
if setup.is_feature_supported Feature.Union then (add_union_specs suite_builder setup) else
suite_builder.group setup.prefix+"Table.union" group_builder->
group_builder.specify "union should report unsupported" <|
table_builder = setup.light_table_builder
t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]]
t2 = table_builder [["Z", [2, 3, 2, 4]], ["W", [4, 5, 6, 7]]]
t3 = t1.union t2
t3.should_fail_with (Unsupported_Database_Operation.Error "union")
add_union_specs suite_builder setup =
prefix = setup.prefix
suite_builder.group prefix+"Table.union" group_builder->

View File

@@ -6,6 +6,7 @@ from Standard.Test import Test
import project.Common_Table_Operations.Add_Row_Number_Spec
import project.Common_Table_Operations.Aggregate_Spec
import project.Common_Table_Operations.Coalesce_Spec
import project.Common_Table_Operations.Column_Name_Edge_Cases_Spec
import project.Common_Table_Operations.Column_Operations_Spec
import project.Common_Table_Operations.Core_Spec
@@ -68,7 +69,7 @@ type Test_Setup
Note that it has fewer guarantees about column types, but is faster.
- is_integer_type: A predicate specifying if the given backend treats the
given type as an integer type. See `SQL_Type_Mapping.is_integer_type` for more information.
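- is_feature_supported: A predicate specifying if the given backend
supports the given `Feature`; spec modules use it to gate whole groups.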
Config prefix table_fn empty_table_fn (table_builder : (Vector Any -> (Any|Nothing)) -> Any) materialize is_database test_selection aggregate_test_selection create_connection_func light_table_builder is_integer_type=(.is_integer)
Config prefix table_fn empty_table_fn (table_builder : (Vector Any -> (Any|Nothing)) -> Any) materialize is_database test_selection aggregate_test_selection create_connection_func light_table_builder is_integer_type=(.is_integer) is_feature_supported
## Specifies if the given Table backend supports custom Enso types.
@@ -92,7 +93,6 @@ type Test_Selection
case-sensitive column names. If `False`, the backend will match column
names in a case insensitive way, so that "Foo" and "foo" will refer to
the same column.
- order_by: Specifies if the backend supports ordering operations.
- natural_ordering: Specifies if the backend supports natural ordering
operations.
- case_insensitive_ordering: Specifies if the backend supports case
@@ -157,7 +157,7 @@ type Test_Selection
- supports_date_time_without_timezone: Specifies if the backend supports
date/time operations without a timezone (true for most Database backends).
Defaults to `.is_integer`.
Config supports_case_sensitive_columns=True order_by=True natural_ordering=False case_insensitive_ordering=True order_by_unicode_normalization_by_default=False case_insensitive_ascii_only=False allows_mixed_type_comparisons=True supports_unicode_normalization=False is_nan_and_nothing_distinct=True is_nan_comparable=False distinct_returns_first_row_from_group_if_ordered=True date_time=True text_length_limited_columns=False fixed_length_text_columns=False length_restricted_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=False char_max_size_after_substring:Char_Max_Size_After_Substring_Behavior=..Kept different_size_integer_types=True supports_8bit_integer=False supports_decimal_type=False supports_time_duration=False supports_nanoseconds_in_time=False supports_mixed_columns=False supported_replace_params=Nothing run_advanced_edge_case_tests_by_default=True supports_date_time_without_timezone=False
Config supports_case_sensitive_columns=True natural_ordering=False case_insensitive_ordering=True order_by_unicode_normalization_by_default=False case_insensitive_ascii_only=False allows_mixed_type_comparisons=True supports_unicode_normalization=False is_nan_and_nothing_distinct=True is_nan_comparable=False distinct_returns_first_row_from_group_if_ordered=True date_time=True text_length_limited_columns=False fixed_length_text_columns=False length_restricted_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=False char_max_size_after_substring:Char_Max_Size_After_Substring_Behavior=..Kept different_size_integer_types=True supports_8bit_integer=False supports_decimal_type=False supports_time_duration=False supports_nanoseconds_in_time=False supports_mixed_columns=False supported_replace_params=Nothing run_advanced_edge_case_tests_by_default=True supports_date_time_without_timezone=False
## Specifies if the advanced edge case tests shall be run.
@@ -207,5 +207,6 @@ add_specs suite_builder setup =
Temp_Column_Spec.add_specs suite_builder setup
Nothing_Spec.add_specs suite_builder setup
Text_Cleanse_Spec.add_specs suite_builder setup
Coalesce_Spec.add_specs suite_builder setup
main filter=Nothing = run_default_backend add_specs filter

View File

@@ -6,6 +6,8 @@ import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
from Standard.Table import Value_Type
from Standard.Database.DB_Table import MAX_LITERAL_ELEMENT_COUNT
from Standard.Database.Errors import all
import Standard.Database.Feature.Feature
from Standard.Test import all
@@ -20,6 +22,19 @@ type Data
Data.Value (table_builder [['x', [1]]])
add_specs suite_builder setup =
if setup.is_feature_supported Feature.Make_Table_From then (add_map_to_table_specs suite_builder setup) else
suite_builder.group setup.prefix+"Table.make_table_from_dictionary/vectors" group_builder->
data = Data.setup setup.table_builder
group_builder.specify "make_table_from_vectors should report unsupported" <|
vecs = [[0], [3.5], ['ABC']]
t = data.dummy_table.make_table_from_vectors vecs ['x', 'y', 'z']
t.should_fail_with (Unsupported_Database_Operation.Error "make_table_from_vectors")
group_builder.specify "make_table_from_dictionary should report unsupported" <|
map = Dictionary.from_vector [['x', 1], ['y', 2], ['z', 3]]
t = data.dummy_table.make_table_from_dictionary map 'k' 'v' . sort 'v'
t.should_fail_with (Unsupported_Database_Operation.Error "make_table_from_dictionary")
add_map_to_table_specs suite_builder setup =
prefix = setup.prefix
suite_builder.group prefix+"Table.make_table_from_dictionary/vectors" group_builder->
data = Data.setup setup.table_builder

View File

@@ -4,6 +4,7 @@ from Standard.Table import Value_Type, Column_Ref, Previous_Value, Blank_Selecto
from Standard.Table.Errors import all
from Standard.Database.Errors import Unsupported_Database_Operation
import Standard.Database.Feature.Feature
from Standard.Test import all
@@ -16,6 +17,9 @@ type Lazy_Ref
Value ~get
add_specs suite_builder setup =
if setup.is_feature_supported Feature.Integration_Tests then (add_missing_value_specs suite_builder setup)
add_missing_value_specs suite_builder setup =
prefix = setup.prefix
test_selection = setup.test_selection
table_builder = build_sorted_table setup

View File

@@ -3,6 +3,7 @@ from Standard.Base import all
from Standard.Table import all
import Standard.Database.DB_Column.DB_Column
import Standard.Database.Feature.Feature
from Standard.Test import Test
import Standard.Test.Extensions
@@ -18,6 +19,9 @@ type Lazy_Ref
Value ~get
add_specs suite_builder setup =
if setup.is_feature_supported Feature.Sort then (add_nothing_specs suite_builder setup)
add_nothing_specs suite_builder setup =
prefix = setup.prefix
# We cannot create a column of Nothing/NULL in the database without casting it to a non-mixed type.

View File

@@ -7,6 +7,8 @@ from Standard.Table.Errors import all
from Standard.Test import all
import Standard.Database.Feature.Feature
from Standard.Database.Errors import all
from project.Common_Table_Operations.Util import run_default_backend
@@ -41,16 +43,22 @@ type Data
type My_Type
Foo x
add_specs suite_builder setup =
if setup.is_feature_supported Feature.Sort then (add_order_specs suite_builder setup) else
suite_builder.group setup.prefix+"Table.sort" group_builder->
group_builder.specify "sort should report unsupported" <|
table_builder = setup.light_table_builder
t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [100, 3, Nothing, 4, 12]], ["Y", [100, 4, 2, Nothing, 11]]]
table_2 = t.sort ["ix"]
table_2.should_fail_with (Unsupported_Database_Operation.Error "sort")
add_order_specs suite_builder setup =
prefix = setup.prefix
create_connection_fn = setup.create_connection_func
test_selection = setup.test_selection
sort_pending = if test_selection.order_by.not then
"ToDo: sort is not yet supported by this backend."
pending_collation = "Need more reliable testing for collation locale support"
suite_builder.group prefix+"Table.sort" pending=sort_pending group_builder->
suite_builder.group prefix+"Table.sort" group_builder->
data = Data.setup create_connection_fn setup.table_builder
group_builder.teardown <|

View File

@@ -1,6 +1,8 @@
from Standard.Base import all
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Database.Feature.Feature
from Standard.Database.Errors import all
from Standard.Table import Position, Value_Type, Bits, Table
from Standard.Table.Errors import all
@@ -74,6 +76,15 @@ type Rename_Columns_Data
[table]
add_specs suite_builder setup =
if setup.is_feature_supported Feature.Select_Columns then (add_select_column_specs suite_builder setup) else
suite_builder.group setup.prefix+"Table.select_columns" group_builder->
group_builder.specify "select_columns should report unsupported" <|
table_builder = Util.build_sorted_table setup
data = Select_Columns_Data.setup table_builder
table_2 = data.table.select_columns ["bar", "foo"] reorder=True
table_2.should_fail_with (Unsupported_Database_Operation.Error "select_columns")
add_select_column_specs suite_builder setup =
prefix = setup.prefix
table_builder = Util.build_sorted_table setup
test_selection = setup.test_selection

View File

@@ -7,6 +7,9 @@ import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
from Standard.Table import Aggregate_Column
from Standard.Table.Errors import all
import Standard.Database.Feature.Feature
from Standard.Database.Errors import all
from Standard.Test import all
from project.Util import all
@@ -62,8 +65,21 @@ type Column_Take_Drop_Data
teardown self =
self.connection.close
add_specs suite_builder setup =
if setup.is_feature_supported Feature.Sample then (add_take_drop_specs suite_builder setup) else
suite_builder.group setup.prefix+"Table.take/drop" group_builder->
group_builder.specify "take should report unsupported" <|
table_builder = setup.light_table_builder
t1 = table_builder [["X", [1, 2, 3, 2]]]
t2 = t1.take
t2.should_fail_with (Unsupported_Database_Operation.Error "take")
group_builder.specify "drop should report unsupported" <|
table_builder = setup.light_table_builder
t1 = table_builder [["X", [1, 2, 3, 2]]]
t2 = t1.drop
t2.should_fail_with (Unsupported_Database_Operation.Error "drop")
add_take_drop_specs suite_builder setup =
prefix = setup.prefix
create_connection_fn = setup.create_connection_func

View File

@@ -9,7 +9,9 @@ import Standard.Base.Errors.Common.Type_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Test.Extensions
from Standard.Database.Errors import Unsupported_Database_Operation
from Standard.Database.Errors import all
import Standard.Database.Feature.Feature
from Standard.Table import Column, Table, Value_Type, Auto, Bits
from Standard.Table.Errors import Invalid_Value_Type, Invalid_Column_Names
from project.Common_Table_Operations.Util import run_default_backend, build_sorted_table
@@ -22,6 +24,15 @@ type Lazy_Ref
main filter=Nothing = run_default_backend add_specs filter
add_specs suite_builder setup =
if setup.is_feature_supported Feature.Text_Cleanse then (add_text_cleanse_specs suite_builder setup) else
suite_builder.group setup.prefix+"Table.text_cleanse" group_builder->
group_builder.specify "text_cleanse should report unsupported" <|
table_builder = setup.light_table_builder
t1 = table_builder [["X", ["", "", ""]], ["Y", [4, 5, 6]]]
t2 = t1.text_cleanse ["X"] [..Leading_Whitespace]
t2.should_fail_with (Unsupported_Database_Operation.Error "text_cleanse")
add_text_cleanse_specs suite_builder setup =
prefix = setup.prefix
materialize = setup.materialize
suite_builder.group prefix+"Table Text Cleanse" group_builder->

View File

@@ -9,6 +9,8 @@ from Standard.Test import all
import enso_dev.Base_Tests.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup
from enso_dev.Base_Tests.Network.Enso_Cloud.Audit_Log_Spec import Audit_Log_Event, get_audit_log_events
import project.Database.Postgres_Spec.Temporary_Data_Link_File
from project.Database.Postgres_Spec import get_configured_connection_details
from project.Util import all
polyglot java import java.lang.Thread
@@ -90,3 +92,11 @@ add_specs suite_builder prefix ~datalink_to_connection database_pending =
3. switch to mock cloud (if wanted) and run some queries
4. inspect logs and search for the asset id
Error.throw "TODO"
main filter=Nothing =
connection_details = get_configured_connection_details
pending = if connection_details.is_nothing then "PostgreSQL test database is not configured. See README.md for instructions."
data_link_file = Temporary_Data_Link_File.make connection_details
suite = Test.build suite_builder->
add_specs suite_builder "[PostgreSQL] " data_link_file.get database_pending=pending
suite.run_with_filter filter

View File

@@ -15,6 +15,7 @@ import Standard.Test.Suite.Suite_Builder
import project.Database.Common.Default_Ordering_Spec
import project.Database.Common.Names_Length_Limits_Spec
import Standard.Database.Feature.Feature
import project.Util
import project.Database.Helpers.Name_Generator
@@ -142,14 +143,16 @@ type Missing_Values_Data
teardown self =
drop_table self.connection self.t4.name
add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn : (Nothing -> Any)) default_connection setup =
if setup.is_feature_supported Feature.Integration_Tests then (add_common_specs suite_builder prefix create_connection_fn default_connection setup)
## Adds common database test specs to the suite builder.
Arguments:
- create_connection_fn: A function that creates an appropriate Connection to the database backend.
add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn : (Nothing -> Any)) default_connection =
add_common_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn : (Nothing -> Any)) default_connection setup =
Default_Ordering_Spec.add_specs suite_builder prefix create_connection_fn
Default_Ordering_Spec.add_specs suite_builder prefix create_connection_fn setup
Names_Length_Limits_Spec.add_specs suite_builder prefix create_connection_fn
suite_builder.group (prefix + "Basic Table Access") group_builder->

View File

@@ -7,6 +7,7 @@ from Standard.Table.Errors import all
from Standard.Database import all
from Standard.Database.Errors import all
import Standard.Database.Feature.Feature
from Standard.Test import all
import Standard.Test.Suite.Suite_Builder
@@ -43,7 +44,7 @@ type Data
Arguments:
- suite_builder: A Suite_Builder in which a new group will be created
- create_connection_fn: A function that creates an appropriate Connection to the database backend.
add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn : Any) =
add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn : Any) setup =
group_name = prefix + "Table.default_ordering"
suite_builder.group group_name group_builder->
@@ -66,20 +67,25 @@ add_specs (suite_builder : Suite_Builder) (prefix : Text) (create_connection_fn
v2.length . should_equal 1
v2.first.expression.name . should_equal "X"
group_builder.specify "will return Nothing for composite tables (join, aggregate)"
data.db_table_with_key.join data.db_table_with_key . default_ordering . should_equal Nothing
data.db_table_with_key.aggregate ["X"] . default_ordering . should_equal Nothing
if setup.is_feature_supported Feature.Join then
group_builder.specify "will return Nothing for composite tables (join)"
data.db_table_with_key.join data.db_table_with_key . default_ordering . should_equal Nothing
group_builder.specify "will return the ordering determined by sort" <|
v1 = data.db_table_with_key.sort ["Y", ..Name "X" ..Descending] . default_ordering
v1.length . should_equal 2
v1.first.expression.name . should_equal "Y"
v1.first.direction . should_equal Sort_Direction.Ascending
v1.second.expression.name . should_equal "X"
v1.second.direction . should_equal Sort_Direction.Descending
if setup.is_feature_supported Feature.Aggregate then
group_builder.specify "will return Nothing for composite tables (aggregate)"
data.db_table_with_key.aggregate ["X"] . default_ordering . should_equal Nothing
v2 = data.db_table_without_key.sort ["Y"] . default_ordering
v2.length . should_equal 1
v2.first.expression.name . should_equal "Y"
v2.first.direction . should_equal Sort_Direction.Ascending
if setup.is_feature_supported Feature.Sort then
group_builder.specify "will return the ordering determined by sort" <|
v1 = data.db_table_with_key.sort ["Y", ..Name "X" ..Descending] . default_ordering
v1.length . should_equal 2
v1.first.expression.name . should_equal "Y"
v1.first.direction . should_equal Sort_Direction.Ascending
v1.second.expression.name . should_equal "X"
v1.second.direction . should_equal Sort_Direction.Descending
v2 = data.db_table_without_key.sort ["Y"] . default_ordering
v2.length . should_equal 1
v2.first.expression.name . should_equal "Y"
v2.first.direction . should_equal Sort_Direction.Ascending
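## For quick reference, `default_ordering` yields a vector of ordering
   descriptors; a hedged reading sketch for tables with a known ordering
   (`describe_ordering` is a hypothetical helper; names mirror the
   assertions above):
describe_ordering table =
    # e.g. [["Y", Sort_Direction.Ascending], ["X", Sort_Direction.Descending]]
    table.default_ordering.map o-> [o.expression.name, o.direction]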

View File

@@ -6,6 +6,7 @@ from Standard.Table.Errors import No_Such_Column, Name_Too_Long, Truncated_Colum
from Standard.Database import all
from Standard.Database.Dialect import Temp_Table_Style
from Standard.Database.Errors import Table_Not_Found, Unsupported_Database_Operation
import Standard.Database.Feature.Feature
from Standard.Test import all
from Standard.Test.Execution_Context_Helpers import run_with_and_without_output
@@ -22,7 +23,7 @@ type Data
teardown self = self.connection.close
add_specs suite_builder prefix create_connection_func =
add_specs suite_builder prefix create_connection_func setup =
suite_builder.group prefix+"Support for Long Column/Table names" group_builder->
data = Data.setup create_connection_func
@@ -127,61 +128,62 @@ add_specs suite_builder prefix create_connection_func =
data.connection.query longer_name_with_same_prefix . should_fail_with Name_Too_Long
data.connection.query (SQL_Query.Table_Name longer_name_with_same_prefix) . should_fail_with Name_Too_Long
group_builder.specify "should be fine joining tables with long names" <|
## If we know the maximum length, we choose a length that will be
just short enough to fit in the limit, but long enough that after
concatenating two of such names, the result will exceed the limit.
If we don't know the limit, we just choose a very long name.
name_length = case max_table_name_length of
Nothing -> big_name_length
max : Integer -> max
if setup.is_feature_supported Feature.Join then
group_builder.specify "should be fine joining tables with long names" <|
## If we know the maximum length, we choose a length that will be
just short enough to fit in the limit, but long enough that after
concatenating two of such names, the result will exceed the limit.
If we don't know the limit, we just choose a very long name.
name_length = case max_table_name_length of
Nothing -> big_name_length
max : Integer -> max
name_1 = ("x" * (name_length - 1)) + "1"
name_2 = ("x" * (name_length - 1)) + "2"
name_3 = ("x" * (name_length - 1)) + "3"
t1 = (Table.new [["X", [1, 2]]]).select_into_database_table data.connection name_1 temporary=True
t2 = (Table.new [["X", [1, 2]]]).select_into_database_table data.connection name_2 temporary=True
t3 = (Table.new [["X", [1, 2]]]).select_into_database_table data.connection name_3 temporary=True
name_1 = ("x" * (name_length - 1)) + "1"
name_2 = ("x" * (name_length - 1)) + "2"
name_3 = ("x" * (name_length - 1)) + "3"
t1 = (Table.new [["X", [1, 2]]]).select_into_database_table data.connection name_1 temporary=True
t2 = (Table.new [["X", [1, 2]]]).select_into_database_table data.connection name_2 temporary=True
t3 = (Table.new [["X", [1, 2]]]).select_into_database_table data.connection name_3 temporary=True
Test.with_clue "join: " <|
t12 = t1.join t2
t13 = t1.join t3
t1213 = t12.join t13
t1213.row_count . should_equal 2
t1213.at "X" . to_vector . should_equal_ignoring_order [1, 2]
Problems.expect_only_warning Duplicate_Output_Column_Names t1213
Test.with_clue "join: " <|
t12 = t1.join t2
t13 = t1.join t3
t1213 = t12.join t13
t1213.row_count . should_equal 2
t1213.at "X" . to_vector . should_equal_ignoring_order [1, 2]
Problems.expect_only_warning Duplicate_Output_Column_Names t1213
t11 = t1.join t1
t11.row_count . should_equal 2
t11.at "X" . to_vector . should_equal_ignoring_order [1, 2]
Problems.expect_only_warning Duplicate_Output_Column_Names t11
t11 = t1.join t1
t11.row_count . should_equal 2
t11.at "X" . to_vector . should_equal_ignoring_order [1, 2]
Problems.expect_only_warning Duplicate_Output_Column_Names t11
Test.with_clue "cross_join: " <|
t12 = t1.cross_join t2
t13 = t1.cross_join t3
t1213 = t12.cross_join t13
t1213.row_count . should_equal 16
t1213.at "X" . to_vector . distinct . should_equal_ignoring_order [1, 2]
Problems.expect_only_warning Duplicate_Output_Column_Names t1213
Test.with_clue "cross_join: " <|
t12 = t1.cross_join t2
t13 = t1.cross_join t3
t1213 = t12.cross_join t13
t1213.row_count . should_equal 16
t1213.at "X" . to_vector . distinct . should_equal_ignoring_order [1, 2]
Problems.expect_only_warning Duplicate_Output_Column_Names t1213
t11 = t1.cross_join t1
t11.row_count . should_equal 4
t11.at "X" . to_vector . should_equal_ignoring_order [1, 2, 1, 2]
Problems.expect_only_warning Duplicate_Output_Column_Names t11
t11 = t1.cross_join t1
t11.row_count . should_equal 4
t11.at "X" . to_vector . should_equal_ignoring_order [1, 2, 1, 2]
Problems.expect_only_warning Duplicate_Output_Column_Names t11
Test.with_clue "union: " <|
t12 = t1.union t2 # 4 rows
t13 = t1.union t3 # 4 rows
tx = t1.union [t12, t13] # 2 + 2*4 = 10 rows
ty = tx.union tx # 10 + 10 = 20 rows
ty.row_count . should_equal 20
ty.at "X" . to_vector . distinct . should_equal_ignoring_order [1, 2]
Problems.assume_no_problems ty
Test.with_clue "union: " <|
t12 = t1.union t2 # 4 rows
t13 = t1.union t3 # 4 rows
tx = t1.union [t12, t13] # 2 + 2*4 = 10 rows
ty = tx.union tx # 10 + 10 = 20 rows
ty.row_count . should_equal 20
ty.at "X" . to_vector . distinct . should_equal_ignoring_order [1, 2]
Problems.assume_no_problems ty
t11 = t1.union t1
t11.row_count . should_equal 4
t11.at "X" . to_vector . should_equal_ignoring_order [1, 2, 1, 2]
Problems.assume_no_problems t11
t11 = t1.union t1
t11.row_count . should_equal 4
t11.at "X" . to_vector . should_equal_ignoring_order [1, 2, 1, 2]
Problems.assume_no_problems t11
group_builder.specify "should be fine operating on columns with long names" <|
## If we know the maximum length, we choose a length that will be
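
Note: as a worked instance of the name-length comment above, assume a Postgres-style 63-character identifier limit (the exact limit is backend-specific):

    name_length = 63                            # stands in for max_table_name_length
    name_1 = ("x" * (name_length - 1)) + "1"    # exactly 63 characters: fits the limit
    name_2 = ("x" * (name_length - 1)) + "2"
    (name_1 + name_2).length                    # 126: exceeds the limit once concatenated

Any name derived by concatenating two such identifiers is over the limit, which is exactly the edge these join/union specs exercise.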

View File

@ -11,6 +11,7 @@ from Standard.Table.Errors import Invalid_Column_Names, Inexact_Type_Coercion, D
import Standard.Database.DB_Column.DB_Column
import Standard.Database.DB_Table.DB_Table
import Standard.Database.Feature.Feature
import Standard.Database.SQL_Type.SQL_Type
import Standard.Database.Internal.Postgres.Pgpass
import Standard.Database.Internal.Replace_Params.Replace_Params
@ -722,7 +723,7 @@ add_postgres_specs suite_builder create_connection_fn db_name =
materialize = .read
Common_Spec.add_specs suite_builder prefix create_connection_fn default_connection
common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by_unicode_normalization_by_default=True allows_mixed_type_comparisons=False text_length_limited_columns=True fixed_length_text_columns=True removes_trailing_whitespace_casting_from_char_to_varchar=True char_max_size_after_substring=..Reset supports_decimal_type=True supported_replace_params=supported_replace_params run_advanced_edge_case_tests_by_default=True supports_date_time_without_timezone=True is_nan_comparable=True
aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last_row_order=False aggregation_problems=False
@ -734,8 +735,11 @@ add_postgres_specs suite_builder create_connection_fn db_name =
empty_agg_table_fn = _->
(agg_in_memory_table.take (..First 0)).select_into_database_table default_connection.get (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True
setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_fn light_table_builder=light_table_builder
is_feature_supported_fn feature:Feature = default_connection.get.dialect.is_feature_supported feature
setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_fn light_table_builder=light_table_builder is_feature_supported=is_feature_supported_fn
Common_Spec.add_specs suite_builder prefix create_connection_fn default_connection setup
postgres_specific_spec suite_builder create_connection_fn db_name setup
Common_Table_Operations.Main.add_specs suite_builder setup
Upload_Spec.add_specs suite_builder setup create_connection_fn
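
Note: `is_feature_supported_fn` closes over `default_connection`, which the `.get` call suggests is lazily initialized, so the dialect is only consulted once a spec actually runs. A hypothetical usage sketch, reusing the names above (the SQLite suite below wires the predicate the same way):

    supports_join = is_feature_supported_fn Feature.Join
    IO.println supports_join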

View File

@ -8,6 +8,7 @@ from Standard.Table import Table, Value_Type, Bits
from Standard.Table.Errors import Invalid_Column_Names, Duplicate_Output_Column_Names
import Standard.Database.Internal.Replace_Params.Replace_Params
import Standard.Database.Feature.Feature
from Standard.Database import all
from Standard.Database.Errors import SQL_Error, Unsupported_Database_Operation, Unsupported_Database_Type
@ -330,8 +331,6 @@ sqlite_spec suite_builder prefix create_connection_func persistent_connector =
materialize = .read
Common_Spec.add_specs suite_builder prefix create_connection_func default_connection
common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=False natural_ordering=False case_insensitive_ordering=True case_insensitive_ascii_only=True is_nan_and_nothing_distinct=False date_time=False supported_replace_params=supported_replace_params different_size_integer_types=False length_restricted_text_columns=False char_max_size_after_substring=..Reset run_advanced_edge_case_tests_by_default=True
## For now `advanced_stats`, `text_shortest_longest` and
@ -351,7 +350,11 @@ sqlite_spec suite_builder prefix create_connection_func persistent_connector =
empty_agg_table_fn = _ ->
(agg_in_memory_table.take (..First 0)).select_into_database_table default_connection.get (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True
setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_func light_table_builder=light_table_builder
is_feature_supported_fn feature:Feature = default_connection.get.dialect.is_feature_supported feature
setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_func light_table_builder=light_table_builder is_feature_supported=is_feature_supported_fn
Common_Spec.add_specs suite_builder prefix create_connection_func default_connection setup
sqlite_specific_spec suite_builder prefix create_connection_func setup
Common_Table_Operations.Main.add_specs suite_builder setup
Upload_Spec.add_specs suite_builder setup create_connection_func persistent_connector=persistent_connector

View File

@ -370,6 +370,19 @@ add_specs suite_builder =
warnings . should_contain <| Arithmetic_Error.Error 'Value is Infinity (at rows [3]).'
warnings . should_contain <| Arithmetic_Error.Error 'Value is NaN (at rows [2]).'
suite_builder.group "InferredIntegerBuilder" group_builder->
group_builder.specify "Should be able to handle Nothings" <|
c = Column.from_vector "x" [Nothing, 1, Nothing, Nothing, 2, 3, Nothing, Nothing, Nothing, Nothing, 4, 5, Nothing, Nothing, 6, Nothing, 7, Nothing]
c.truncate.to_vector . should_equal c.to_vector
c.truncate.length . should_equal c.length
group_builder.specify "Should be able to handle mixed integer / biginteger" <|
c0 = Column.from_vector "x" [1, 2, 3, 4, 5, 9223372036854775807001, 9223372036854775807002, 9223372036854775807003, 6, 7, 8]
c1 = Column.from_vector "x" [9223372036854775807001, 9223372036854775807002, 1, 2, 3, 4, 5, 9223372036854775807003, 6, 7, 8]
[c0, c1].map c->
c.truncate.to_vector . should_equal c.to_vector
c.truncate.length . should_equal c.length
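
Note: the magic numbers here sit just past the 64-bit range: `Long.MAX_VALUE` is 9223372036854775807, so 9223372036854775807001 cannot be held in a long-backed column and the builder must widen to a big-integer representation mid-stream. A minimal sketch of that boundary, assuming the same `Column` API:

    fits = Column.from_vector "x" [9223372036854775807]     # Long.MAX_VALUE: still 64-bit
    wide = Column.from_vector "x" [9223372036854775807001]  # forces big-integer storage
    wide.to_vector . should_equal [9223372036854775807001]
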
suite_builder.group "Date_Time truncate" group_builder->
group_builder.specify "should be able to truncate a column of Date_Times" <|
c = Column.from_vector "foo" [Date_Time.new 2020 10 24 1 2 3, Date_Time.new 2020 10 24 1 2 3]

View File

@ -33,8 +33,10 @@ in_memory_setup =
Table.new columns
create_connection_func _ =
Dummy_Connection.Value
is_feature_supported_fn _ =
True
Common_Table_Operations.Main.Test_Setup.Config "[In-Memory] " agg_table_fn empty_table_fn table_builder materialize is_database=False test_selection=selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_func light_table_builder=light_table_builder
Common_Table_Operations.Main.Test_Setup.Config "[In-Memory] " agg_table_fn empty_table_fn table_builder materialize is_database=False test_selection=selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_func light_table_builder=light_table_builder is_feature_supported=is_feature_supported_fn
add_specs suite_builder =
Common_Table_Operations.Main.add_specs suite_builder in_memory_setup
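
Note: the in-memory backend implements every table operation natively, so its predicate ignores the argument and always answers `True`. A hypothetical check, assuming `Feature` is imported in the calling module:

    in_memory_setup.is_feature_supported Feature.Sort . should_equal True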