Mirror of https://github.com/enso-org/enso.git (synced 2024-11-24 16:44:48 +03:00)
Batch Two of Problem_Behavior (#10454)
Part 2 of removing the public constructors. Completes the review of the libraries. Tests are still left to do; once those are done, the export can be removed. The pattern being applied is sketched below the commit metadata.
parent 5cd028eead, commit 2124cd022a
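All of the hunks below apply the same mechanical change to `on_problems` arguments: the parameter gains a `Problem_Behavior` type ascription where it was missing, and defaults spelled with the fully qualified constructors (`Problem_Behavior.Report_Warning`, `Problem_Behavior.Report_Error`, or a bare `Report_Warning`) become the autoscoped `..Report_Warning` / `..Report_Error`. A minimal sketch of the pattern follows; `count_rows` and `main` are hypothetical, invented only for illustration and not code from this commit, and it assumes the usual `from Standard.Base import all` brings `Problem_Behavior` into scope as it does in the library code.

    from Standard.Base import all

    ## Hypothetical helper in the new style: `on_problems` is ascribed with
       `Problem_Behavior` and its default uses the autoscoped constructor
       `..Report_Warning` instead of `Problem_Behavior.Report_Warning`.
    count_rows rows (on_problems : Problem_Behavior = ..Report_Warning) =
        # This toy example does not report any problems, mirroring the
        # `_ = [on_problems]` pattern used in the diff for unused arguments.
        _ = [on_problems]
        rows.length

    # Old style being phased out by this batch:
    # count_rows rows (on_problems : Problem_Behavior = Problem_Behavior.Report_Warning) = ...

    main =
        # Because the parameter is type-ascribed, callers can also pass an
        # autoscoped constructor at the call site.
        IO.println (count_rows [1, 2, 3])
        IO.println (count_rows [1, 2, 3] on_problems=..Ignore)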
@ -163,7 +163,7 @@ type S3_File
|
|||||||
If set to `Ignore`, the operation proceeds without errors or warnings.
|
If set to `Ignore`, the operation proceeds without errors or warnings.
|
||||||
@format File_Format.default_widget
|
@format File_Format.default_widget
|
||||||
read : File_Format -> Problem_Behavior -> Any ! S3_Error
|
read : File_Format -> Problem_Behavior -> Any ! S3_Error
|
||||||
read self format=Auto_Detect (on_problems : Problem_Behavior = Problem_Behavior.Report_Warning) =
|
read self format=Auto_Detect (on_problems : Problem_Behavior = ..Report_Warning) =
|
||||||
if Data_Link.is_data_link self then Data_Link_Helpers.read_data_link self format on_problems else
|
if Data_Link.is_data_link self then Data_Link_Helpers.read_data_link self format on_problems else
|
||||||
if self.is_directory then Error.throw (Illegal_Argument.Error "Cannot `read` a directory, use `list`.") else
|
if self.is_directory then Error.throw (Illegal_Argument.Error "Cannot `read` a directory, use `list`.") else
|
||||||
case format of
|
case format of
|
||||||
@ -195,7 +195,7 @@ type S3_File
|
|||||||
If set to `Ignore`, the operation proceeds without errors or warnings.
|
If set to `Ignore`, the operation proceeds without errors or warnings.
|
||||||
@encoding Encoding.default_widget
|
@encoding Encoding.default_widget
|
||||||
read_text : Encoding -> Problem_Behavior -> Text ! File_Error
|
read_text : Encoding -> Problem_Behavior -> Text ! File_Error
|
||||||
read_text self (encoding : Encoding = Encoding.default) (on_problems : Problem_Behavior = Problem_Behavior.Report_Warning) =
|
read_text self (encoding : Encoding = Encoding.default) (on_problems : Problem_Behavior = ..Report_Warning) =
|
||||||
self.read (Plain_Text_Format.Plain_Text encoding) on_problems
|
self.read (Plain_Text_Format.Plain_Text encoding) on_problems
|
||||||
|
|
||||||
## ICON data_output
|
## ICON data_output
|
||||||
|
@ -310,7 +310,7 @@ type Connection
|
|||||||
a `Dry_Run_Operation` warning attached.
|
a `Dry_Run_Operation` warning attached.
|
||||||
@structure make_structure_creator
|
@structure make_structure_creator
|
||||||
create_table : Text -> Vector Column_Description | DB_Table | Table -> Vector Text | Nothing -> Boolean -> Boolean -> Problem_Behavior -> DB_Table ! Table_Already_Exists
|
create_table : Text -> Vector Column_Description | DB_Table | Table -> Vector Text | Nothing -> Boolean -> Boolean -> Problem_Behavior -> DB_Table ! Table_Already_Exists
|
||||||
create_table self (table_name : Text) (structure : Vector Column_Description | DB_Table | Table) (primary_key : (Vector Text | Nothing) = [first_column_name_in_structure structure]) (temporary : Boolean = False) (allow_existing : Boolean = False) (on_problems:Problem_Behavior = Problem_Behavior.Report_Warning) =
|
create_table self (table_name : Text) (structure : Vector Column_Description | DB_Table | Table) (primary_key : (Vector Text | Nothing) = [first_column_name_in_structure structure]) (temporary : Boolean = False) (allow_existing : Boolean = False) (on_problems:Problem_Behavior = ..Report_Warning) =
|
||||||
create_table_implementation self table_name structure primary_key temporary allow_existing on_problems
|
create_table_implementation self table_name structure primary_key temporary allow_existing on_problems
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
|
@ -55,7 +55,7 @@ type SQLite_Format
|
|||||||
## PRIVATE
|
## PRIVATE
|
||||||
Implements the `File.read` for this `File_Format`
|
Implements the `File.read` for this `File_Format`
|
||||||
read : File -> Problem_Behavior -> Any
|
read : File -> Problem_Behavior -> Any
|
||||||
read self file on_problems =
|
read self file on_problems:Problem_Behavior =
|
||||||
_ = [on_problems]
|
_ = [on_problems]
|
||||||
Database.connect (SQLite.From_File file)
|
Database.connect (SQLite.From_File file)
|
||||||
|
|
||||||
|
@ -1732,7 +1732,7 @@ type DB_Column
|
|||||||
@type (Widget_Helpers.parse_type_selector include_auto=False)
|
@type (Widget_Helpers.parse_type_selector include_auto=False)
|
||||||
@format (make_format_chooser include_number=False)
|
@format (make_format_chooser include_number=False)
|
||||||
parse : Value_Type | Auto -> Text | Data_Formatter -> Problem_Behavior -> DB_Column
|
parse : Value_Type | Auto -> Text | Data_Formatter -> Problem_Behavior -> DB_Column
|
||||||
parse self type:(Value_Type | Auto) format:(Text | Data_Formatter)="" on_problems=Report_Warning =
|
parse self type:(Value_Type | Auto) format:(Text | Data_Formatter)="" on_problems:Problem_Behavior=..Report_Warning =
|
||||||
if type == Auto then Error.throw (Unsupported_Database_Operation.Error "The `Auto` parse type is not supported by the Database backend. Either pick a specific type or materialize the table to memory using `.read`.") else
|
if type == Auto then Error.throw (Unsupported_Database_Operation.Error "The `Auto` parse type is not supported by the Database backend. Either pick a specific type or materialize the table to memory using `.read`.") else
|
||||||
if format != "" then Error.throw (Unsupported_Database_Operation.Error "Custom formatting is not supported by the Database backend. Please set the format to `''` to use the default settings, or if custom formatting is needed, materialize the table to memory using `.read` first.") else
|
if format != "" then Error.throw (Unsupported_Database_Operation.Error "Custom formatting is not supported by the Database backend. Please set the format to `''` to use the default settings, or if custom formatting is needed, materialize the table to memory using `.read` first.") else
|
||||||
Value_Type.expect_text self <|
|
Value_Type.expect_text self <|
|
||||||
@ -1800,7 +1800,7 @@ type DB_Column
|
|||||||
types. Due to this, a Mixed column containing values `[2, "3"]` will
|
types. Due to this, a Mixed column containing values `[2, "3"]` will
|
||||||
actually be converted into `[2, Nothing]` when casting to Integer type.
|
actually be converted into `[2, Nothing]` when casting to Integer type.
|
||||||
cast : Value_Type -> Problem_Behavior -> DB_Column ! Illegal_Argument | Inexact_Type_Coercion | Conversion_Failure
|
cast : Value_Type -> Problem_Behavior -> DB_Column ! Illegal_Argument | Inexact_Type_Coercion | Conversion_Failure
|
||||||
cast self value_type on_problems=Problem_Behavior.Report_Warning =
|
cast self value_type on_problems:Problem_Behavior=..Report_Warning =
|
||||||
check_cast_compatibility self.value_type value_type <|
|
check_cast_compatibility self.value_type value_type <|
|
||||||
self.internal_do_cast value_type on_problems
|
self.internal_do_cast value_type on_problems
|
||||||
|
|
||||||
@ -1819,7 +1819,7 @@ type DB_Column
|
|||||||
## PRIVATE
|
## PRIVATE
|
||||||
Shares the core CAST logic between `cast` and `parse`.
|
Shares the core CAST logic between `cast` and `parse`.
|
||||||
internal_do_cast : Value_Type -> Problem_Behavior -> DB_Column
|
internal_do_cast : Value_Type -> Problem_Behavior -> DB_Column
|
||||||
internal_do_cast self value_type on_problems =
|
internal_do_cast self value_type on_problems:Problem_Behavior =
|
||||||
dialect = self.connection.dialect
|
dialect = self.connection.dialect
|
||||||
type_mapping = dialect.get_type_mapping
|
type_mapping = dialect.get_type_mapping
|
||||||
target_sql_type = type_mapping.value_type_to_sql value_type on_problems
|
target_sql_type = type_mapping.value_type_to_sql value_type on_problems
|
||||||
|
@ -57,7 +57,7 @@ from project.Internal.Upload_Table import all
|
|||||||
rows, so errors may still occur when the output action is enabled.
|
rows, so errors may still occur when the output action is enabled.
|
||||||
@primary_key Widget_Helpers.make_column_name_vector_selector
|
@primary_key Widget_Helpers.make_column_name_vector_selector
|
||||||
DB_Table.select_into_database_table : Connection -> Text -> Vector Text | Nothing -> Boolean -> Problem_Behavior -> DB_Table ! Table_Already_Exists | Inexact_Type_Coercion | Missing_Input_Columns | Non_Unique_Key | SQL_Error | Illegal_Argument
|
DB_Table.select_into_database_table : Connection -> Text -> Vector Text | Nothing -> Boolean -> Problem_Behavior -> DB_Table ! Table_Already_Exists | Inexact_Type_Coercion | Missing_Input_Columns | Non_Unique_Key | SQL_Error | Illegal_Argument
|
||||||
DB_Table.select_into_database_table self connection (table_name : Text) primary_key=[self.columns.first.name] temporary=False on_problems=Problem_Behavior.Report_Warning =
|
DB_Table.select_into_database_table self connection (table_name : Text) primary_key=[self.columns.first.name] temporary=False on_problems:Problem_Behavior=..Report_Warning =
|
||||||
select_into_table_implementation self connection table_name primary_key temporary on_problems
|
select_into_table_implementation self connection table_name primary_key temporary on_problems
|
||||||
|
|
||||||
## GROUP Standard.Base.Output
|
## GROUP Standard.Base.Output
|
||||||
@ -132,7 +132,7 @@ DB_Table.select_into_database_table self connection (table_name : Text) primary_
|
|||||||
only on a sample of rows, so errors may still occur when the output action
|
only on a sample of rows, so errors may still occur when the output action
|
||||||
is enabled.
|
is enabled.
|
||||||
DB_Table.update_rows : DB_Table | Table -> Update_Action -> Vector Text | Nothing -> Boolean -> Problem_Behavior -> DB_Table ! Table_Not_Found | Unmatched_Columns | Missing_Input_Columns | Column_Type_Mismatch | SQL_Error | Illegal_Argument
|
DB_Table.update_rows : DB_Table | Table -> Update_Action -> Vector Text | Nothing -> Boolean -> Problem_Behavior -> DB_Table ! Table_Not_Found | Unmatched_Columns | Missing_Input_Columns | Column_Type_Mismatch | SQL_Error | Illegal_Argument
|
||||||
DB_Table.update_rows self (source_table : DB_Table | Table) (update_action : Update_Action = Update_Action.Update_Or_Insert) (key_columns : Vector | Nothing = default_key_columns self) (error_on_missing_columns : Boolean = False) (on_problems : Problem_Behavior = Problem_Behavior.Report_Warning) =
|
DB_Table.update_rows self (source_table : DB_Table | Table) (update_action : Update_Action = Update_Action.Update_Or_Insert) (key_columns : Vector | Nothing = default_key_columns self) (error_on_missing_columns : Boolean = False) (on_problems : Problem_Behavior = ..Report_Warning) =
|
||||||
common_update_table source_table self update_action key_columns error_on_missing_columns on_problems
|
common_update_table source_table self update_action key_columns error_on_missing_columns on_problems
|
||||||
|
|
||||||
## GROUP Standard.Base.Output
|
## GROUP Standard.Base.Output
|
||||||
|
@ -57,7 +57,7 @@ from project.Internal.Upload_Table import all
|
|||||||
rows, so errors may still occur when the output action is enabled.
|
rows, so errors may still occur when the output action is enabled.
|
||||||
@primary_key Widget_Helpers.make_column_name_vector_selector
|
@primary_key Widget_Helpers.make_column_name_vector_selector
|
||||||
Table.select_into_database_table : Connection -> Text -> Vector Text | Nothing -> Boolean -> Problem_Behavior -> DB_Table ! Table_Already_Exists | Inexact_Type_Coercion | Missing_Input_Columns | Non_Unique_Key | SQL_Error | Illegal_Argument
|
Table.select_into_database_table : Connection -> Text -> Vector Text | Nothing -> Boolean -> Problem_Behavior -> DB_Table ! Table_Already_Exists | Inexact_Type_Coercion | Missing_Input_Columns | Non_Unique_Key | SQL_Error | Illegal_Argument
|
||||||
Table.select_into_database_table self connection (table_name : Text) primary_key=[self.columns.first.name] temporary=False on_problems=Problem_Behavior.Report_Warning =
|
Table.select_into_database_table self connection (table_name : Text) primary_key=[self.columns.first.name] temporary=False on_problems:Problem_Behavior=..Report_Warning =
|
||||||
select_into_table_implementation self connection table_name primary_key temporary on_problems
|
select_into_table_implementation self connection table_name primary_key temporary on_problems
|
||||||
|
|
||||||
## GROUP Standard.Base.Output
|
## GROUP Standard.Base.Output
|
||||||
@ -123,7 +123,7 @@ Table.select_into_database_table self connection (table_name : Text) primary_key
|
|||||||
only on a sample of rows, so errors may still occur when the output action
|
only on a sample of rows, so errors may still occur when the output action
|
||||||
is enabled.
|
is enabled.
|
||||||
Table.update_rows : DB_Table | Table -> Update_Action -> Vector Text | Nothing -> Boolean -> Problem_Behavior -> DB_Table ! Table_Not_Found | Unmatched_Columns | Missing_Input_Columns | Column_Type_Mismatch | SQL_Error | Illegal_Argument
|
Table.update_rows : DB_Table | Table -> Update_Action -> Vector Text | Nothing -> Boolean -> Problem_Behavior -> DB_Table ! Table_Not_Found | Unmatched_Columns | Missing_Input_Columns | Column_Type_Mismatch | SQL_Error | Illegal_Argument
|
||||||
Table.update_rows self (source_table : DB_Table | Table) (update_action : Update_Action = Update_Action.Update_Or_Insert) (key_columns : Vector | Nothing = Nothing) (error_on_missing_columns : Boolean = False) (on_problems : Problem_Behavior = Problem_Behavior.Report_Warning) =
|
Table.update_rows self (source_table : DB_Table | Table) (update_action : Update_Action = Update_Action.Update_Or_Insert) (key_columns : Vector | Nothing = Nothing) (error_on_missing_columns : Boolean = False) (on_problems : Problem_Behavior = ..Report_Warning) =
|
||||||
_ = [source_table, update_action, key_columns, error_on_missing_columns, on_problems]
|
_ = [source_table, update_action, key_columns, error_on_missing_columns, on_problems]
|
||||||
Error.throw (Illegal_Argument.Error "Table.update_rows modifies the underlying table, so it is only supported for Database tables - in-memory tables are immutable. Consider using `join` or `merge` for a similar operation that creates a new Table instead.")
|
Error.throw (Illegal_Argument.Error "Table.update_rows modifies the underlying table, so it is only supported for Database tables - in-memory tables are immutable. Consider using `join` or `merge` for a similar operation that creates a new Table instead.")
|
||||||
|
|
||||||
|
@ -20,7 +20,7 @@ from project.Internal.Upload_Table import check_for_null_keys
|
|||||||
Implementation of `lookup_and_replace` for Database backend.
|
Implementation of `lookup_and_replace` for Database backend.
|
||||||
See `Table.lookup_and_replace` for more details.
|
See `Table.lookup_and_replace` for more details.
|
||||||
build_lookup_query : DB_Table -> DB_Table -> (Vector (Integer | Text | Regex) | Text | Integer | Regex) -> Boolean -> Boolean -> Problem_Behavior -> DB_Table ! Missing_Input_Columns | Non_Unique_Key | Unmatched_Rows_In_Lookup
|
build_lookup_query : DB_Table -> DB_Table -> (Vector (Integer | Text | Regex) | Text | Integer | Regex) -> Boolean -> Boolean -> Problem_Behavior -> DB_Table ! Missing_Input_Columns | Non_Unique_Key | Unmatched_Rows_In_Lookup
|
||||||
build_lookup_query base_table lookup_table key_columns add_new_columns allow_unmatched_rows on_problems =
|
build_lookup_query base_table lookup_table key_columns add_new_columns allow_unmatched_rows on_problems:Problem_Behavior =
|
||||||
lookup_columns = Lookup_Helpers.prepare_columns_for_lookup base_table lookup_table key_columns add_new_columns allow_unmatched_rows on_problems
|
lookup_columns = Lookup_Helpers.prepare_columns_for_lookup base_table lookup_table key_columns add_new_columns allow_unmatched_rows on_problems
|
||||||
lookup_columns.if_not_error <| check_initial_invariants base_table lookup_table lookup_columns allow_unmatched_rows <|
|
lookup_columns.if_not_error <| check_initial_invariants base_table lookup_table lookup_columns allow_unmatched_rows <|
|
||||||
column_naming_helper = base_table.connection.base_connection.column_naming_helper
|
column_naming_helper = base_table.connection.base_connection.column_naming_helper
|
||||||
|
@ -227,7 +227,7 @@ type Postgres_Connection
|
|||||||
a `Dry_Run_Operation` warning attached.
|
a `Dry_Run_Operation` warning attached.
|
||||||
@structure make_structure_creator
|
@structure make_structure_creator
|
||||||
create_table : Text -> Vector Column_Description | DB_Table | Table -> Vector Text | Nothing -> Boolean -> Boolean -> Problem_Behavior -> DB_Table ! Table_Already_Exists
|
create_table : Text -> Vector Column_Description | DB_Table | Table -> Vector Text | Nothing -> Boolean -> Boolean -> Problem_Behavior -> DB_Table ! Table_Already_Exists
|
||||||
create_table self (table_name : Text) (structure : Vector Column_Description | DB_Table | Table) (primary_key : (Vector Text | Nothing) = [first_column_name_in_structure structure]) (temporary : Boolean = False) (allow_existing : Boolean = False) (on_problems:Problem_Behavior = Problem_Behavior.Report_Warning) =
|
create_table self (table_name : Text) (structure : Vector Column_Description | DB_Table | Table) (primary_key : (Vector Text | Nothing) = [first_column_name_in_structure structure]) (temporary : Boolean = False) (allow_existing : Boolean = False) (on_problems:Problem_Behavior = ..Report_Warning) =
|
||||||
self.connection.create_table table_name structure primary_key temporary allow_existing on_problems
|
self.connection.create_table table_name structure primary_key temporary allow_existing on_problems
|
||||||
|
|
||||||
## ADVANCED
|
## ADVANCED
|
||||||
|
@ -19,7 +19,7 @@ polyglot java import java.sql.Types
|
|||||||
type Postgres_Type_Mapping
|
type Postgres_Type_Mapping
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
value_type_to_sql : Value_Type -> Problem_Behavior -> SQL_Type
|
value_type_to_sql : Value_Type -> Problem_Behavior -> SQL_Type
|
||||||
value_type_to_sql value_type on_problems =
|
value_type_to_sql value_type on_problems:Problem_Behavior =
|
||||||
result = case value_type of
|
result = case value_type of
|
||||||
Value_Type.Boolean ->
|
Value_Type.Boolean ->
|
||||||
SQL_Type.Value Types.BIT "bool" precision=1
|
SQL_Type.Value Types.BIT "bool" precision=1
|
||||||
|
@ -25,7 +25,7 @@ type SQL_Type_Mapping
|
|||||||
If the conversion is exact, it should be reversible, i.e.
|
If the conversion is exact, it should be reversible, i.e.
|
||||||
`sql_type_to_value_type (value_type_to_sql x Problem_Behavior.Report_Error) = x`.
|
`sql_type_to_value_type (value_type_to_sql x Problem_Behavior.Report_Error) = x`.
|
||||||
value_type_to_sql : Value_Type -> Problem_Behavior -> SQL_Type ! Inexact_Type_Coercion
|
value_type_to_sql : Value_Type -> Problem_Behavior -> SQL_Type ! Inexact_Type_Coercion
|
||||||
value_type_to_sql value_type on_problems =
|
value_type_to_sql value_type on_problems:Problem_Behavior =
|
||||||
_ = [value_type, on_problems]
|
_ = [value_type, on_problems]
|
||||||
Unimplemented.throw "This is an interface only."
|
Unimplemented.throw "This is an interface only."
|
||||||
|
|
||||||
|
@ -213,7 +213,7 @@ type SQLite_Connection
|
|||||||
a `Dry_Run_Operation` warning attached.
|
a `Dry_Run_Operation` warning attached.
|
||||||
@structure make_structure_creator
|
@structure make_structure_creator
|
||||||
create_table : Text -> Vector Column_Description | DB_Table | Table -> Vector Text | Nothing -> Boolean -> Boolean -> Problem_Behavior -> DB_Table ! Table_Already_Exists
|
create_table : Text -> Vector Column_Description | DB_Table | Table -> Vector Text | Nothing -> Boolean -> Boolean -> Problem_Behavior -> DB_Table ! Table_Already_Exists
|
||||||
create_table self (table_name : Text) (structure : Vector Column_Description | DB_Table | Table) (primary_key : (Vector Text | Nothing) = [first_column_name_in_structure structure]) (temporary : Boolean = False) (allow_existing : Boolean = False) (on_problems:Problem_Behavior = Problem_Behavior.Report_Warning) =
|
create_table self (table_name : Text) (structure : Vector Column_Description | DB_Table | Table) (primary_key : (Vector Text | Nothing) = [first_column_name_in_structure structure]) (temporary : Boolean = False) (allow_existing : Boolean = False) (on_problems:Problem_Behavior = ..Report_Warning) =
|
||||||
self.connection.create_table table_name structure primary_key temporary allow_existing on_problems
|
self.connection.create_table table_name structure primary_key temporary allow_existing on_problems
|
||||||
|
|
||||||
## ADVANCED
|
## ADVANCED
|
||||||
|
@ -48,7 +48,7 @@ polyglot java import java.sql.Types
|
|||||||
type SQLite_Type_Mapping
|
type SQLite_Type_Mapping
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
value_type_to_sql : Value_Type -> Problem_Behavior -> SQL_Type
|
value_type_to_sql : Value_Type -> Problem_Behavior -> SQL_Type
|
||||||
value_type_to_sql value_type on_problems =
|
value_type_to_sql value_type on_problems:Problem_Behavior =
|
||||||
result = case value_type of
|
result = case value_type of
|
||||||
Value_Type.Boolean -> SQLite_Types.boolean
|
Value_Type.Boolean -> SQLite_Types.boolean
|
||||||
Value_Type.Byte -> SQLite_Types.integer
|
Value_Type.Byte -> SQLite_Types.integer
|
||||||
|
@ -29,7 +29,7 @@ from project.Internal.Result_Set import result_set_to_table
|
|||||||
of the created table.
|
of the created table.
|
||||||
|
|
||||||
The user-facing function that handles the dry-run logic.
|
The user-facing function that handles the dry-run logic.
|
||||||
create_table_implementation connection table_name structure primary_key temporary allow_existing on_problems = Panic.recover SQL_Error <|
|
create_table_implementation connection table_name structure primary_key temporary allow_existing on_problems:Problem_Behavior = Panic.recover SQL_Error <|
|
||||||
connection.base_connection.maybe_run_maintenance
|
connection.base_connection.maybe_run_maintenance
|
||||||
table_naming_helper = connection.base_connection.table_naming_helper
|
table_naming_helper = connection.base_connection.table_naming_helper
|
||||||
table_naming_helper.verify_table_name table_name <| connection.jdbc_connection.run_within_transaction <|
|
table_naming_helper.verify_table_name table_name <| connection.jdbc_connection.run_within_transaction <|
|
||||||
@ -58,7 +58,7 @@ create_table_implementation connection table_name structure primary_key temporar
|
|||||||
Assumes the output context is enabled for it to work.
|
Assumes the output context is enabled for it to work.
|
||||||
Does not check if the table already exists - so if it does, it may fail with
|
Does not check if the table already exists - so if it does, it may fail with
|
||||||
`SQL_Error`. The caller should perform the check for better error handling.
|
`SQL_Error`. The caller should perform the check for better error handling.
|
||||||
internal_create_table_structure connection table_name structure primary_key temporary on_problems =
|
internal_create_table_structure connection table_name structure primary_key temporary on_problems:Problem_Behavior =
|
||||||
aligned_structure = align_structure connection structure
|
aligned_structure = align_structure connection structure
|
||||||
resolved_primary_key = resolve_primary_key aligned_structure primary_key
|
resolved_primary_key = resolve_primary_key aligned_structure primary_key
|
||||||
validate_structure connection.base_connection.column_naming_helper aligned_structure <|
|
validate_structure connection.base_connection.column_naming_helper aligned_structure <|
|
||||||
@ -85,7 +85,7 @@ internal_create_table_structure connection table_name structure primary_key temp
|
|||||||
- on_problems: the behavior to be used when reporting problems.
|
- on_problems: the behavior to be used when reporting problems.
|
||||||
- row_limit: if set, only the first `row_limit` rows will be uploaded.
|
- row_limit: if set, only the first `row_limit` rows will be uploaded.
|
||||||
internal_upload_table : DB_Table | Table -> Connection -> Text -> Nothing | Vector Text -> Boolean -> Vector Column_Description -> Problem_Behavior -> Integer|Nothing -> DB_Table
|
internal_upload_table : DB_Table | Table -> Connection -> Text -> Nothing | Vector Text -> Boolean -> Vector Column_Description -> Problem_Behavior -> Integer|Nothing -> DB_Table
|
||||||
internal_upload_table source_table connection table_name primary_key temporary structure_hint=Nothing on_problems=Problem_Behavior.Report_Error row_limit=Nothing =
|
internal_upload_table source_table connection table_name primary_key temporary structure_hint=Nothing on_problems:Problem_Behavior=..Report_Error row_limit=Nothing =
|
||||||
case source_table of
|
case source_table of
|
||||||
_ : Table ->
|
_ : Table ->
|
||||||
internal_upload_in_memory_table source_table connection table_name primary_key temporary structure_hint on_problems row_limit
|
internal_upload_in_memory_table source_table connection table_name primary_key temporary structure_hint on_problems row_limit
|
||||||
@ -95,7 +95,7 @@ internal_upload_table source_table connection table_name primary_key temporary s
|
|||||||
Panic.throw <| Illegal_Argument.Error ("Unsupported table type: " + Meta.get_qualified_type_name source_table)
|
Panic.throw <| Illegal_Argument.Error ("Unsupported table type: " + Meta.get_qualified_type_name source_table)
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
select_into_table_implementation source_table connection table_name primary_key temporary on_problems =
|
select_into_table_implementation source_table connection table_name primary_key temporary on_problems:Problem_Behavior =
|
||||||
connection.base_connection.maybe_run_maintenance
|
connection.base_connection.maybe_run_maintenance
|
||||||
table_naming_helper = connection.base_connection.table_naming_helper
|
table_naming_helper = connection.base_connection.table_naming_helper
|
||||||
table_naming_helper.verify_table_name table_name <|
|
table_naming_helper.verify_table_name table_name <|
|
||||||
@ -123,7 +123,7 @@ select_into_table_implementation source_table connection table_name primary_key
|
|||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
It should be run within a transaction and wrapped in `handle_upload_errors`.
|
It should be run within a transaction and wrapped in `handle_upload_errors`.
|
||||||
internal_upload_in_memory_table source_table connection table_name primary_key temporary structure_hint on_problems row_limit =
|
internal_upload_in_memory_table source_table connection table_name primary_key temporary structure_hint on_problems:Problem_Behavior row_limit =
|
||||||
In_Transaction.ensure_in_transaction <|
|
In_Transaction.ensure_in_transaction <|
|
||||||
verify_structure_hint structure_hint source_table.column_names
|
verify_structure_hint structure_hint source_table.column_names
|
||||||
structure = structure_hint.if_nothing source_table
|
structure = structure_hint.if_nothing source_table
|
||||||
@ -147,7 +147,7 @@ internal_upload_in_memory_table source_table connection table_name primary_key t
|
|||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
It should be run within a transaction and wrapped in `handle_upload_errors`.
|
It should be run within a transaction and wrapped in `handle_upload_errors`.
|
||||||
internal_upload_database_table source_table connection table_name primary_key temporary structure_hint on_problems row_limit =
|
internal_upload_database_table source_table connection table_name primary_key temporary structure_hint on_problems:Problem_Behavior row_limit =
|
||||||
In_Transaction.ensure_in_transaction <|
|
In_Transaction.ensure_in_transaction <|
|
||||||
connection_check = if source_table.connection.jdbc_connection == connection.jdbc_connection then True else
|
connection_check = if source_table.connection.jdbc_connection == connection.jdbc_connection then True else
|
||||||
Error.throw (Unsupported_Database_Operation.Error "The Database table to be uploaded must be coming from the same connection as the connection on which the new table is being created. Cross-connection uploads are currently not supported. To work around this, you can first `.read` the table into memory and then upload it from memory to a different connection.")
|
Error.throw (Unsupported_Database_Operation.Error "The Database table to be uploaded must be coming from the same connection as the connection on which the new table is being created. Cross-connection uploads are currently not supported. To work around this, you can first `.read` the table into memory and then upload it from memory to a different connection.")
|
||||||
@ -299,7 +299,7 @@ first_column_name_in_structure structure = case structure of
|
|||||||
responsibility of the caller to ensure that, otherwise the generated
|
responsibility of the caller to ensure that, otherwise the generated
|
||||||
statement will be invalid.
|
statement will be invalid.
|
||||||
prepare_create_table_statement : Connection -> Text -> Vector Column_Description -> Vector Text -> Boolean -> Problem_Behavior -> SQL_Statement
|
prepare_create_table_statement : Connection -> Text -> Vector Column_Description -> Vector Text -> Boolean -> Problem_Behavior -> SQL_Statement
|
||||||
prepare_create_table_statement connection table_name columns primary_key temporary on_problems =
|
prepare_create_table_statement connection table_name columns primary_key temporary on_problems:Problem_Behavior =
|
||||||
type_mapping = connection.dialect.get_type_mapping
|
type_mapping = connection.dialect.get_type_mapping
|
||||||
column_descriptors = columns.map on_problems=No_Wrap def->
|
column_descriptors = columns.map on_problems=No_Wrap def->
|
||||||
sql_type = type_mapping.value_type_to_sql def.value_type on_problems
|
sql_type = type_mapping.value_type_to_sql def.value_type on_problems
|
||||||
@ -323,7 +323,7 @@ make_batched_insert_template connection table_name column_names =
|
|||||||
template
|
template
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
common_update_table (source_table : DB_Table | Table) (target_table : DB_Table) update_action key_columns error_on_missing_columns on_problems =
|
common_update_table (source_table : DB_Table | Table) (target_table : DB_Table) update_action key_columns error_on_missing_columns on_problems:Problem_Behavior =
|
||||||
check_target_table_for_update target_table <|
|
check_target_table_for_update target_table <|
|
||||||
connection = target_table.connection
|
connection = target_table.connection
|
||||||
Panic.recover SQL_Error <| handle_upload_errors <|
|
Panic.recover SQL_Error <| handle_upload_errors <|
|
||||||
@ -460,7 +460,7 @@ type Append_Helper
|
|||||||
- all columns in `source_table` have a corresponding column in `target_table`
|
- all columns in `source_table` have a corresponding column in `target_table`
|
||||||
(with the same name),
|
(with the same name),
|
||||||
- all `key_columns` are present in both source and target tables.
|
- all `key_columns` are present in both source and target tables.
|
||||||
check_update_arguments_structure_match source_table target_table key_columns update_action error_on_missing_columns on_problems ~action =
|
check_update_arguments_structure_match source_table target_table key_columns update_action error_on_missing_columns on_problems:Problem_Behavior ~action =
|
||||||
check_source_column source_column =
|
check_source_column source_column =
|
||||||
# The column must exist because it was verified earlier.
|
# The column must exist because it was verified earlier.
|
||||||
target_column = target_table.get source_column.name
|
target_column = target_table.get source_column.name
|
||||||
|
@ -41,7 +41,7 @@ type Image_File_Format
|
|||||||
## PRIVATE
|
## PRIVATE
|
||||||
Implements the `File.read` for this `File_Format`
|
Implements the `File.read` for this `File_Format`
|
||||||
read : File -> Problem_Behavior -> Any
|
read : File -> Problem_Behavior -> Any
|
||||||
read self file on_problems =
|
read self file on_problems:Problem_Behavior =
|
||||||
_ = [on_problems]
|
_ = [on_problems]
|
||||||
Image.read file
|
Image.read file
|
||||||
|
|
||||||
|
@ -247,7 +247,7 @@ type Snowflake_Connection
|
|||||||
a `Dry_Run_Operation` warning attached.
|
a `Dry_Run_Operation` warning attached.
|
||||||
@structure make_structure_creator
|
@structure make_structure_creator
|
||||||
create_table : Text -> Vector Column_Description | DB_Table | Table -> Vector Text | Nothing -> Boolean -> Boolean -> Problem_Behavior -> DB_Table ! Table_Already_Exists
|
create_table : Text -> Vector Column_Description | DB_Table | Table -> Vector Text | Nothing -> Boolean -> Boolean -> Problem_Behavior -> DB_Table ! Table_Already_Exists
|
||||||
create_table self (table_name : Text) (structure : Vector Column_Description | DB_Table | Table) (primary_key : (Vector Text | Nothing) = [first_column_name_in_structure structure]) (temporary : Boolean = False) (allow_existing : Boolean = False) (on_problems:Problem_Behavior = Problem_Behavior.Report_Warning) =
|
create_table self (table_name : Text) (structure : Vector Column_Description | DB_Table | Table) (primary_key : (Vector Text | Nothing) = [first_column_name_in_structure structure]) (temporary : Boolean = False) (allow_existing : Boolean = False) (on_problems:Problem_Behavior = ..Report_Warning) =
|
||||||
self.connection.create_table table_name structure primary_key temporary allow_existing on_problems
|
self.connection.create_table table_name structure primary_key temporary allow_existing on_problems
|
||||||
|
|
||||||
## ADVANCED
|
## ADVANCED
|
||||||
|
@ -22,7 +22,7 @@ polyglot java import java.sql.Types
|
|||||||
type Snowflake_Type_Mapping
|
type Snowflake_Type_Mapping
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
value_type_to_sql : Value_Type -> Problem_Behavior -> SQL_Type
|
value_type_to_sql : Value_Type -> Problem_Behavior -> SQL_Type
|
||||||
value_type_to_sql value_type on_problems =
|
value_type_to_sql value_type on_problems:Problem_Behavior =
|
||||||
result = case value_type of
|
result = case value_type of
|
||||||
Value_Type.Boolean -> SQL_Type.Value Types.BOOLEAN "boolean"
|
Value_Type.Boolean -> SQL_Type.Value Types.BOOLEAN "boolean"
|
||||||
# All integer types in Snowflake become NUMERIC(38,0).
|
# All integer types in Snowflake become NUMERIC(38,0).
|
||||||
|
@ -1800,7 +1800,7 @@ type Column
|
|||||||
@type Widget_Helpers.parse_type_selector
|
@type Widget_Helpers.parse_type_selector
|
||||||
@format (make_format_chooser include_number=False)
|
@format (make_format_chooser include_number=False)
|
||||||
parse : Value_Type | Auto -> Text | Data_Formatter -> Problem_Behavior -> Column
|
parse : Value_Type | Auto -> Text | Data_Formatter -> Problem_Behavior -> Column
|
||||||
parse self type:(Value_Type | Auto)=Auto format:(Text | Data_Formatter)="" on_problems=Report_Warning =
|
parse self type:(Value_Type | Auto)=Auto format:(Text | Data_Formatter)="" on_problems:Problem_Behavior=..Report_Warning =
|
||||||
Value_Type.expect_text self <|
|
Value_Type.expect_text self <|
|
||||||
formatter = case format of
|
formatter = case format of
|
||||||
_ : Text -> if format == "" then Data_Formatter.Value else Data_Formatter.Value.with_format type format
|
_ : Text -> if format == "" then Data_Formatter.Value else Data_Formatter.Value.with_format type format
|
||||||
@ -1953,7 +1953,7 @@ type Column
|
|||||||
types. Due to this, a Mixed column containing values `[2, "3"]` will
|
types. Due to this, a Mixed column containing values `[2, "3"]` will
|
||||||
actually be converted into `[2, Nothing]` when casting to Integer type.
|
actually be converted into `[2, Nothing]` when casting to Integer type.
|
||||||
cast : Value_Type -> Problem_Behavior -> Column ! Illegal_Argument | Inexact_Type_Coercion | Conversion_Failure
|
cast : Value_Type -> Problem_Behavior -> Column ! Illegal_Argument | Inexact_Type_Coercion | Conversion_Failure
|
||||||
cast self value_type on_problems=Problem_Behavior.Report_Warning =
|
cast self value_type on_problems:Problem_Behavior=..Report_Warning =
|
||||||
Cast_Helpers.check_cast_compatibility self.value_type value_type <|
|
Cast_Helpers.check_cast_compatibility self.value_type value_type <|
|
||||||
target_storage_type = Storage.from_value_type value_type on_problems
|
target_storage_type = Storage.from_value_type value_type on_problems
|
||||||
Java_Problems.with_problem_aggregator on_problems java_problem_aggregator->
|
Java_Problems.with_problem_aggregator on_problems java_problem_aggregator->
|
||||||
|
@ -78,7 +78,7 @@ type Data_Formatter
|
|||||||
If set to `Ignore`, the operation proceeds without errors or warnings.
|
If set to `Ignore`, the operation proceeds without errors or warnings.
|
||||||
@type Widget_Helpers.parse_type_selector
|
@type Widget_Helpers.parse_type_selector
|
||||||
parse : Text -> (Value_Type | Auto) -> Problem_Behavior -> Any
|
parse : Text -> (Value_Type | Auto) -> Problem_Behavior -> Any
|
||||||
parse self text type:(Value_Type | Auto)=Auto on_problems=Problem_Behavior.Report_Warning =
|
parse self text type:(Value_Type | Auto)=Auto on_problems:Problem_Behavior=..Report_Warning =
|
||||||
parser = self.make_value_type_parser type
|
parser = self.make_value_type_parser type
|
||||||
Java_Problems.with_problem_aggregator on_problems java_problem_aggregator->
|
Java_Problems.with_problem_aggregator on_problems java_problem_aggregator->
|
||||||
related_column_name = Nothing
|
related_column_name = Nothing
|
||||||
|
@ -99,7 +99,7 @@ type Delimited_Format
|
|||||||
ADVANCED
|
ADVANCED
|
||||||
Implements the `File.read` for this `File_Format`
|
Implements the `File.read` for this `File_Format`
|
||||||
read : File -> Problem_Behavior -> Any
|
read : File -> Problem_Behavior -> Any
|
||||||
read self file on_problems =
|
read self file on_problems:Problem_Behavior =
|
||||||
Delimited_Reader.read_file self file on_problems
|
Delimited_Reader.read_file self file on_problems
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
@ -107,13 +107,13 @@ type Delimited_Format
|
|||||||
read_stream : Input_Stream -> File_Format_Metadata -> Any
|
read_stream : Input_Stream -> File_Format_Metadata -> Any
|
||||||
read_stream self stream:Input_Stream (metadata : File_Format_Metadata = File_Format_Metadata.no_information) =
|
read_stream self stream:Input_Stream (metadata : File_Format_Metadata = File_Format_Metadata.no_information) =
|
||||||
_ = metadata
|
_ = metadata
|
||||||
Delimited_Reader.read_stream self stream on_problems=Report_Warning
|
Delimited_Reader.read_stream self stream on_problems=..Report_Warning
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
ADVANCED
|
ADVANCED
|
||||||
Implements the `Table.write` for this `File_Format`.
|
Implements the `Table.write` for this `File_Format`.
|
||||||
write_table : File -> Table -> Existing_File_Behavior -> Match_Columns -> Problem_Behavior -> File
|
write_table : File -> Table -> Existing_File_Behavior -> Match_Columns -> Problem_Behavior -> File
|
||||||
write_table self file table on_existing_file match_columns on_problems =
|
write_table self file table on_existing_file match_columns on_problems:Problem_Behavior =
|
||||||
Delimited_Writer.write_file table self file on_existing_file match_columns on_problems
|
Delimited_Writer.write_file table self file on_existing_file match_columns on_problems
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
|
@ -109,7 +109,7 @@ type Excel_Format
|
|||||||
ADVANCED
|
ADVANCED
|
||||||
Implements the `File.read` for this `File_Format`
|
Implements the `File.read` for this `File_Format`
|
||||||
read : File -> Problem_Behavior -> Any
|
read : File -> Problem_Behavior -> Any
|
||||||
read self file on_problems =
|
read self file on_problems:Problem_Behavior =
|
||||||
format = should_treat_as_xls_format self.xls_format file
|
format = should_treat_as_xls_format self.xls_format file
|
||||||
case self of
|
case self of
|
||||||
Excel_Format.Workbook _ _ -> Excel_Workbook.new file format
|
Excel_Format.Workbook _ _ -> Excel_Workbook.new file format
|
||||||
@ -160,7 +160,7 @@ type Excel_Format
|
|||||||
- match_columns: How to match columns between the table and the file.
|
- match_columns: How to match columns between the table and the file.
|
||||||
- on_problems: What to do if there are problems reading the file.
|
- on_problems: What to do if there are problems reading the file.
|
||||||
write_table : File -> Table -> Existing_File_Behavior -> Match_Columns -> Problem_Behavior -> File
|
write_table : File -> Table -> Existing_File_Behavior -> Match_Columns -> Problem_Behavior -> File
|
||||||
write_table self file table on_existing_file match_columns on_problems =
|
write_table self file table on_existing_file match_columns on_problems:Problem_Behavior =
|
||||||
format = should_treat_as_xls_format self.xls_format file
|
format = should_treat_as_xls_format self.xls_format file
|
||||||
Excel_Writer.write_file file table on_existing_file (as_section self) match_columns on_problems format
|
Excel_Writer.write_file file table on_existing_file (as_section self) match_columns on_problems format
|
||||||
|
|
||||||
|
@ -108,7 +108,7 @@ Table.from_objects value (fields : Vector | Nothing = Nothing) =
|
|||||||
will be named `Column <N>` where `N` is the number of the marked group.
|
will be named `Column <N>` where `N` is the number of the marked group.
|
||||||
(Group 0 is not included.)
|
(Group 0 is not included.)
|
||||||
Text.parse_to_table : Text | Regex -> Case_Sensitivity -> Boolean -> Problem_Behavior -> Table ! Type_Error | Regex_Syntax_Error | Illegal_Argument
|
Text.parse_to_table : Text | Regex -> Case_Sensitivity -> Boolean -> Problem_Behavior -> Table ! Type_Error | Regex_Syntax_Error | Illegal_Argument
|
||||||
Text.parse_to_table self (pattern : Text | Regex) case_sensitivity=Case_Sensitivity.Sensitive parse_values=True on_problems=Report_Warning =
|
Text.parse_to_table self (pattern : Text | Regex) case_sensitivity=Case_Sensitivity.Sensitive parse_values=True on_problems:Problem_Behavior=..Report_Warning =
|
||||||
Parse_To_Table.parse_text_to_table self pattern case_sensitivity parse_values on_problems
|
Parse_To_Table.parse_text_to_table self pattern case_sensitivity parse_values on_problems
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
@ -123,7 +123,7 @@ Text.parse_to_table self (pattern : Text | Regex) case_sensitivity=Case_Sensitiv
|
|||||||
Not used for JSON.
|
Not used for JSON.
|
||||||
- on_problems: What to do if there are problems reading the file.
|
- on_problems: What to do if there are problems reading the file.
|
||||||
JSON_Format.write_table : Writable_File -> Table -> Existing_File_Behavior -> Match_Columns -> Problem_Behavior -> File
|
JSON_Format.write_table : Writable_File -> Table -> Existing_File_Behavior -> Match_Columns -> Problem_Behavior -> File
|
||||||
JSON_Format.write_table self file:Writable_File table on_existing_file match_columns on_problems =
|
JSON_Format.write_table self file:Writable_File table on_existing_file match_columns on_problems:Problem_Behavior=..Report_Warning =
|
||||||
_ = match_columns
|
_ = match_columns
|
||||||
if file.exists.not then table.to_json.write file else
|
if file.exists.not then table.to_json.write file else
|
||||||
case on_existing_file of
|
case on_existing_file of
|
||||||
@ -135,7 +135,7 @@ JSON_Format.write_table self file:Writable_File table on_existing_file match_col
|
|||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
Handles appending to a file containing a JSON table by removing the closing bracket and appending to it.
|
Handles appending to a file containing a JSON table by removing the closing bracket and appending to it.
|
||||||
append_to_json_table file:File table on_problems =
|
append_to_json_table file:File table on_problems:Problem_Behavior =
|
||||||
old_text = file.read_text.trim
|
old_text = file.read_text.trim
|
||||||
case old_text.ends_with "]" && old_text.starts_with "[" of
|
case old_text.ends_with "]" && old_text.starts_with "[" of
|
||||||
True ->
|
True ->
|
||||||
|
@ -17,7 +17,7 @@ polyglot java import org.enso.table.operations.AddRowNumber
|
|||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
add_row_number : Table -> Text -> Integer -> Integer -> Text | Integer | Regex | Vector (Integer | Text | Regex) -> Vector (Text | Sort_Column) | Text | Sort_Column -> Problem_Behavior -> Table
|
add_row_number : Table -> Text -> Integer -> Integer -> Text | Integer | Regex | Vector (Integer | Text | Regex) -> Vector (Text | Sort_Column) | Text | Sort_Column -> Problem_Behavior -> Table
|
||||||
add_row_number table name from step group_by order_by on_problems =
|
add_row_number table name from step group_by order_by on_problems:Problem_Behavior =
|
||||||
problem_builder = Problem_Builder.new error_on_missing_columns=True
|
problem_builder = Problem_Builder.new error_on_missing_columns=True
|
||||||
grouping_columns = table.columns_helper.select_columns_helper group_by Case_Sensitivity.Default True problem_builder
|
grouping_columns = table.columns_helper.select_columns_helper group_by Case_Sensitivity.Default True problem_builder
|
||||||
ordering = Table_Helpers.resolve_order_by table.columns order_by problem_builder
|
ordering = Table_Helpers.resolve_order_by table.columns order_by problem_builder
|
||||||
@ -43,7 +43,7 @@ add_row_number table name from step group_by order_by on_problems =
|
|||||||
## PRIVATE
|
## PRIVATE
|
||||||
If the table already contains a column called `name` it will be renamed to a
|
If the table already contains a column called `name` it will be renamed to a
|
||||||
unique name, so that a new column with this name can be added.
|
unique name, so that a new column with this name can be added.
|
||||||
rename_columns_if_needed table name on_problems build_table_from_columns =
|
rename_columns_if_needed table name on_problems:Problem_Behavior build_table_from_columns =
|
||||||
column_names = table.column_names
|
column_names = table.column_names
|
||||||
if column_names.contains name . not then table else
|
if column_names.contains name . not then table else
|
||||||
problems = [Duplicate_Output_Column_Names.Error [name]]
|
problems = [Duplicate_Output_Column_Names.Error [name]]
|
||||||
|
@ -20,7 +20,7 @@ polyglot java import org.enso.table.operations.AddRunning
|
|||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
add_running : Table -> Statistic -> (Text|Integer) -> Text -> Vector (Text | Integer | Regex) | Text | Integer | Regex -> Vector (Text | Sort_Column) | Text -> Problem_Behavior -> Table
|
add_running : Table -> Statistic -> (Text|Integer) -> Text -> Vector (Text | Integer | Regex) | Text | Integer | Regex -> Vector (Text | Sort_Column) | Text -> Problem_Behavior -> Table
|
||||||
add_running table (statistic:Statistic=Statistic.Count) (of:Text|Integer=0) (as:Text='') (group_by:(Vector | Text | Integer | Regex)=[]) (order_by:(Vector | Text)=[]) (on_problems:Problem_Behavior=Problem_Behavior.Report_Warning) =
|
add_running table (statistic:Statistic=Statistic.Count) (of:Text|Integer=0) (as:Text='') (group_by:(Vector | Text | Integer | Regex)=[]) (order_by:(Vector | Text)=[]) (on_problems:Problem_Behavior=..Report_Warning) =
|
||||||
check_running_support [statistic] <|
|
check_running_support [statistic] <|
|
||||||
of_col = table.at of
|
of_col = table.at of
|
||||||
new_name = if as.is_empty then 'Running ' + statistic.to_text + ' of ' + of_col.name else as
|
new_name = if as.is_empty then 'Running ' + statistic.to_text + ' of ' + of_col.name else as
|
||||||
|
@ -9,7 +9,7 @@ import project.Internal.Problem_Builder.Problem_Builder
|
|||||||
Map a text-returning function over the column values, using Storage directly.
|
Map a text-returning function over the column values, using Storage directly.
|
||||||
The output column has the same name as the input.
|
The output column has the same name as the input.
|
||||||
map_over_storage : Column -> (Any -> Text) -> (Integer -> Any) -> Boolean -> Problem_Behavior -> Column
|
map_over_storage : Column -> (Any -> Text) -> (Integer -> Any) -> Boolean -> Problem_Behavior -> Column
|
||||||
map_over_storage input_column function builder skip_nothing=True on_problems=Report_Warning =
|
map_over_storage input_column function builder skip_nothing=True on_problems:Problem_Behavior=..Report_Warning =
|
||||||
problem_builder = Problem_Builder.new
|
problem_builder = Problem_Builder.new
|
||||||
input_storage = input_column.java_column.getStorage
|
input_storage = input_column.java_column.getStorage
|
||||||
num_input_rows = input_storage.size
|
num_input_rows = input_storage.size
|
||||||
|
@ -37,7 +37,7 @@ polyglot java import org.enso.table.read.QuoteStrippingParser
|
|||||||
If set to `Report_Error`, the operation fails with a dataflow error.
|
If set to `Report_Error`, the operation fails with a dataflow error.
|
||||||
If set to `Ignore`, the operation proceeds without errors or warnings.
|
If set to `Ignore`, the operation proceeds without errors or warnings.
|
||||||
read_file : Delimited_Format -> File -> Problem_Behavior -> Any
|
read_file : Delimited_Format -> File -> Problem_Behavior -> Any
|
||||||
read_file format file on_problems =
|
read_file format file on_problems:Problem_Behavior =
|
||||||
## We use the default `max_columns` setting. If we want to be able to
|
## We use the default `max_columns` setting. If we want to be able to
|
||||||
read files with unlimited column limits (risking OutOfMemory
|
read files with unlimited column limits (risking OutOfMemory
|
||||||
exceptions), we can catch the exception indicating the limit has been
|
exceptions), we can catch the exception indicating the limit has been
|
||||||
@ -50,7 +50,7 @@ read_file format file on_problems =
|
|||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
read_text : Text -> Delimited_Format -> Problem_Behavior -> Table
|
read_text : Text -> Delimited_Format -> Problem_Behavior -> Table
|
||||||
read_text text format on_problems =
|
read_text text format on_problems:Problem_Behavior =
|
||||||
java_reader = StringReader.new text
|
java_reader = StringReader.new text
|
||||||
read_from_reader format java_reader on_problems
|
read_from_reader format java_reader on_problems
|
||||||
|
|
||||||
@ -70,7 +70,7 @@ read_text text format on_problems =
|
|||||||
- related_file: The file related to the provided `java_stream`, if available,
|
- related_file: The file related to the provided `java_stream`, if available,
|
||||||
or `Nothing`. It is used for more detailed error reporting.
|
or `Nothing`. It is used for more detailed error reporting.
|
||||||
read_stream : Delimited_Format -> Input_Stream -> Problem_Behavior -> Integer -> File | Nothing -> Any
|
read_stream : Delimited_Format -> Input_Stream -> Problem_Behavior -> Integer -> File | Nothing -> Any
|
||||||
read_stream format stream on_problems max_columns=default_max_columns related_file=Nothing =
|
read_stream format stream on_problems:Problem_Behavior max_columns=default_max_columns related_file=Nothing =
|
||||||
handle_io_exception related_file <|
|
handle_io_exception related_file <|
|
||||||
stream.with_stream_decoder format.encoding on_problems reporting_stream_decoder->
|
stream.with_stream_decoder format.encoding on_problems reporting_stream_decoder->
|
||||||
read_from_reader format reporting_stream_decoder on_problems max_columns
|
read_from_reader format reporting_stream_decoder on_problems max_columns
|
||||||
@ -92,7 +92,7 @@ read_stream format stream on_problems max_columns=default_max_columns related_fi
|
|||||||
avoid `OutOfMemory` errors on malformed files. It must be a positive
|
avoid `OutOfMemory` errors on malformed files. It must be a positive
|
||||||
integer.
|
integer.
|
||||||
read_from_reader : Delimited_Format -> Reader -> Problem_Behavior -> Integer -> Any
|
read_from_reader : Delimited_Format -> Reader -> Problem_Behavior -> Integer -> Any
|
||||||
read_from_reader format java_reader on_problems max_columns=4096 =
|
read_from_reader format java_reader on_problems:Problem_Behavior max_columns=4096 =
|
||||||
Illegal_Argument.handle_java_exception <| handle_parsing_failure <| handle_parsing_exception <| Empty_File_Error.handle_java_exception <|
|
Illegal_Argument.handle_java_exception <| handle_parsing_failure <| handle_parsing_exception <| Empty_File_Error.handle_java_exception <|
|
||||||
Java_Problems.with_problem_aggregator on_problems java_problem_aggregator->
|
Java_Problems.with_problem_aggregator on_problems java_problem_aggregator->
|
||||||
reader = prepare_reader format max_columns on_problems java_problem_aggregator
|
reader = prepare_reader format max_columns on_problems java_problem_aggregator
|
||||||
@ -100,7 +100,7 @@ read_from_reader format java_reader on_problems max_columns=4096 =
|
|||||||
format.row_limit.attach_warning (Table.Value java_table)
|
format.row_limit.attach_warning (Table.Value java_table)
|
||||||
|
|
||||||
## PRIVATE
|
## PRIVATE
|
||||||
prepare_reader format:Delimited_Format max_columns on_problems java_problem_aggregator newline_override=Nothing =
|
prepare_reader format:Delimited_Format max_columns on_problems:Problem_Behavior java_problem_aggregator newline_override=Nothing =
|
||||||
java_headers = case format.headers of
|
java_headers = case format.headers of
|
||||||
Headers.Has_Headers -> DelimitedReader.HeaderBehavior.USE_FIRST_ROW_AS_HEADERS
|
Headers.Has_Headers -> DelimitedReader.HeaderBehavior.USE_FIRST_ROW_AS_HEADERS
|
||||||
Headers.Detect_Headers -> DelimitedReader.HeaderBehavior.INFER
|
Headers.Detect_Headers -> DelimitedReader.HeaderBehavior.INFER
|
||||||
@ -207,7 +207,7 @@ detect_metadata file format =
|
|||||||
newline_at_eof : File -> Encoding -> Text|Nothing
|
newline_at_eof : File -> Encoding -> Text|Nothing
|
||||||
newline_at_eof file encoding =
|
newline_at_eof file encoding =
|
||||||
newlines = ['\r\n', '\n', '\r']
|
newlines = ['\r\n', '\n', '\r']
|
||||||
newline_bytes = newlines.map (x-> x.bytes encoding Report_Error)
|
newline_bytes = newlines.map (x-> x.bytes encoding Problem_Behavior.Report_Error)
|
||||||
most_bytes = newline_bytes.map .length . compute Statistic.Maximum
|
most_bytes = newline_bytes.map .length . compute Statistic.Maximum
|
||||||
file_last_bytes = file.read_last_bytes most_bytes
|
file_last_bytes = file.read_last_bytes most_bytes
|
||||||
result = newlines.zip newline_bytes . find if_missing=[Nothing] pair->
|
result = newlines.zip newline_bytes . find if_missing=[Nothing] pair->
|
||||||
|
@ -37,7 +37,7 @@ polyglot java import org.enso.table.write.WriteQuoteBehavior
|
|||||||
If set to `Report_Error`, the operation fails with a dataflow error.
|
If set to `Report_Error`, the operation fails with a dataflow error.
|
||||||
If set to `Ignore`, the operation proceeds without errors or warnings.
|
If set to `Ignore`, the operation proceeds without errors or warnings.
|
||||||
write_file : Table -> Delimited_Format -> Writable_File -> Existing_File_Behavior -> Match_Columns -> Problem_Behavior -> Any
|
write_file : Table -> Delimited_Format -> Writable_File -> Existing_File_Behavior -> Match_Columns -> Problem_Behavior -> Any
|
||||||
write_file table format (file : Writable_File) on_existing_file match_columns on_problems =
|
write_file table format (file : Writable_File) on_existing_file match_columns on_problems:Problem_Behavior =
|
||||||
case on_existing_file of
|
case on_existing_file of
|
||||||
Existing_File_Behavior.Append ->
|
Existing_File_Behavior.Append ->
|
||||||
# The default encoding may be used for detecting the effective encoding in append mode.
|
# The default encoding may be used for detecting the effective encoding in append mode.
|
||||||
@@ -56,7 +56,7 @@ write_file table format (file : Writable_File) on_existing_file match_columns on

If the file does not exist or is empty, it acts like a regular overwrite.
append_to_file : Table -> Delimited_Format -> Writable_File -> Match_Columns -> Problem_Behavior -> Any
-append_to_file table format (file : Writable_File) match_columns on_problems =
+append_to_file table format (file : Writable_File) match_columns on_problems:Problem_Behavior =
if file.is_local then append_to_local_file table format file.file match_columns on_problems else
## TODO in the future, if we have remote backends that _do support_ proper append, we could avoid downloading
the full file here - we only need to read the first few first and last bytes (to infer the headers and newline at EOF).
@@ -65,7 +65,7 @@ append_to_file table format (file : Writable_File) match_columns on_problems =
append_to_local_file table format local_temp_file match_columns on_problems . if_not_error file.file_for_return

## PRIVATE
-append_to_local_file table format (file : File) match_columns on_problems =
+append_to_local_file table format (file : File) match_columns on_problems:Problem_Behavior =
Column_Name_Mismatch.handle_java_exception <| Column_Count_Mismatch.handle_java_exception <| Panic.recover Illegal_Argument <|
inferring_format = format.with_line_endings Infer
metadata = Delimited_Reader.detect_metadata file inferring_format
@@ -130,7 +130,7 @@ write_text table format =
- separator_override: An optional override for the line separator to use
instead of the one from `format`.
write_to_stream : Table -> Delimited_Format -> Output_Stream -> Problem_Behavior -> File | Nothing -> Text | Nothing -> Boolean -> Any
-write_to_stream table format stream on_problems related_file=Nothing separator_override=Nothing needs_leading_newline=False =
+write_to_stream table format stream on_problems:Problem_Behavior related_file=Nothing separator_override=Nothing needs_leading_newline=False =
handle_io_exception ~action = Panic.catch IOException action caught_panic->
File_Error.wrap_io_exception related_file caught_panic.payload

@@ -51,7 +51,7 @@ handle_reader file reader =
- xls_format: If `True` then the file is read in using Excel 95-2003 format
otherwise reads in Excel 2007+ format.
read_file : File -> Excel_Section -> Problem_Behavior -> Boolean -> (Table | Vector)
-read_file file section on_problems xls_format=False =
+read_file file section on_problems:Problem_Behavior xls_format=False =
file_format = if xls_format then ExcelFileFormat.XLS else ExcelFileFormat.XLSX
reader java_file = case section of
Excel_Section.Worksheet sheet headers skip_rows row_limit ->
@@ -26,7 +26,7 @@ polyglot java import org.enso.table.data.mask.OrderMask
- column_count: The number of columns to split to.
If `All_Columns` then columns will be added to fit all data.
fan_out_to_columns : Table -> Text | Integer -> (Any -> Vector Any) -> Columns_To_Add -> (Integer -> Any) -> Problem_Behavior -> Table | Nothing
-fan_out_to_columns table input_column_id function column_count column_builder=make_string_builder on_problems=Report_Error =
+fan_out_to_columns table input_column_id function column_count column_builder=make_string_builder on_problems:Problem_Behavior=..Report_Error =
input_column = table.get input_column_id
problem_builder = Problem_Builder.new
new_columns_unrenamed = map_columns_to_multiple input_column function column_count=column_count.columns_to_split column_builder=column_builder problem_builder=problem_builder
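`fan_out_to_columns` now combines both halves of the pattern: a type ascription on `on_problems` plus an autoscoped `..Report_Error` default. A hedged sketch with a made-up function name showing what that buys callers:

    # Illustrative only: the default no longer names the constructor directly,
    # and callers may override it with either spelling.
    split_or_fail (on_problems : Problem_Behavior = ..Report_Error) = on_problems

    example_overrides =
        [split_or_fail ..Report_Warning, split_or_fail Problem_Behavior.Ignore]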
@@ -51,7 +51,7 @@ fan_out_to_columns table input_column_id function column_count column_builder=ma
single row is output with `Nothing` for the transformed column. If false,
the row is not output at all.
fan_out_to_rows : Table -> Text -> (Any -> Vector Any) -> Vector | Function -> Boolean -> (Integer -> Any) -> Problem_Behavior -> Table
-fan_out_to_rows table input_column_id:Text function column_names=[input_column_id] at_least_one_row=False column_builder=make_string_builder on_problems=Report_Error =
+fan_out_to_rows table input_column_id:Text function column_names=[input_column_id] at_least_one_row=False column_builder=make_string_builder on_problems:Problem_Behavior=..Report_Error =
## Treat this as a special case of fan_out_to_rows_and_columns, with one
column. Wrap the provided function to convert each value to a singleton
`Vector`.
@@ -102,7 +102,7 @@ fan_out_to_rows table input_column_id:Text function column_names=[input_column_i
- column_names: The names for the generated columns.
- on_problems: Specifies the behavior when a problem occurs.
fan_out_to_rows_and_columns : Table -> Text | Integer -> (Any -> Vector (Vector Any)) -> Vector Text -> Boolean -> (Integer -> Any) -> Problem_Behavior -> Table
-fan_out_to_rows_and_columns table input_column_id function column_names at_least_one_row=False column_builder=make_string_builder on_problems=Report_Error =
+fan_out_to_rows_and_columns table input_column_id function column_names at_least_one_row=False column_builder=make_string_builder on_problems:Problem_Behavior=..Report_Error =
problem_builder = Problem_Builder.new
unique = table.column_naming_helper.create_unique_name_strategy

@@ -240,7 +240,6 @@ fan_out_to_rows_and_columns_dynamic input_storage function at_least_one_row colu
If `Nothing` then columns will be added to fit all data.
If the data exceeds the `column_count`, a `Column_Count_Exceeded` error
will follow the `on_problems` behavior.
-- on_problems: Specifies the behavior when a problem occurs.
map_columns_to_multiple : Column -> (Any -> Vector Any) -> Integer | Nothing -> (Integer -> Any) -> Problem_Builder -> Vector Column
map_columns_to_multiple input_column function column_count column_builder=make_string_builder problem_builder =
num_rows = input_column.length
@@ -12,7 +12,7 @@ from project.Errors import Nothing_Value_In_Filter_Condition

It also performs validation and will throw errors if unexpected column types
are encountered.
-make_filter_column source_column filter_condition on_problems =
+make_filter_column source_column filter_condition on_problems:Problem_Behavior =
base_column = case filter_condition of
# Equality
Equal value _ ->
@@ -95,7 +95,7 @@ unpack_problem_summary problem_summary =
## PRIVATE
This is the new preferred way of running Java methods that may report problems.
with_problem_aggregator : Problem_Behavior -> (ProblemAggregator -> Any) -> Any
-with_problem_aggregator on_problems f =
+with_problem_aggregator on_problems:Problem_Behavior f =
## This should be the only call to the `makeTopLevelAggregator` method in the whole codebase.
aggregator = ProblemAggregator.makeTopLevelAggregator
result = f aggregator
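With the ascription in place, call sites of this private helper can spell the behaviour in autoscoped form. A hedged usage sketch, where the callback body is made up:

    # Illustrative call: only the spelling of the behaviour argument changes;
    # the aggregator callback is passed exactly as before.
    count_rows_with_warnings vector =
        with_problem_aggregator ..Report_Warning java_aggregator->
            # A real caller would hand `java_aggregator` to a Java helper here.
            vector.length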
@@ -103,7 +103,7 @@ with_problem_aggregator on_problems f =

## PRIVATE
with_map_operation_problem_aggregator : (Text | Nothing) -> Problem_Behavior -> (MapOperationProblemAggregator -> Any) -> Any
-with_map_operation_problem_aggregator (location : Text | Nothing) on_problems f =
+with_map_operation_problem_aggregator (location : Text | Nothing) on_problems:Problem_Behavior f =
with_problem_aggregator on_problems java_aggregator->
problem_builder = MapOperationProblemAggregator.new java_aggregator location
f problem_builder
@@ -23,7 +23,7 @@ type Join_Condition_Resolver
Besides, a list of redundant columns from equality joins is aggregated
which can be used to deduplicate them.
resolve : Join_Condition | Text | Vector (Join_Condition | Text) -> Problem_Behavior -> Join_Condition_Resolution
-resolve self conditions on_problems =
+resolve self conditions on_problems:Problem_Behavior =
redundant_names = Builder.new
left_problem_builder = Problem_Builder.new missing_input_columns_location="the left table" types_to_always_throw=[Missing_Input_Columns]
right_problem_builder = Problem_Builder.new missing_input_columns_location="the right table" types_to_always_throw=[Missing_Input_Columns]
@@ -42,7 +42,7 @@ type Lookup_Column
indicating which columns will be updated and which will be added.

It also reports any errors or warnings related to selecting these columns.
-prepare_columns_for_lookup base_table lookup_table key_columns_selector add_new_columns allow_unmatched_rows on_problems =
+prepare_columns_for_lookup base_table lookup_table key_columns_selector add_new_columns allow_unmatched_rows on_problems:Problem_Behavior =
key_columns = base_table.select_columns key_columns_selector . column_names . catch No_Output_Columns _->
Error.throw (Illegal_Argument.Error "At least one key column must be provided for `merge`.")
lookup_table_key_columns = lookup_table.select_columns key_columns . catch Missing_Input_Columns error->
@@ -17,7 +17,7 @@ from project.Errors import Duplicate_Output_Column_Names

See Text.parse_to_table.
parse_text_to_table : Text | Regex -> Text -> Case_Sensitivity -> Boolean -> Problem_Behavior -> Table ! Type_Error | Regex_Syntax_Error | Illegal_Argument
-parse_text_to_table text regex_or_pattern="." case_sensitivity=Case_Sensitivity.Default parse_values=True on_problems=Report_Warning =
+parse_text_to_table text regex_or_pattern="." case_sensitivity=Case_Sensitivity.Default parse_values=True on_problems=..Report_Warning =
case_insensitive = case_sensitivity.is_case_insensitive_in_memory
pattern = case regex_or_pattern of
_ : Regex -> regex_or_pattern.recompile case_sensitivity
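Here only the spelling of the default changes (`Report_Warning` becomes `..Report_Warning`); the parameter itself stays un-annotated, and explicit callers are unaffected. A hypothetical call with made-up arguments:

    # Illustrative only: overriding the default still accepts the fully
    # qualified constructor, exactly as before.
    example_parse =
        parse_text_to_table "a1 b2 c3" "[a-z][0-9]" on_problems=Problem_Behavior.Ignore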
@@ -59,7 +59,7 @@ type Problem_Builder
If in `Report_Error` mode and there are any problems gathered, the first
one will be returned as error without even running the computation.
attach_problems_before : Problem_Behavior -> Any -> Any
-attach_problems_before self problem_behavior ~computation =
+attach_problems_before self problem_behavior:Problem_Behavior ~computation =
case self.get_problemset_throwing_distinguished_errors of
problems ->
problem_behavior.attach_problems_before problems computation
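The doc comment above describes the short-circuit: in `Report_Error` mode, the first gathered problem is returned without forcing the computation. A hedged sketch of a call site, where `builder` is assumed to be a `Problem_Builder` that has already gathered problems:

    # Illustrative only: with ..Report_Error the lazy computation is never
    # forced when `builder` already holds a problem; with ..Report_Warning it
    # runs and the problems are attached to its result as warnings.
    fail_fast builder =
        builder.attach_problems_before ..Report_Error <|
            100 * 100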
@@ -11,7 +11,7 @@ from project.Errors import Missing_Input_Columns, No_Such_Column, Non_Unique_Key

## PRIVATE
replace : Table -> (Table | Map) -> (Text | Integer | By_Type | Vector (Text | Integer | By_Type)) -> (Text | Integer | Nothing) -> (Text | Integer | Nothing) -> Boolean -> Problem_Behavior -> Table ! Missing_Input_Columns | Non_Unique_Key | Unmatched_Rows_In_Lookup
-replace base_table lookup_table columns:(Text | Integer | By_Type | Vector (Text | Integer | By_Type)) from_column:(Text | Integer | Nothing)=Nothing to_column:(Text | Integer | Nothing)=Nothing allow_unmatched_rows:Boolean=True on_problems:Problem_Behavior=Problem_Behavior.Report_Warning =
+replace base_table lookup_table columns:(Text | Integer | By_Type | Vector (Text | Integer | By_Type)) from_column:(Text | Integer | Nothing)=Nothing to_column:(Text | Integer | Nothing)=Nothing allow_unmatched_rows:Boolean=True on_problems:Problem_Behavior=..Report_Warning =
case columns of
_ : Vector ->
problem_builder = Problem_Builder.new error_on_missing_columns=True
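For `replace` the default merely changes spelling: `..Report_Warning` resolves against the parameter's `Problem_Behavior` ascription and denotes the same value as the old fully qualified default. A tiny, hypothetical check of that equivalence:

    # Illustrative only: evaluates to True when called with no arguments.
    default_is_unchanged (v : Problem_Behavior = ..Report_Warning) =
        v == Problem_Behavior.Report_Warning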
@@ -9,7 +9,7 @@ from project.Internal.Fan_Out import all
Splits a column of text into a set of new columns.
See `Table.split_to_columns`.
split_to_columns : Table -> Text | Integer -> Text -> Columns_To_Add -> Problem_Behavior -> Table
-split_to_columns table input_column_id delimiter="," column_count:Columns_To_Add=..All_Columns on_problems=Report_Error =
+split_to_columns table input_column_id delimiter="," column_count:Columns_To_Add=..All_Columns on_problems:Problem_Behavior=..Report_Error =
column = table.at input_column_id
Value_Type.expect_text column <|
fan_out_to_columns table column.name (handle_nothing (_.split delimiter)) column_count on_problems=on_problems
@@ -28,7 +28,7 @@ split_to_rows table input_column_id:(Text | Integer) delimiter="," =
expression.
See `Table.tokenize_to_columns`.
tokenize_to_columns : Table -> Text | Integer -> Text -> Case_Sensitivity -> Columns_To_Add -> Problem_Behavior -> Table
-tokenize_to_columns table input_column_id pattern case_sensitivity column_count on_problems =
+tokenize_to_columns table input_column_id pattern case_sensitivity column_count on_problems:Problem_Behavior =
column = table.at input_column_id
Value_Type.expect_text column <|
fan_out_to_columns table column.name (handle_nothing (_.tokenize pattern case_sensitivity)) column_count on_problems=on_problems
@@ -48,7 +48,7 @@ tokenize_to_rows table input_column_id pattern="." case_sensitivity=Case_Sensiti
pattern.
See `Table.parse_to_columns`.
parse_to_columns : Table -> Text | Integer -> Text | Regex -> Case_Sensitivity -> Boolean -> Problem_Behavior -> Table
-parse_to_columns table input_column_id (pattern:(Text | Regex)=".") case_sensitivity=Case_Sensitivity.Sensitive parse_values=True on_problems=Report_Error =
+parse_to_columns table input_column_id (pattern:(Text | Regex)=".") case_sensitivity=Case_Sensitivity.Sensitive parse_values=True on_problems:Problem_Behavior=..Report_Error =
regex = case pattern of
_ : Regex -> pattern.recompile case_sensitivity
_ : Text ->
@@ -95,7 +95,7 @@ java_to_enso x = case x of
Converts a value type to an in-memory storage type, possibly approximating it
to the closest supported type.
from_value_type : Value_Type -> Problem_Behavior -> StorageType
-from_value_type value_type on_problems =
+from_value_type value_type on_problems:Problem_Behavior =
approximate_storage = closest_storage_type value_type
approximated_value_type = to_value_type approximate_storage
problems = if approximated_value_type == value_type then [] else
@@ -577,7 +577,7 @@ replace_columns_with_columns table old_columns new_columns =
transforming the old columns. The old columns must all exist in the table,
and the new columns must all have the same name.
replace_columns_with_transformed_columns : Table -> Text | Integer | Regex | Vector (Integer | Text | Regex) -> (Column -> Column) -> Boolean -> Problem_Behavior -> Table
-replace_columns_with_transformed_columns table selectors transformer error_on_missing_columns=True on_problems=Report_Warning =
+replace_columns_with_transformed_columns table selectors transformer error_on_missing_columns=True on_problems:Problem_Behavior=..Report_Warning =
internal_columns = table.columns_helper.select_columns selectors Case_Sensitivity.Default reorder=False error_on_missing_columns=error_on_missing_columns on_problems=on_problems
columns = internal_columns.map table.columns_helper.make_column
new_columns = columns.map on_problems=No_Wrap transformer
@@ -33,7 +33,7 @@ type Table_Ref
- on_problems: Specifies how to handle non-fatal problems, attaching a
warning by default.
evaluate_expression : Expression -> Problem_Behavior -> Any ! No_Such_Column | Invalid_Value_Type | Expression_Error
-evaluate_expression self expression:Expression on_problems=Report_Warning = self.underlying.evaluate_expression expression on_problems=on_problems
+evaluate_expression self expression:Expression on_problems:Problem_Behavior=..Report_Warning = self.underlying.evaluate_expression expression on_problems=on_problems

## PRIVATE
Resolve a Column_Ref to a Column, keeping any other values as-is.
@@ -82,7 +82,7 @@ type Table_Ref
## PRIVATE
Set a column.
set : Any -> Text -> Set_Mode -> Problem_Behavior -> Table_Ref ! Existing_Column | Missing_Column | No_Such_Column | Expression_Error
-set self column as:Text set_mode:Set_Mode=Set_Mode.Add_Or_Update on_problems:Problem_Behavior=Report_Warning =
+set self column as:Text set_mode:Set_Mode=Set_Mode.Add_Or_Update on_problems:Problem_Behavior=..Report_Warning =
new_underlying = self.underlying.set column as set_mode=set_mode on_problems=on_problems
Table_Ref.from new_underlying

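Taken together, the `Table_Ref` changes keep the old warning-by-default behaviour while letting callers pass any spelling of the behaviour. A hypothetical call-site sketch, where a `Table_Ref` value `ref`, an `Expression` `expr` and a column `col` are assumed:

    # Illustrative only: both named arguments go through the ascribed
    # on_problems parameters shown above.
    example_usage ref expr col =
        value = ref.evaluate_expression expr on_problems=..Ignore
        updated = ref.set col "result" on_problems=Problem_Behavior.Report_Error
        [value, updated]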
|