diff --git a/distribution/lib/Standard/AWS/0.0.0-dev/src/S3/S3_File.enso b/distribution/lib/Standard/AWS/0.0.0-dev/src/S3/S3_File.enso
index 8002d93249..3c0785b1c2 100644
--- a/distribution/lib/Standard/AWS/0.0.0-dev/src/S3/S3_File.enso
+++ b/distribution/lib/Standard/AWS/0.0.0-dev/src/S3/S3_File.enso
@@ -163,7 +163,7 @@ type S3_File
           If set to `Ignore`, the operation proceeds without errors or warnings.
     @format File_Format.default_widget
     read : File_Format -> Problem_Behavior -> Any ! S3_Error
-    read self format=Auto_Detect (on_problems : Problem_Behavior = Problem_Behavior.Report_Warning) =
+    read self format=Auto_Detect (on_problems : Problem_Behavior = ..Report_Warning) =
         if Data_Link.is_data_link self then Data_Link_Helpers.read_data_link self format on_problems else
             if self.is_directory then Error.throw (Illegal_Argument.Error "Cannot `read` a directory, use `list`.") else
                 case format of
@@ -195,7 +195,7 @@ type S3_File
           If set to `Ignore`, the operation proceeds without errors or warnings.
     @encoding Encoding.default_widget
     read_text : Encoding -> Problem_Behavior -> Text ! File_Error
-    read_text self (encoding : Encoding = Encoding.default) (on_problems : Problem_Behavior = Problem_Behavior.Report_Warning) =
+    read_text self (encoding : Encoding = Encoding.default) (on_problems : Problem_Behavior = ..Report_Warning) =
         self.read (Plain_Text_Format.Plain_Text encoding) on_problems
 
     ## ICON data_output
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso
index 6b6b076908..c30cbf5515 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso
@@ -310,7 +310,7 @@ type Connection
           a `Dry_Run_Operation` warning attached.
     @structure make_structure_creator
     create_table : Text -> Vector Column_Description | DB_Table | Table -> Vector Text | Nothing -> Boolean -> Boolean -> Problem_Behavior -> DB_Table ! Table_Already_Exists
-    create_table self (table_name : Text) (structure : Vector Column_Description | DB_Table | Table) (primary_key : (Vector Text | Nothing) = [first_column_name_in_structure structure]) (temporary : Boolean = False) (allow_existing : Boolean = False) (on_problems:Problem_Behavior = Problem_Behavior.Report_Warning) =
+    create_table self (table_name : Text) (structure : Vector Column_Description | DB_Table | Table) (primary_key : (Vector Text | Nothing) = [first_column_name_in_structure structure]) (temporary : Boolean = False) (allow_existing : Boolean = False) (on_problems:Problem_Behavior = ..Report_Warning) =
         create_table_implementation self table_name structure primary_key temporary allow_existing on_problems
 
     ## PRIVATE
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/SQLite_Format.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/SQLite_Format.enso
index 852330197b..c827f584a9 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/SQLite_Format.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/SQLite_Format.enso
@@ -55,7 +55,7 @@ type SQLite_Format
     ## PRIVATE
        Implements the `File.read` for this `File_Format`
     read : File -> Problem_Behavior -> Any
-    read self file on_problems =
+    read self file on_problems:Problem_Behavior =
         _ = [on_problems]
         Database.connect (SQLite.From_File file)
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Column.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Column.enso
index bccb2e4fb7..d3f6bad1a4 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Column.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Column.enso
@@ -1732,7 +1732,7 @@ type DB_Column
     @type (Widget_Helpers.parse_type_selector include_auto=False)
     @format (make_format_chooser include_number=False)
     parse : Value_Type | Auto -> Text | Data_Formatter -> Problem_Behavior -> DB_Column
-    parse self type:(Value_Type | Auto) format:(Text | Data_Formatter)="" on_problems=Report_Warning =
+    parse self type:(Value_Type | Auto) format:(Text | Data_Formatter)="" on_problems:Problem_Behavior=..Report_Warning =
         if type == Auto then Error.throw (Unsupported_Database_Operation.Error "The `Auto` parse type is not supported by the Database backend. Either pick a specific type or materialize the table to memory using `.read`.") else
             if format != "" then Error.throw (Unsupported_Database_Operation.Error "Custom formatting is not supported by the Database backend. Please set the format to `''` to use the default settings, or if custom formatting is needed, materialize the table to memory using `.read` first.") else
                 Value_Type.expect_text self <|
@@ -1800,7 +1800,7 @@ type DB_Column
           types. Due to this, a Mixed column containing values `[2, "3"]` will
           actually be converted into `[2, Nothing]` when casting to Integer type.
     cast : Value_Type -> Problem_Behavior -> DB_Column ! Illegal_Argument | Inexact_Type_Coercion | Conversion_Failure
-    cast self value_type on_problems=Problem_Behavior.Report_Warning =
+    cast self value_type on_problems:Problem_Behavior=..Report_Warning =
         check_cast_compatibility self.value_type value_type <|
             self.internal_do_cast value_type on_problems
@@ -1819,7 +1819,7 @@ type DB_Column
     ## PRIVATE
        Shares the core CAST logic between `cast` and `parse`.
     internal_do_cast : Value_Type -> Problem_Behavior -> DB_Column
-    internal_do_cast self value_type on_problems =
+    internal_do_cast self value_type on_problems:Problem_Behavior =
         dialect = self.connection.dialect
         type_mapping = dialect.get_type_mapping
         target_sql_type = type_mapping.value_type_to_sql value_type on_problems
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_Database_Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_Database_Table.enso
index f3338f0e82..53f4e6b119 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_Database_Table.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_Database_Table.enso
@@ -57,7 +57,7 @@ from project.Internal.Upload_Table import all
      rows, so errors may still occur when the output action is enabled.
 @primary_key Widget_Helpers.make_column_name_vector_selector
 DB_Table.select_into_database_table : Connection -> Text -> Vector Text | Nothing -> Boolean -> Problem_Behavior -> DB_Table ! Table_Already_Exists | Inexact_Type_Coercion | Missing_Input_Columns | Non_Unique_Key | SQL_Error | Illegal_Argument
-DB_Table.select_into_database_table self connection (table_name : Text) primary_key=[self.columns.first.name] temporary=False on_problems=Problem_Behavior.Report_Warning =
+DB_Table.select_into_database_table self connection (table_name : Text) primary_key=[self.columns.first.name] temporary=False on_problems:Problem_Behavior=..Report_Warning =
     select_into_table_implementation self connection table_name primary_key temporary on_problems
 
 ## GROUP Standard.Base.Output
@@ -132,7 +132,7 @@ DB_Table.select_into_database_table self connection (table_name : Text) primary_
      only on a sample of rows, so errors may still occur when the output
      action is enabled.
 DB_Table.update_rows : DB_Table | Table -> Update_Action -> Vector Text | Nothing -> Boolean -> Problem_Behavior -> DB_Table ! Table_Not_Found | Unmatched_Columns | Missing_Input_Columns | Column_Type_Mismatch | SQL_Error | Illegal_Argument
-DB_Table.update_rows self (source_table : DB_Table | Table) (update_action : Update_Action = Update_Action.Update_Or_Insert) (key_columns : Vector | Nothing = default_key_columns self) (error_on_missing_columns : Boolean = False) (on_problems : Problem_Behavior = Problem_Behavior.Report_Warning) =
+DB_Table.update_rows self (source_table : DB_Table | Table) (update_action : Update_Action = Update_Action.Update_Or_Insert) (key_columns : Vector | Nothing = default_key_columns self) (error_on_missing_columns : Boolean = False) (on_problems : Problem_Behavior = ..Report_Warning) =
     common_update_table source_table self update_action key_columns error_on_missing_columns on_problems
 
 ## GROUP Standard.Base.Output
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_In_Memory_Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_In_Memory_Table.enso
index 383f0dea2e..b14acd1866 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_In_Memory_Table.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_In_Memory_Table.enso
@@ -57,7 +57,7 @@ from project.Internal.Upload_Table import all
      rows, so errors may still occur when the output action is enabled.
 @primary_key Widget_Helpers.make_column_name_vector_selector
 Table.select_into_database_table : Connection -> Text -> Vector Text | Nothing -> Boolean -> Problem_Behavior -> DB_Table ! Table_Already_Exists | Inexact_Type_Coercion | Missing_Input_Columns | Non_Unique_Key | SQL_Error | Illegal_Argument
-Table.select_into_database_table self connection (table_name : Text) primary_key=[self.columns.first.name] temporary=False on_problems=Problem_Behavior.Report_Warning =
+Table.select_into_database_table self connection (table_name : Text) primary_key=[self.columns.first.name] temporary=False on_problems:Problem_Behavior=..Report_Warning =
     select_into_table_implementation self connection table_name primary_key temporary on_problems
 
 ## GROUP Standard.Base.Output
@@ -123,7 +123,7 @@ Table.select_into_database_table self connection (table_name : Text) primary_key
      only on a sample of rows, so errors may still occur when the output
      action is enabled.
 Table.update_rows : DB_Table | Table -> Update_Action -> Vector Text | Nothing -> Boolean -> Problem_Behavior -> DB_Table ! Table_Not_Found | Unmatched_Columns | Missing_Input_Columns | Column_Type_Mismatch | SQL_Error | Illegal_Argument
-Table.update_rows self (source_table : DB_Table | Table) (update_action : Update_Action = Update_Action.Update_Or_Insert) (key_columns : Vector | Nothing = Nothing) (error_on_missing_columns : Boolean = False) (on_problems : Problem_Behavior = Problem_Behavior.Report_Warning) =
+Table.update_rows self (source_table : DB_Table | Table) (update_action : Update_Action = Update_Action.Update_Or_Insert) (key_columns : Vector | Nothing = Nothing) (error_on_missing_columns : Boolean = False) (on_problems : Problem_Behavior = ..Report_Warning) =
     _ = [source_table, update_action, key_columns, error_on_missing_columns, on_problems]
     Error.throw (Illegal_Argument.Error "Table.update_rows modifies the underlying table, so it is only supported for Database tables - in-memory tables are immutable. Consider using `join` or `merge` for a similar operation that creates a new Table instead.")
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Common/Lookup_Query_Helper.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Common/Lookup_Query_Helper.enso
index 4b68919bde..9dfdc63fca 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Common/Lookup_Query_Helper.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Common/Lookup_Query_Helper.enso
@@ -20,7 +20,7 @@ from project.Internal.Upload_Table import check_for_null_keys
    Implementation of `lookup_and_replace` for Database backend.
    See `Table.lookup_and_replace` for more details.
 build_lookup_query : DB_Table -> DB_Table -> (Vector (Integer | Text | Regex) | Text | Integer | Regex) -> Boolean -> Boolean -> Problem_Behavior -> DB_Table ! Missing_Input_Columns | Non_Unique_Key | Unmatched_Rows_In_Lookup
-build_lookup_query base_table lookup_table key_columns add_new_columns allow_unmatched_rows on_problems =
+build_lookup_query base_table lookup_table key_columns add_new_columns allow_unmatched_rows on_problems:Problem_Behavior =
     lookup_columns = Lookup_Helpers.prepare_columns_for_lookup base_table lookup_table key_columns add_new_columns allow_unmatched_rows on_problems
     lookup_columns.if_not_error <| check_initial_invariants base_table lookup_table lookup_columns allow_unmatched_rows <|
         column_naming_helper = base_table.connection.base_connection.column_naming_helper
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso
index 97ed63ab97..03197c36ef 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso
@@ -227,7 +227,7 @@ type Postgres_Connection
           a `Dry_Run_Operation` warning attached.
     @structure make_structure_creator
     create_table : Text -> Vector Column_Description | DB_Table | Table -> Vector Text | Nothing -> Boolean -> Boolean -> Problem_Behavior -> DB_Table ! Table_Already_Exists
-    create_table self (table_name : Text) (structure : Vector Column_Description | DB_Table | Table) (primary_key : (Vector Text | Nothing) = [first_column_name_in_structure structure]) (temporary : Boolean = False) (allow_existing : Boolean = False) (on_problems:Problem_Behavior = Problem_Behavior.Report_Warning) =
+    create_table self (table_name : Text) (structure : Vector Column_Description | DB_Table | Table) (primary_key : (Vector Text | Nothing) = [first_column_name_in_structure structure]) (temporary : Boolean = False) (allow_existing : Boolean = False) (on_problems:Problem_Behavior = ..Report_Warning) =
         self.connection.create_table table_name structure primary_key temporary allow_existing on_problems
 
     ## ADVANCED
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Type_Mapping.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Type_Mapping.enso
index f73f7b0307..88deded846 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Type_Mapping.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Type_Mapping.enso
@@ -19,7 +19,7 @@ polyglot java import java.sql.Types
 type Postgres_Type_Mapping
     ## PRIVATE
     value_type_to_sql : Value_Type -> Problem_Behavior -> SQL_Type
-    value_type_to_sql value_type on_problems =
+    value_type_to_sql value_type on_problems:Problem_Behavior =
         result = case value_type of
             Value_Type.Boolean -> SQL_Type.Value Types.BIT "bool" precision=1
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQL_Type_Mapping.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQL_Type_Mapping.enso
index d54be212ee..0146f1814c 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQL_Type_Mapping.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQL_Type_Mapping.enso
@@ -25,7 +25,7 @@ type SQL_Type_Mapping
        If the conversion is exact, it should be reversible, i.e.
        `sql_type_to_value_type (value_type_to_sql x Problem_Behavior.Report_Error) = x`.
     value_type_to_sql : Value_Type -> Problem_Behavior -> SQL_Type ! Inexact_Type_Coercion
-    value_type_to_sql value_type on_problems =
+    value_type_to_sql value_type on_problems:Problem_Behavior =
         _ = [value_type, on_problems]
         Unimplemented.throw "This is an interface only."
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Connection.enso
index 67cab5f25f..ffb40554ff 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Connection.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Connection.enso
@@ -213,7 +213,7 @@ type SQLite_Connection
           a `Dry_Run_Operation` warning attached.
     @structure make_structure_creator
     create_table : Text -> Vector Column_Description | DB_Table | Table -> Vector Text | Nothing -> Boolean -> Boolean -> Problem_Behavior -> DB_Table ! Table_Already_Exists
-    create_table self (table_name : Text) (structure : Vector Column_Description | DB_Table | Table) (primary_key : (Vector Text | Nothing) = [first_column_name_in_structure structure]) (temporary : Boolean = False) (allow_existing : Boolean = False) (on_problems:Problem_Behavior = Problem_Behavior.Report_Warning) =
+    create_table self (table_name : Text) (structure : Vector Column_Description | DB_Table | Table) (primary_key : (Vector Text | Nothing) = [first_column_name_in_structure structure]) (temporary : Boolean = False) (allow_existing : Boolean = False) (on_problems:Problem_Behavior = ..Report_Warning) =
         self.connection.create_table table_name structure primary_key temporary allow_existing on_problems
 
     ## ADVANCED
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Type_Mapping.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Type_Mapping.enso
index 3927321f63..de5770ab18 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Type_Mapping.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Type_Mapping.enso
@@ -48,7 +48,7 @@ polyglot java import java.sql.Types
 type SQLite_Type_Mapping
     ## PRIVATE
     value_type_to_sql : Value_Type -> Problem_Behavior -> SQL_Type
-    value_type_to_sql value_type on_problems =
+    value_type_to_sql value_type on_problems:Problem_Behavior =
         result = case value_type of
             Value_Type.Boolean -> SQLite_Types.boolean
             Value_Type.Byte -> SQLite_Types.integer
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Upload_Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Upload_Table.enso
index 6048ea88c6..84f10f1c66 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Upload_Table.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Upload_Table.enso
@@ -29,7 +29,7 @@ from project.Internal.Result_Set import result_set_to_table
      of the created table.
 
    The user-facing function that handles the dry-run logic.
-create_table_implementation connection table_name structure primary_key temporary allow_existing on_problems = Panic.recover SQL_Error <|
+create_table_implementation connection table_name structure primary_key temporary allow_existing on_problems:Problem_Behavior = Panic.recover SQL_Error <|
     connection.base_connection.maybe_run_maintenance
     table_naming_helper = connection.base_connection.table_naming_helper
     table_naming_helper.verify_table_name table_name <| connection.jdbc_connection.run_within_transaction <|
@@ -58,7 +58,7 @@ create_table_implementation connection table_name structure primary_key temporar
    Assumes the output context is enabled for it to work.
    Does not check if the table already exists - so if it does, it may fail with `SQL_Error`.
    The caller should perform the check for better error handling.
-internal_create_table_structure connection table_name structure primary_key temporary on_problems =
+internal_create_table_structure connection table_name structure primary_key temporary on_problems:Problem_Behavior =
     aligned_structure = align_structure connection structure
     resolved_primary_key = resolve_primary_key aligned_structure primary_key
     validate_structure connection.base_connection.column_naming_helper aligned_structure <|
@@ -85,7 +85,7 @@ internal_create_table_structure connection table_name structure primary_key temp
    - on_problems: the behavior to be used when reporting problems.
    - row_limit: if set, only the first `row_limit` rows will be uploaded.
 internal_upload_table : DB_Table | Table -> Connection -> Text -> Nothing | Vector Text -> Boolean -> Vector Column_Description -> Problem_Behavior -> Integer|Nothing -> DB_Table
-internal_upload_table source_table connection table_name primary_key temporary structure_hint=Nothing on_problems=Problem_Behavior.Report_Error row_limit=Nothing =
+internal_upload_table source_table connection table_name primary_key temporary structure_hint=Nothing on_problems:Problem_Behavior=..Report_Error row_limit=Nothing =
     case source_table of
         _ : Table ->
             internal_upload_in_memory_table source_table connection table_name primary_key temporary structure_hint on_problems row_limit
@@ -95,7 +95,7 @@ internal_upload_table source_table connection table_name primary_key temporary s
             Panic.throw <| Illegal_Argument.Error ("Unsupported table type: " + Meta.get_qualified_type_name source_table)
 
 ## PRIVATE
-select_into_table_implementation source_table connection table_name primary_key temporary on_problems =
+select_into_table_implementation source_table connection table_name primary_key temporary on_problems:Problem_Behavior =
     connection.base_connection.maybe_run_maintenance
     table_naming_helper = connection.base_connection.table_naming_helper
     table_naming_helper.verify_table_name table_name <|
@@ -123,7 +123,7 @@ select_into_table_implementation source_table connection table_name primary_key
 
 ## PRIVATE
    It should be run within a transaction and wrapped in `handle_upload_errors`.
-internal_upload_in_memory_table source_table connection table_name primary_key temporary structure_hint on_problems row_limit =
+internal_upload_in_memory_table source_table connection table_name primary_key temporary structure_hint on_problems:Problem_Behavior row_limit =
     In_Transaction.ensure_in_transaction <|
         verify_structure_hint structure_hint source_table.column_names
         structure = structure_hint.if_nothing source_table
@@ -147,7 +147,7 @@ internal_upload_in_memory_table source_table connection table_name primary_key t
 
 ## PRIVATE
    It should be run within a transaction and wrapped in `handle_upload_errors`.
-internal_upload_database_table source_table connection table_name primary_key temporary structure_hint on_problems row_limit =
+internal_upload_database_table source_table connection table_name primary_key temporary structure_hint on_problems:Problem_Behavior row_limit =
     In_Transaction.ensure_in_transaction <|
         connection_check = if source_table.connection.jdbc_connection == connection.jdbc_connection then True else
             Error.throw (Unsupported_Database_Operation.Error "The Database table to be uploaded must be coming from the same connection as the connection on which the new table is being created. Cross-connection uploads are currently not supported. To work around this, you can first `.read` the table into memory and then upload it from memory to a different connection.")
@@ -299,7 +299,7 @@ first_column_name_in_structure structure = case structure of
      responsibility of the caller to ensure that, otherwise the generated
      statement will be invalid.
 prepare_create_table_statement : Connection -> Text -> Vector Column_Description -> Vector Text -> Boolean -> Problem_Behavior -> SQL_Statement
-prepare_create_table_statement connection table_name columns primary_key temporary on_problems =
+prepare_create_table_statement connection table_name columns primary_key temporary on_problems:Problem_Behavior =
     type_mapping = connection.dialect.get_type_mapping
     column_descriptors = columns.map on_problems=No_Wrap def->
         sql_type = type_mapping.value_type_to_sql def.value_type on_problems
@@ -323,7 +323,7 @@ make_batched_insert_template connection table_name column_names =
     template
 
 ## PRIVATE
-common_update_table (source_table : DB_Table | Table) (target_table : DB_Table) update_action key_columns error_on_missing_columns on_problems =
+common_update_table (source_table : DB_Table | Table) (target_table : DB_Table) update_action key_columns error_on_missing_columns on_problems:Problem_Behavior =
     check_target_table_for_update target_table <|
         connection = target_table.connection
         Panic.recover SQL_Error <| handle_upload_errors <|
@@ -460,7 +460,7 @@ type Append_Helper
    - all columns in `source_table` have a corresponding column in `target_table`
      (with the same name),
    - all `key_columns` are present in both source and target tables.
-check_update_arguments_structure_match source_table target_table key_columns update_action error_on_missing_columns on_problems ~action =
+check_update_arguments_structure_match source_table target_table key_columns update_action error_on_missing_columns on_problems:Problem_Behavior ~action =
     check_source_column source_column =
         # The column must exist because it was verified earlier.
         target_column = target_table.get source_column.name
diff --git a/distribution/lib/Standard/Image/0.0.0-dev/src/Image_File_Format.enso b/distribution/lib/Standard/Image/0.0.0-dev/src/Image_File_Format.enso
index 35ccaf2682..7654ab9573 100644
--- a/distribution/lib/Standard/Image/0.0.0-dev/src/Image_File_Format.enso
+++ b/distribution/lib/Standard/Image/0.0.0-dev/src/Image_File_Format.enso
@@ -41,7 +41,7 @@ type Image_File_Format
     ## PRIVATE
        Implements the `File.read` for this `File_Format`
     read : File -> Problem_Behavior -> Any
-    read self file on_problems =
+    read self file on_problems:Problem_Behavior =
         _ = [on_problems]
         Image.read file
diff --git a/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Connection.enso b/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Connection.enso
index 08ccffe55c..679a24402b 100644
--- a/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Connection.enso
+++ b/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Connection.enso
@@ -247,7 +247,7 @@ type Snowflake_Connection
           a `Dry_Run_Operation` warning attached.
     @structure make_structure_creator
     create_table : Text -> Vector Column_Description | DB_Table | Table -> Vector Text | Nothing -> Boolean -> Boolean -> Problem_Behavior -> DB_Table ! Table_Already_Exists
-    create_table self (table_name : Text) (structure : Vector Column_Description | DB_Table | Table) (primary_key : (Vector Text | Nothing) = [first_column_name_in_structure structure]) (temporary : Boolean = False) (allow_existing : Boolean = False) (on_problems:Problem_Behavior = Problem_Behavior.Report_Warning) =
+    create_table self (table_name : Text) (structure : Vector Column_Description | DB_Table | Table) (primary_key : (Vector Text | Nothing) = [first_column_name_in_structure structure]) (temporary : Boolean = False) (allow_existing : Boolean = False) (on_problems:Problem_Behavior = ..Report_Warning) =
         self.connection.create_table table_name structure primary_key temporary allow_existing on_problems
 
     ## ADVANCED
diff --git a/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Type_Mapping.enso b/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Type_Mapping.enso
index de338aebc6..2cae993f48 100644
--- a/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Type_Mapping.enso
+++ b/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Type_Mapping.enso
@@ -22,7 +22,7 @@ polyglot java import java.sql.Types
 type Snowflake_Type_Mapping
     ## PRIVATE
     value_type_to_sql : Value_Type -> Problem_Behavior -> SQL_Type
-    value_type_to_sql value_type on_problems =
+    value_type_to_sql value_type on_problems:Problem_Behavior =
         result = case value_type of
             Value_Type.Boolean -> SQL_Type.Value Types.BOOLEAN "boolean"
             # All integer types in Snowflake become NUMERIC(38,0).
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso
index 2c14641515..4b780357dd 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso
@@ -1800,7 +1800,7 @@ type Column
     @type Widget_Helpers.parse_type_selector
     @format (make_format_chooser include_number=False)
     parse : Value_Type | Auto -> Text | Data_Formatter -> Problem_Behavior -> Column
-    parse self type:(Value_Type | Auto)=Auto format:(Text | Data_Formatter)="" on_problems=Report_Warning =
+    parse self type:(Value_Type | Auto)=Auto format:(Text | Data_Formatter)="" on_problems:Problem_Behavior=..Report_Warning =
         Value_Type.expect_text self <|
             formatter = case format of
                 _ : Text -> if format == "" then Data_Formatter.Value else Data_Formatter.Value.with_format type format
@@ -1953,7 +1953,7 @@ type Column
           types. Due to this, a Mixed column containing values `[2, "3"]` will
           actually be converted into `[2, Nothing]` when casting to Integer type.
     cast : Value_Type -> Problem_Behavior -> Column ! Illegal_Argument | Inexact_Type_Coercion | Conversion_Failure
-    cast self value_type on_problems=Problem_Behavior.Report_Warning =
+    cast self value_type on_problems:Problem_Behavior=..Report_Warning =
         Cast_Helpers.check_cast_compatibility self.value_type value_type <|
             target_storage_type = Storage.from_value_type value_type on_problems
             Java_Problems.with_problem_aggregator on_problems java_problem_aggregator->
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data_Formatter.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data_Formatter.enso
index fa0ac81030..d57a19eaaa 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data_Formatter.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data_Formatter.enso
@@ -78,7 +78,7 @@ type Data_Formatter
           If set to `Ignore`, the operation proceeds without errors or warnings.
     @type Widget_Helpers.parse_type_selector
     parse : Text -> (Value_Type | Auto) -> Problem_Behavior -> Any
-    parse self text type:(Value_Type | Auto)=Auto on_problems=Problem_Behavior.Report_Warning =
+    parse self text type:(Value_Type | Auto)=Auto on_problems:Problem_Behavior=..Report_Warning =
         parser = self.make_value_type_parser type
         Java_Problems.with_problem_aggregator on_problems java_problem_aggregator->
             related_column_name = Nothing
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Delimited/Delimited_Format.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Delimited/Delimited_Format.enso
index 9bf60bed30..a064f47132 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Delimited/Delimited_Format.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Delimited/Delimited_Format.enso
@@ -99,7 +99,7 @@ type Delimited_Format
        ADVANCED
        Implements the `File.read` for this `File_Format`
     read : File -> Problem_Behavior -> Any
-    read self file on_problems =
+    read self file on_problems:Problem_Behavior =
         Delimited_Reader.read_file self file on_problems
 
     ## PRIVATE
@@ -107,13 +107,13 @@ type Delimited_Format
     read_stream : Input_Stream -> File_Format_Metadata -> Any
     read_stream self stream:Input_Stream (metadata : File_Format_Metadata = File_Format_Metadata.no_information) =
         _ = metadata
-        Delimited_Reader.read_stream self stream on_problems=Report_Warning
+        Delimited_Reader.read_stream self stream on_problems=..Report_Warning
 
     ## PRIVATE
        ADVANCED
        Implements the `Table.write` for this `File_Format`.
     write_table : File -> Table -> Existing_File_Behavior -> Match_Columns -> Problem_Behavior -> File
-    write_table self file table on_existing_file match_columns on_problems =
+    write_table self file table on_existing_file match_columns on_problems:Problem_Behavior =
         Delimited_Writer.write_file table self file on_existing_file match_columns on_problems
 
     ## PRIVATE
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Format.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Format.enso
index 6985b6801d..d20525d14f 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Format.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Format.enso
@@ -109,7 +109,7 @@ type Excel_Format
        ADVANCED
        Implements the `File.read` for this `File_Format`
     read : File -> Problem_Behavior -> Any
-    read self file on_problems =
+    read self file on_problems:Problem_Behavior =
         format = should_treat_as_xls_format self.xls_format file
         case self of
             Excel_Format.Workbook _ _ -> Excel_Workbook.new file format
@@ -160,7 +160,7 @@ type Excel_Format
        - match_columns: How to match columns between the table and the file.
        - on_problems: What to do if there are problems reading the file.
     write_table : File -> Table -> Existing_File_Behavior -> Match_Columns -> Problem_Behavior -> File
-    write_table self file table on_existing_file match_columns on_problems =
+    write_table self file table on_existing_file match_columns on_problems:Problem_Behavior =
         format = should_treat_as_xls_format self.xls_format file
         Excel_Writer.write_file file table on_existing_file (as_section self) match_columns on_problems format
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Extensions/Table_Conversions.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Extensions/Table_Conversions.enso
index a0b25c0096..43f0cc705d 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Extensions/Table_Conversions.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Extensions/Table_Conversions.enso
@@ -108,7 +108,7 @@ Table.from_objects value (fields : Vector | Nothing = Nothing) =
      will be named `Column <N>` where `N` is the number of the marked group.
      (Group 0 is not included.)
 Text.parse_to_table : Text | Regex -> Case_Sensitivity -> Boolean -> Problem_Behavior -> Table ! Type_Error | Regex_Syntax_Error | Illegal_Argument
-Text.parse_to_table self (pattern : Text | Regex) case_sensitivity=Case_Sensitivity.Sensitive parse_values=True on_problems=Report_Warning =
+Text.parse_to_table self (pattern : Text | Regex) case_sensitivity=Case_Sensitivity.Sensitive parse_values=True on_problems:Problem_Behavior=..Report_Warning =
     Parse_To_Table.parse_text_to_table self pattern case_sensitivity parse_values on_problems
 
 ## PRIVATE
@@ -123,7 +123,7 @@ Text.parse_to_table self (pattern : Text | Regex) case_sensitivity=Case_Sensitiv
      Not used for JSON.
    - on_problems: What to do if there are problems reading the file.
 JSON_Format.write_table : Writable_File -> Table -> Existing_File_Behavior -> Match_Columns -> Problem_Behavior -> File
-JSON_Format.write_table self file:Writable_File table on_existing_file match_columns on_problems =
+JSON_Format.write_table self file:Writable_File table on_existing_file match_columns on_problems:Problem_Behavior=..Report_Warning =
     _ = match_columns
     if file.exists.not then table.to_json.write file else
         case on_existing_file of
@@ -135,7 +135,7 @@ JSON_Format.write_table self file:Writable_File table on_existing_file match_col
 ## PRIVATE
    Handles appending to a file containing a JSON table by removing the closing
    bracket and appending to it.
-append_to_json_table file:File table on_problems =
+append_to_json_table file:File table on_problems:Problem_Behavior =
     old_text = file.read_text.trim
     case old_text.ends_with "]" && old_text.starts_with "[" of
         True ->
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Add_Row_Number.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Add_Row_Number.enso
index e774c69fc3..cb2ccd86e1 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Add_Row_Number.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Add_Row_Number.enso
@@ -17,7 +17,7 @@ polyglot java import org.enso.table.operations.AddRowNumber
 
 ## PRIVATE
 add_row_number : Table -> Text -> Integer -> Integer -> Text | Integer | Regex | Vector (Integer | Text | Regex) -> Vector (Text | Sort_Column) | Text | Sort_Column -> Problem_Behavior -> Table
-add_row_number table name from step group_by order_by on_problems =
+add_row_number table name from step group_by order_by on_problems:Problem_Behavior =
     problem_builder = Problem_Builder.new error_on_missing_columns=True
     grouping_columns = table.columns_helper.select_columns_helper group_by Case_Sensitivity.Default True problem_builder
     ordering = Table_Helpers.resolve_order_by table.columns order_by problem_builder
@@ -43,7 +43,7 @@ add_row_number table name from step group_by order_by on_problems =
 ## PRIVATE
    If the table already contains a column called `name` it will be renamed to a
    unique name, so that a new column with this name can be added.
-rename_columns_if_needed table name on_problems build_table_from_columns =
+rename_columns_if_needed table name on_problems:Problem_Behavior build_table_from_columns =
     column_names = table.column_names
     if column_names.contains name . not then table else
         problems = [Duplicate_Output_Column_Names.Error [name]]
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Add_Running.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Add_Running.enso
index 3e493ea351..f6d28aafa7 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Add_Running.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Add_Running.enso
@@ -20,7 +20,7 @@ polyglot java import org.enso.table.operations.AddRunning
 
 ## PRIVATE
 add_running : Table -> Statistic -> (Text|Integer) -> Text -> Vector (Text | Integer | Regex) | Text | Integer | Regex -> Vector (Text | Sort_Column) | Text -> Problem_Behavior -> Table
-add_running table (statistic:Statistic=Statistic.Count) (of:Text|Integer=0) (as:Text='') (group_by:(Vector | Text | Integer | Regex)=[]) (order_by:(Vector | Text)=[]) (on_problems:Problem_Behavior=Problem_Behavior.Report_Warning) =
+add_running table (statistic:Statistic=Statistic.Count) (of:Text|Integer=0) (as:Text='') (group_by:(Vector | Text | Integer | Regex)=[]) (order_by:(Vector | Text)=[]) (on_problems:Problem_Behavior=..Report_Warning) =
     check_running_support [statistic] <|
         of_col = table.at of
         new_name = if as.is_empty then 'Running ' + statistic.to_text + ' of ' + of_col.name else as
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Column_Ops.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Column_Ops.enso
index f0e82c6e1e..8fdb5d2ccb 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Column_Ops.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Column_Ops.enso
@@ -9,7 +9,7 @@ import project.Internal.Problem_Builder.Problem_Builder
    Map a text-returning function over the column values, using Storage directly.
    The output column has the same name as the input.
 map_over_storage : Column -> (Any -> Text) -> (Integer -> Any) -> Boolean -> Problem_Behavior -> Column
-map_over_storage input_column function builder skip_nothing=True on_problems=Report_Warning =
+map_over_storage input_column function builder skip_nothing=True on_problems:Problem_Behavior=..Report_Warning =
     problem_builder = Problem_Builder.new
     input_storage = input_column.java_column.getStorage
     num_input_rows = input_storage.size
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Delimited_Reader.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Delimited_Reader.enso
index 95b08d288a..c49909e65f 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Delimited_Reader.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Delimited_Reader.enso
@@ -37,7 +37,7 @@ polyglot java import org.enso.table.read.QuoteStrippingParser
      If set to `Report_Error`, the operation fails with a dataflow error.
      If set to `Ignore`, the operation proceeds without errors or warnings.
 read_file : Delimited_Format -> File -> Problem_Behavior -> Any
-read_file format file on_problems =
+read_file format file on_problems:Problem_Behavior =
     ## We use the default `max_columns` setting. If we want to be able to read
        files with unlimited column limits (risking OutOfMemory exceptions), we
        can catch the exception indicating the limit has been
@@ -50,7 +50,7 @@ read_file format file on_problems =
 
 ## PRIVATE
 read_text : Text -> Delimited_Format -> Problem_Behavior -> Table
-read_text text format on_problems =
+read_text text format on_problems:Problem_Behavior =
     java_reader = StringReader.new text
     read_from_reader format java_reader on_problems
@@ -70,7 +70,7 @@ read_text text format on_problems =
    - related_file: The file related to the provided `java_stream`, if available,
      or `Nothing`. It is used for more detailed error reporting.
 read_stream : Delimited_Format -> Input_Stream -> Problem_Behavior -> Integer -> File | Nothing -> Any
-read_stream format stream on_problems max_columns=default_max_columns related_file=Nothing =
+read_stream format stream on_problems:Problem_Behavior max_columns=default_max_columns related_file=Nothing =
     handle_io_exception related_file <|
         stream.with_stream_decoder format.encoding on_problems reporting_stream_decoder->
             read_from_reader format reporting_stream_decoder on_problems max_columns
@@ -92,7 +92,7 @@ read_stream format stream on_problems max_columns=default_max_columns related_fi
      avoid `OutOfMemory` errors on malformed files.
      It must be a positive integer.
 read_from_reader : Delimited_Format -> Reader -> Problem_Behavior -> Integer -> Any
-read_from_reader format java_reader on_problems max_columns=4096 =
+read_from_reader format java_reader on_problems:Problem_Behavior max_columns=4096 =
     Illegal_Argument.handle_java_exception <| handle_parsing_failure <| handle_parsing_exception <| Empty_File_Error.handle_java_exception <|
         Java_Problems.with_problem_aggregator on_problems java_problem_aggregator->
             reader = prepare_reader format max_columns on_problems java_problem_aggregator
@@ -100,7 +100,7 @@ read_from_reader format java_reader on_problems max_columns=4096 =
             format.row_limit.attach_warning (Table.Value java_table)
 
 ## PRIVATE
-prepare_reader format:Delimited_Format max_columns on_problems java_problem_aggregator newline_override=Nothing =
+prepare_reader format:Delimited_Format max_columns on_problems:Problem_Behavior java_problem_aggregator newline_override=Nothing =
     java_headers = case format.headers of
         Headers.Has_Headers -> DelimitedReader.HeaderBehavior.USE_FIRST_ROW_AS_HEADERS
        Headers.Detect_Headers -> DelimitedReader.HeaderBehavior.INFER
@@ -207,7 +207,7 @@ detect_metadata file format =
 newline_at_eof : File -> Encoding -> Text|Nothing
 newline_at_eof file encoding =
     newlines = ['\r\n', '\n', '\r']
-    newline_bytes = newlines.map (x-> x.bytes encoding Report_Error)
+    newline_bytes = newlines.map (x-> x.bytes encoding Problem_Behavior.Report_Error)
     most_bytes = newline_bytes.map .length . compute Statistic.Maximum
     file_last_bytes = file.read_last_bytes most_bytes
     result = newlines.zip newline_bytes . find if_missing=[Nothing] pair->
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Delimited_Writer.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Delimited_Writer.enso
index c17de9d64a..f14995abd4 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Delimited_Writer.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Delimited_Writer.enso
@@ -37,7 +37,7 @@ polyglot java import org.enso.table.write.WriteQuoteBehavior
      If set to `Report_Error`, the operation fails with a dataflow error.
      If set to `Ignore`, the operation proceeds without errors or warnings.
 write_file : Table -> Delimited_Format -> Writable_File -> Existing_File_Behavior -> Match_Columns -> Problem_Behavior -> Any
-write_file table format (file : Writable_File) on_existing_file match_columns on_problems =
+write_file table format (file : Writable_File) on_existing_file match_columns on_problems:Problem_Behavior =
     case on_existing_file of
         Existing_File_Behavior.Append ->
             # The default encoding may be used for detecting the effective encoding in append mode.
@@ -56,7 +56,7 @@ write_file table format (file : Writable_File) on_existing_file match_columns on
 
    If the file does not exist or is empty, it acts like a regular overwrite.
 append_to_file : Table -> Delimited_Format -> Writable_File -> Match_Columns -> Problem_Behavior -> Any
-append_to_file table format (file : Writable_File) match_columns on_problems =
+append_to_file table format (file : Writable_File) match_columns on_problems:Problem_Behavior =
     if file.is_local then append_to_local_file table format file.file match_columns on_problems else
         ## TODO in the future, if we have remote backends that _do support_ proper append, we could
           avoid downloading the full file here - we only need to read the first few first and last bytes (to infer the headers and newline at EOF).
@@ -65,7 +65,7 @@ append_to_file table format (file : Writable_File) match_columns on_problems =
         append_to_local_file table format local_temp_file match_columns on_problems . if_not_error file.file_for_return
 
 ## PRIVATE
-append_to_local_file table format (file : File) match_columns on_problems =
+append_to_local_file table format (file : File) match_columns on_problems:Problem_Behavior =
     Column_Name_Mismatch.handle_java_exception <| Column_Count_Mismatch.handle_java_exception <| Panic.recover Illegal_Argument <|
         inferring_format = format.with_line_endings Infer
         metadata = Delimited_Reader.detect_metadata file inferring_format
@@ -130,7 +130,7 @@ write_text table format =
    - separator_override: An optional override for the line separator to use
      instead of the one from `format`.
 write_to_stream : Table -> Delimited_Format -> Output_Stream -> Problem_Behavior -> File | Nothing -> Text | Nothing -> Boolean -> Any
-write_to_stream table format stream on_problems related_file=Nothing separator_override=Nothing needs_leading_newline=False =
+write_to_stream table format stream on_problems:Problem_Behavior related_file=Nothing separator_override=Nothing needs_leading_newline=False =
     handle_io_exception ~action = Panic.catch IOException action caught_panic->
         File_Error.wrap_io_exception related_file caught_panic.payload
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Excel_Reader.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Excel_Reader.enso
index 81527760e1..7e529266f8 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Excel_Reader.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Excel_Reader.enso
@@ -51,7 +51,7 @@ handle_reader file reader =
    - xls_format: If `True` then the file is read in using Excel 95-2003 format
      otherwise reads in Excel 2007+ format.
 read_file : File -> Excel_Section -> Problem_Behavior -> Boolean -> (Table | Vector)
-read_file file section on_problems xls_format=False =
+read_file file section on_problems:Problem_Behavior xls_format=False =
     file_format = if xls_format then ExcelFileFormat.XLS else ExcelFileFormat.XLSX
     reader java_file = case section of
         Excel_Section.Worksheet sheet headers skip_rows row_limit ->
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Fan_Out.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Fan_Out.enso
index 1dd2cf4232..05a98904a6 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Fan_Out.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Fan_Out.enso
@@ -26,7 +26,7 @@ polyglot java import org.enso.table.data.mask.OrderMask
    - column_count: The number of columns to split to.
      If `All_Columns` then columns will be added to fit all data.
 fan_out_to_columns : Table -> Text | Integer -> (Any -> Vector Any) -> Columns_To_Add -> (Integer -> Any) -> Problem_Behavior -> Table | Nothing
-fan_out_to_columns table input_column_id function column_count column_builder=make_string_builder on_problems=Report_Error =
+fan_out_to_columns table input_column_id function column_count column_builder=make_string_builder on_problems:Problem_Behavior=..Report_Error =
     input_column = table.get input_column_id
     problem_builder = Problem_Builder.new
     new_columns_unrenamed = map_columns_to_multiple input_column function column_count=column_count.columns_to_split column_builder=column_builder problem_builder=problem_builder
@@ -51,7 +51,7 @@ fan_out_to_columns table input_column_id function column_count column_builder=ma
      single row is output with `Nothing` for the transformed column. If false,
      the row is not output at all.
 fan_out_to_rows : Table -> Text -> (Any -> Vector Any) -> Vector | Function -> Boolean -> (Integer -> Any) -> Problem_Behavior -> Table
-fan_out_to_rows table input_column_id:Text function column_names=[input_column_id] at_least_one_row=False column_builder=make_string_builder on_problems=Report_Error =
+fan_out_to_rows table input_column_id:Text function column_names=[input_column_id] at_least_one_row=False column_builder=make_string_builder on_problems:Problem_Behavior=..Report_Error =
     ## Treat this as a special case of fan_out_to_rows_and_columns, with one
        column. Wrap the provided function to convert each value to a singleton
        `Vector`.
@@ -102,7 +102,7 @@ fan_out_to_rows table input_column_id:Text function column_names=[input_column_i
    - column_names: The names for the generated columns.
    - on_problems: Specifies the behavior when a problem occurs.
 fan_out_to_rows_and_columns : Table -> Text | Integer -> (Any -> Vector (Vector Any)) -> Vector Text -> Boolean -> (Integer -> Any) -> Problem_Behavior -> Table
-fan_out_to_rows_and_columns table input_column_id function column_names at_least_one_row=False column_builder=make_string_builder on_problems=Report_Error =
+fan_out_to_rows_and_columns table input_column_id function column_names at_least_one_row=False column_builder=make_string_builder on_problems:Problem_Behavior=..Report_Error =
     problem_builder = Problem_Builder.new
 
     unique = table.column_naming_helper.create_unique_name_strategy
@@ -240,7 +240,6 @@ fan_out_to_rows_and_columns_dynamic input_storage function at_least_one_row colu
      If `Nothing` then columns will be added to fit all data. If the data
      exceeds the `column_count`, a `Column_Count_Exceeded` error will follow
      the `on_problems` behavior.
-   - on_problems: Specifies the behavior when a problem occurs.
 map_columns_to_multiple : Column -> (Any -> Vector Any) -> Integer | Nothing -> (Integer -> Any) -> Problem_Builder -> Vector Column
 map_columns_to_multiple input_column function column_count column_builder=make_string_builder problem_builder =
     num_rows = input_column.length
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Filter_Condition_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Filter_Condition_Helpers.enso
index 32b0e79c49..67907e773f 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Filter_Condition_Helpers.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Filter_Condition_Helpers.enso
@@ -12,7 +12,7 @@ from project.Errors import Nothing_Value_In_Filter_Condition
 
    It also performs validation and will throw errors if unexpected column
    types are encountered.
-make_filter_column source_column filter_condition on_problems =
+make_filter_column source_column filter_condition on_problems:Problem_Behavior =
     base_column = case filter_condition of
         # Equality
         Equal value _ ->
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Java_Problems.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Java_Problems.enso
index 283f48776d..9db9d8c4b8 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Java_Problems.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Java_Problems.enso
@@ -95,7 +95,7 @@ unpack_problem_summary problem_summary =
 ## PRIVATE
    This is the new preferred way of running Java methods that may report problems.
 with_problem_aggregator : Problem_Behavior -> (ProblemAggregator -> Any) -> Any
-with_problem_aggregator on_problems f =
+with_problem_aggregator on_problems:Problem_Behavior f =
     ## This should be the only call to the `makeTopLevelAggregator` method in the
       whole codebase.
    aggregator = ProblemAggregator.makeTopLevelAggregator
    result = f aggregator
@@ -103,7 +103,7 @@ with_problem_aggregator on_problems f =
 
 ## PRIVATE
 with_map_operation_problem_aggregator : (Text | Nothing) -> Problem_Behavior -> (MapOperationProblemAggregator -> Any) -> Any
-with_map_operation_problem_aggregator (location : Text | Nothing) on_problems f =
+with_map_operation_problem_aggregator (location : Text | Nothing) on_problems:Problem_Behavior f =
     with_problem_aggregator on_problems java_aggregator->
         problem_builder = MapOperationProblemAggregator.new java_aggregator location
         f problem_builder
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Join_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Join_Helpers.enso
index 1045e9f529..de49a7dc70 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Join_Helpers.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Join_Helpers.enso
@@ -23,7 +23,7 @@ type Join_Condition_Resolver
        Besides, a list of redundant columns from equality joins is aggregated
        which can be used to deduplicate them.
     resolve : Join_Condition | Text | Vector (Join_Condition | Text) -> Problem_Behavior -> Join_Condition_Resolution
-    resolve self conditions on_problems =
+    resolve self conditions on_problems:Problem_Behavior =
         redundant_names = Builder.new
         left_problem_builder = Problem_Builder.new missing_input_columns_location="the left table" types_to_always_throw=[Missing_Input_Columns]
         right_problem_builder = Problem_Builder.new missing_input_columns_location="the right table" types_to_always_throw=[Missing_Input_Columns]
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Lookup_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Lookup_Helpers.enso
index b32a13bc18..9c8eef4fcb 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Lookup_Helpers.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Lookup_Helpers.enso
@@ -42,7 +42,7 @@ type Lookup_Column
    indicating which columns will be updated and which will be added. It also
    reports any errors or warnings related to selecting these columns.
-prepare_columns_for_lookup base_table lookup_table key_columns_selector add_new_columns allow_unmatched_rows on_problems =
+prepare_columns_for_lookup base_table lookup_table key_columns_selector add_new_columns allow_unmatched_rows on_problems:Problem_Behavior =
     key_columns = base_table.select_columns key_columns_selector . column_names . catch No_Output_Columns _->
         Error.throw (Illegal_Argument.Error "At least one key column must be provided for `merge`.")
     lookup_table_key_columns = lookup_table.select_columns key_columns . catch Missing_Input_Columns error->
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Parse_To_Table.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Parse_To_Table.enso
index d4628f8009..8641996fc8 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Parse_To_Table.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Parse_To_Table.enso
@@ -17,7 +17,7 @@ from project.Errors import Duplicate_Output_Column_Names
 
    See Text.parse_to_table.
 parse_text_to_table : Text | Regex -> Text -> Case_Sensitivity -> Boolean -> Problem_Behavior -> Table ! Type_Error | Regex_Syntax_Error | Illegal_Argument
-parse_text_to_table text regex_or_pattern="." case_sensitivity=Case_Sensitivity.Default parse_values=True on_problems=Report_Warning =
+parse_text_to_table text regex_or_pattern="." case_sensitivity=Case_Sensitivity.Default parse_values=True on_problems=..Report_Warning =
     case_insensitive = case_sensitivity.is_case_insensitive_in_memory
     pattern = case regex_or_pattern of
         _ : Regex -> regex_or_pattern.recompile case_sensitivity
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Problem_Builder.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Problem_Builder.enso
index c829cfe9ef..6e42bf1978 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Problem_Builder.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Problem_Builder.enso
@@ -59,7 +59,7 @@ type Problem_Builder
        If in `Report_Error` mode and there are any problems gathered, the first
        one will be returned as error without even running the computation.
     attach_problems_before : Problem_Behavior -> Any -> Any
-    attach_problems_before self problem_behavior ~computation =
+    attach_problems_before self problem_behavior:Problem_Behavior ~computation =
         case self.get_problemset_throwing_distinguished_errors of
             problems -> problem_behavior.attach_problems_before problems computation
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Replace_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Replace_Helpers.enso
index 0eb360c722..6fb9591be9 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Replace_Helpers.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Replace_Helpers.enso
@@ -11,7 +11,7 @@ from project.Errors import Missing_Input_Columns, No_Such_Column, Non_Unique_Key
 
 ## PRIVATE
 replace : Table -> (Table | Map) -> (Text | Integer | By_Type | Vector (Text | Integer | By_Type)) -> (Text | Integer | Nothing) -> (Text | Integer | Nothing) -> Boolean -> Problem_Behavior -> Table ! Missing_Input_Columns | Non_Unique_Key | Unmatched_Rows_In_Lookup
-replace base_table lookup_table columns:(Text | Integer | By_Type | Vector (Text | Integer | By_Type)) from_column:(Text | Integer | Nothing)=Nothing to_column:(Text | Integer | Nothing)=Nothing allow_unmatched_rows:Boolean=True on_problems:Problem_Behavior=Problem_Behavior.Report_Warning =
+replace base_table lookup_table columns:(Text | Integer | By_Type | Vector (Text | Integer | By_Type)) from_column:(Text | Integer | Nothing)=Nothing to_column:(Text | Integer | Nothing)=Nothing allow_unmatched_rows:Boolean=True on_problems:Problem_Behavior=..Report_Warning =
     case columns of
         _ : Vector ->
             problem_builder = Problem_Builder.new error_on_missing_columns=True
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Split_Tokenize.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Split_Tokenize.enso
index 101446f683..7665a8336d 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Split_Tokenize.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Split_Tokenize.enso
@@ -9,7 +9,7 @@ from project.Internal.Fan_Out import all
    Splits a column of text into a set of new columns.
    See `Table.split_to_columns`.
 split_to_columns : Table -> Text | Integer -> Text -> Columns_To_Add -> Problem_Behavior -> Table
-split_to_columns table input_column_id delimiter="," column_count:Columns_To_Add=..All_Columns on_problems=Report_Error =
+split_to_columns table input_column_id delimiter="," column_count:Columns_To_Add=..All_Columns on_problems:Problem_Behavior=..Report_Error =
     column = table.at input_column_id
     Value_Type.expect_text column <|
         fan_out_to_columns table column.name (handle_nothing (_.split delimiter)) column_count on_problems=on_problems
@@ -28,7 +28,7 @@ split_to_rows table input_column_id:(Text | Integer) delimiter="," =
      expression.
    See `Table.tokenize_to_columns`.
 tokenize_to_columns : Table -> Text | Integer -> Text -> Case_Sensitivity -> Columns_To_Add -> Problem_Behavior -> Table
-tokenize_to_columns table input_column_id pattern case_sensitivity column_count on_problems =
+tokenize_to_columns table input_column_id pattern case_sensitivity column_count on_problems:Problem_Behavior =
     column = table.at input_column_id
     Value_Type.expect_text column <|
         fan_out_to_columns table column.name (handle_nothing (_.tokenize pattern case_sensitivity)) column_count on_problems=on_problems
@@ -48,7 +48,7 @@ tokenize_to_rows table input_column_id pattern="." case_sensitivity=Case_Sensiti
      pattern.
    See `Table.parse_to_columns`.
 parse_to_columns : Table -> Text | Integer -> Text | Regex -> Case_Sensitivity -> Boolean -> Problem_Behavior -> Table
-parse_to_columns table input_column_id (pattern:(Text | Regex)=".") case_sensitivity=Case_Sensitivity.Sensitive parse_values=True on_problems=Report_Error =
+parse_to_columns table input_column_id (pattern:(Text | Regex)=".") case_sensitivity=Case_Sensitivity.Sensitive parse_values=True on_problems:Problem_Behavior=..Report_Error =
     regex = case pattern of
         _ : Regex -> pattern.recompile case_sensitivity
         _ : Text ->
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Storage.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Storage.enso
index 62b9c056a5..98499172ae 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Storage.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Storage.enso
@@ -95,7 +95,7 @@ java_to_enso x = case x of
    Converts a value type to an in-memory storage type, possibly approximating
    it to the closest supported type.
 from_value_type : Value_Type -> Problem_Behavior -> StorageType
-from_value_type value_type on_problems =
+from_value_type value_type on_problems:Problem_Behavior =
     approximate_storage = closest_storage_type value_type
     approximated_value_type = to_value_type approximate_storage
     problems = if approximated_value_type == value_type then [] else
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso
index add1526a58..ba8eefa57c 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso
@@ -577,7 +577,7 @@ replace_columns_with_columns table old_columns new_columns =
      transforming the old columns. The old columns must all exist in the table,
      and the new columns must all have the same name.
 replace_columns_with_transformed_columns : Table -> Text | Integer | Regex | Vector (Integer | Text | Regex) -> (Column -> Column) -> Boolean -> Problem_Behavior -> Table
-replace_columns_with_transformed_columns table selectors transformer error_on_missing_columns=True on_problems=Report_Warning =
+replace_columns_with_transformed_columns table selectors transformer error_on_missing_columns=True on_problems:Problem_Behavior=..Report_Warning =
     internal_columns = table.columns_helper.select_columns selectors Case_Sensitivity.Default reorder=False error_on_missing_columns=error_on_missing_columns on_problems=on_problems
     columns = internal_columns.map table.columns_helper.make_column
     new_columns = columns.map on_problems=No_Wrap transformer
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Ref.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Ref.enso
index 41b33e1cb5..3f7ccf1928 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Ref.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Ref.enso
@@ -33,7 +33,7 @@ type Table_Ref
        - on_problems: Specifies how to handle non-fatal problems, attaching a
          warning by default.
     evaluate_expression : Expression -> Problem_Behavior -> Any ! No_Such_Column | Invalid_Value_Type | Expression_Error
-    evaluate_expression self expression:Expression on_problems=Report_Warning = self.underlying.evaluate_expression expression on_problems=on_problems
+    evaluate_expression self expression:Expression on_problems:Problem_Behavior=..Report_Warning = self.underlying.evaluate_expression expression on_problems=on_problems
 
     ## PRIVATE
        Resolve a Column_Ref to a Column, keeping any other values as-is.
@@ -82,7 +82,7 @@ type Table_Ref
     ## PRIVATE
        Set a column.
     set : Any -> Text -> Set_Mode -> Problem_Behavior -> Table_Ref ! Existing_Column | Missing_Column | No_Such_Column | Expression_Error
-    set self column as:Text set_mode:Set_Mode=Set_Mode.Add_Or_Update on_problems:Problem_Behavior=Report_Warning =
+    set self column as:Text set_mode:Set_Mode=Set_Mode.Add_Or_Update on_problems:Problem_Behavior=..Report_Warning =
         new_underlying = self.underlying.set column as set_mode=set_mode on_problems=on_problems
         Table_Ref.from new_underlying
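
The diff applies one mechanical pattern throughout: each `on_problems` argument gains an explicit `Problem_Behavior` type ascription, and defaults move from the fully qualified constructor (`Problem_Behavior.Report_Warning`) to the autoscoped form (`..Report_Warning`), which is resolved against the ascribed or expected type. A minimal before/after sketch of that shape, using a hypothetical `process` method (the name is illustrative and not taken from the diff):

    type Row_Processor
        ## Before: untyped parameter with a fully qualified default.
        process self rows on_problems=Problem_Behavior.Report_Warning =
            on_problems.attach_problems_before [] rows

        ## After: explicit ascription plus an autoscoped default;
           `..Report_Warning` resolves to `Problem_Behavior.Report_Warning`
           through the `Problem_Behavior` ascription.
        process self rows (on_problems : Problem_Behavior = ..Report_Warning) =
            on_problems.attach_problems_before [] rows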