Last Bits for Release (#11890)

- Linting clean-up before the doc site load.
James Dunkerley 2024-12-17 21:04:41 +00:00 committed by GitHub
parent d53c51f8c9
commit bec33b4f2e
17 changed files with 58 additions and 51 deletions

View File

@@ -161,7 +161,7 @@ type Redshift_Dialect
prepare_fetch_types_query : SQL_Expression -> Context -> SQL_Statement
prepare_fetch_types_query self expression context =
Base_Generator.default_fetch_types_query self expression context
## PRIVATE
generate_collate self collation_name:Text -> Text = Base_Generator.default_generate_collate collation_name

View File

@@ -305,7 +305,7 @@ Text.find_all self pattern:Text|Regex=".*" case_sensitivity:Case_Sensitivity=..S
compiled_pattern = Regex.compile pattern case_insensitive=case_insensitive
compiled_pattern.match_all self
## ALIAS check matches, regex, like
## ALIAS check matches, like, regex
GROUP Text
ICON preparation
@@ -360,7 +360,7 @@ Text.match self pattern:Text|Regex=".*" case_sensitivity:Case_Sensitivity=..Sens
Text.to_regex : Boolean -> Regex ! Regex_Syntax_Error | Illegal_Argument
Text.to_regex self case_insensitive=False = Regex.compile self case_insensitive
## ALIAS tokenize, parse
## ALIAS parse, tokenize
GROUP Conversions
ICON split
@@ -426,8 +426,8 @@ Text.split self delimiter="," case_sensitivity:Case_Sensitivity=..Sensitive use_
combined_delimiter = parenthesize (delimiter.map parenthesize . join '|')
self.split combined_delimiter case_sensitivity=case_sensitivity use_regex=True
## ALIAS split, parse, regex
ADVANCED
## ADVANCED
ALIAS parse, regex, split
GROUP Conversions
ICON split
Takes an input string and a pattern and returns all the matches as a
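
For reference, a minimal usage sketch of the Text methods whose ALIAS tags are re-sorted above. It is not part of this commit and assumes only the signatures visible in these hunks:

    from Standard.Base import all

    main =
        # `find_all` compiles the pattern with `Regex.compile` (see the body above),
        # so a plain Text pattern is treated as a regular expression.
        digits = "a1 b2 c3" . find_all "[0-9]"
        # `to_regex` turns a Text into a compiled Regex, optionally case-insensitive.
        pattern = "[a-z]+" . to_regex case_insensitive=True
        [digits, pattern.match_all "ABC abc"]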

View File

@@ -80,10 +80,12 @@ type Writable_Data_Link
A type class representing a data link that is a link to a file.
If the data link links to a directory, it allows crossing filesystems using `/` and `list` operations.
type File_Like_Data_Link
## PRIVATE
Value underlying as_file
## PRIVATE
Creates a `Writable_Data_Link` from a data link instance, if that data
link supports writing. If it does not, an error is thrown.
find data_link_instance (if_not_supported = (Error.throw (Illegal_Argument.Error "The "+(data_link_name data_link_instance)+" is not a link to a file object."))) -> File_Like_Data_Link ! Illegal_Argument =
handle_no_conversion _ = if_not_supported
Panic.catch No_Such_Conversion (File_Like_Data_Link.from data_link_instance) handle_no_conversion
## PRIVATE
Creates a `Writable_Data_Link` from a data link instance, if that data
link supports writing. If it does not, an error is thrown.
find data_link_instance (if_not_supported = (Error.throw (Illegal_Argument.Error "The "+(data_link_name data_link_instance)+" is not a link to a file object."))) -> File_Like_Data_Link ! Illegal_Argument =
handle_no_conversion _ = if_not_supported
Panic.catch No_Such_Conversion (File_Like_Data_Link.from data_link_instance) handle_no_conversion
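
A hedged sketch of how `find` is meant to be called. The `resolve_to_file` name is illustrative, not from this commit; the import of `File_Like_Data_Link` is omitted because the defining module's path is not shown in this view, and `Nothing` and `is_nothing` are assumed to come from `Standard.Base`:

    # Resolve a data link to the file it points at, returning Nothing instead of
    # an Illegal_Argument error when the link is not file-like.
    resolve_to_file data_link_instance =
        resolved = File_Like_Data_Link.find data_link_instance if_not_supported=Nothing
        if resolved.is_nothing then Nothing else resolved.as_file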

View File

@@ -3,7 +3,7 @@ import Standard.Base.Meta
import Standard.Base.Nothing.Nothing
import Standard.Base.Runtime.Managed_Resource.Managed_Resource
import Standard.Base.Runtime.Ref.Ref
from Standard.Base.Data.Boolean import Boolean, True, False
from Standard.Base.Data.Boolean import Boolean, False, True
## PRIVATE
This is used by ReloadDetector.java to create a `Managed_Resource` that is
@@ -17,10 +17,12 @@ from Standard.Base.Data.Boolean import Boolean, True, False
type Reload_Detector
private Value mr:Managed_Resource
## Create a new reload detector.
new -> Reload_Detector =
mr = Managed_Resource.register (Ref.new 1) (_-> Nothing) True
Reload_Detector.Value mr
## Check if a reload has occurred.
has_reload_occurred self =
self.mr.has_been_finalized
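
For orientation, a sketch of the intended call pattern, not part of this commit; the `detector` and `needs_refresh` names are illustrative, and the import of `Reload_Detector` is omitted since the module path is not visible in this view:

    # Create the detector once; `has_reload_occurred` then reports whether its
    # backing Managed_Resource has been finalized (see `has_been_finalized` above).
    detector = Reload_Detector.new
    needs_refresh = detector.has_reload_occurred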

View File

@@ -4,7 +4,7 @@ import project.Any.Any
import project.Errors.Common.Uninitialized_State
import project.Meta
import project.Nothing.Nothing
from project.Data.Boolean import Boolean, True, False
from project.Data.Boolean import Boolean, False, True
## Resource provides an API for manual management of computation resources.
@@ -79,8 +79,7 @@ type Managed_Resource
the `Managed_Resource` (or to `Uninitialized_State` error).
Returns:
Value returned from the `action`
with : (Any -> Any) -> Any -> Any
with : (Any -> Any) -> Any
with self ~action = @Tail_Call with_builtin self action
## PRIVATE
@@ -102,5 +101,8 @@ type Managed_Resource
if x.is_error.not then False else
if x.catch.is_a Uninitialized_State then True else x
## PRIVATE
register_builtin r fn sys:Boolean = @Builtin_Method "Managed_Resource.register_builtin"
## PRIVATE
with_builtin r fn = @Builtin_Method "Managed_Resource.with_builtin"
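
To illustrate the corrected `with` signature (a single action whose result is returned), a small standalone sketch that is not part of this commit; the imports and the `register` call mirror the Reload_Detector hunk earlier in this diff:

    import Standard.Base.Nothing.Nothing
    import Standard.Base.Runtime.Managed_Resource.Managed_Resource
    import Standard.Base.Runtime.Ref.Ref
    from Standard.Base.Data.Boolean import Boolean, False, True

    main =
        # Register a Ref with a no-op finalizer; the trailing True mirrors the
        # `register` call in the Reload_Detector hunk above.
        mr = Managed_Resource.register (Ref.new 42) (_-> Nothing) True
        # `with` passes the underlying resource to one action and returns the
        # action's result, matching the corrected `(Any -> Any) -> Any` signature.
        mr.with ref->
            ref.get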

View File

@@ -4,12 +4,11 @@ import project.Data.Numbers.Integer
import project.Data.Text.Encoding.Encoding
import project.Data.Vector.Vector
import project.Error.Error
import project.Panic.Panic
import project.Errors.File_Error.File_Error
import project.Errors.Illegal_State.Illegal_State
import project.Errors.Problem_Behavior.Problem_Behavior
from project.Errors.Common import Uninitialized_State
import project.Nothing.Nothing
import project.Panic.Panic
import project.Runtime.Managed_Resource.Managed_Resource
import project.System.Advanced.Restartable_Input_Stream.Restartable_Input_Stream
import project.System.File.Advanced.Temporary_File.Temporary_File
@@ -18,11 +17,12 @@ import project.System.File.File_Access.File_Access
import project.System.File.Generic.Writable_File.Writable_File
import project.System.Internal.Reporting_Stream_Decoder_Helper
from project.Data.Boolean import Boolean, False, True
from project.Errors.Common import Uninitialized_State
polyglot java import java.io.IOException
polyglot java import java.io.BufferedInputStream
polyglot java import java.io.ByteArrayInputStream
polyglot java import java.io.InputStream as Java_Input_Stream
polyglot java import java.io.IOException
polyglot java import org.enso.base.encoding.Encoding_Utils
polyglot java import org.enso.base.encoding.ReportingStreamDecoder
polyglot java import org.enso.base.Stream_Utils

View File

@@ -10,6 +10,7 @@ import project.Column_Description.Column_Description
import project.Connection.Connection.Connection
import project.DB_Table.DB_Table
import project.Dialect
import project.Internal.Common.Connections_Helpers
import project.Internal.Connection.Entity_Naming_Properties.Entity_Naming_Properties
import project.Internal.Data_Link_Setup.Data_Link_Setup
import project.Internal.IR.Query.Query
@@ -18,13 +19,11 @@ import project.Internal.SQL_Type_Reference.SQL_Type_Reference
import project.SQL_Query.SQL_Query
import project.SQL_Statement.SQL_Statement
import project.SQL_Type.SQL_Type
import project.Internal.Common.Connections_Helpers
from project.Connection.Connection import make_schema_selector, make_structure_creator, make_table_name_selector, make_table_types_selector
from project.Errors import SQL_Error, Table_Already_Exists, Table_Not_Found
from project.Internal.Postgres.Helpers import get_encoding_name, parse_postgres_encoding
from project.Internal.Upload.Helpers.Default_Arguments import first_column_name_in_structure
type Postgres_Connection
## PRIVATE

View File

@@ -1,7 +1,7 @@
from Standard.Base import all
import Standard.Base.Data.Read.Many_Files_List.Many_Files_List
import Standard.Base.Errors.Common.Index_Out_Of_Bounds
import Standard.Base.Errors.Common.Floating_Point_Equality
import Standard.Base.Errors.Common.Index_Out_Of_Bounds
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Errors.Illegal_State.Illegal_State
import Standard.Base.Internal.Rounding_Helpers

View File

@@ -30,6 +30,7 @@ import Standard.Table.Internal.Column_Naming_Helper.Column_Naming_Helper
import Standard.Table.Internal.Constant_Column.Constant_Column
import Standard.Table.Internal.Display_Helpers
import Standard.Table.Internal.Join_Kind_Cross.Join_Kind_Cross
import Standard.Table.Internal.Match_Columns_Helpers
import Standard.Table.Internal.Problem_Builder.Problem_Builder
import Standard.Table.Internal.Replace_Helpers
import Standard.Table.Internal.Table_Helpers
@@ -39,7 +40,6 @@ import Standard.Table.Internal.Table_Ref.Table_Ref
import Standard.Table.Internal.Unique_Name_Strategy.Unique_Name_Strategy
import Standard.Table.Internal.Value_Type_Helpers
import Standard.Table.Internal.Widget_Helpers
import Standard.Table.Internal.Match_Columns_Helpers
import Standard.Table.Row.Row
import Standard.Table.Rows_To_Read.Rows_To_Read
import Standard.Table.Value_Type.By_Type
@@ -404,7 +404,7 @@ type DB_Table
result = self.updated_columns (new_columns.map _.as_internal)
Warning.attach (Deprecated.Warning "Standard.Database.DB_Table.DB_Table" "remove_columns_by_type" "Deprecated: use `remove_columns` with a `By_Type` instead.") result
## ALIAS select_blank_fields, select_missing_columns, select_na, filter_blank_columns
## ALIAS filter_blank_columns, select_blank_fields, select_missing_columns, select_na
GROUP Standard.Base.Selections
ICON select_column
@@ -622,7 +622,7 @@ type DB_Table
Warning.with_suspended new_names names->
self.updated_columns (self.internal_columns.map c-> c.rename (names.at c.name))
## ALIAS rename, header
## ALIAS header, rename
GROUP Standard.Base.Metadata
ICON table_edit
Returns a new table with the columns renamed based on entries in the
@@ -1418,7 +1418,7 @@ type DB_Table
_ = [columns, case_sensitivity, on_problems]
Error.throw (Unsupported_Database_Operation.Error "duplicates")
## ALIAS merge, xlookup, vlookup, hlookup, lookup
## ALIAS hlookup, lookup, merge, vlookup, xlookup
GROUP Standard.Base.Calculations
ICON join
Joins two tables according to the specified join conditions.
@@ -1611,7 +1611,7 @@ type DB_Table
on_problems.attach_problems_before limit_problems <|
self.join_or_cross_join right join_kind=Join_Kind_Cross on=[] right_prefix on_problems
## ALIAS join, xlookup, vlookup, hlookup, lookup
## ALIAS hlookup, join, lookup, vlookup, xlookup
GROUP Standard.Base.Calculations
ICON join
Merges this table with a lookup table.
@@ -1971,7 +1971,7 @@ type DB_Table
DB_Table.Value union_alias self.connection new_columns new_ctx
## ALIAS group by, summarize, count, count distinct, sum, average, mean, median, percentile, mode, standard deviation, variance, minimum, maximum, first, last, shortest, longest
## ALIAS average, count, count distinct, first, group by, last, longest, maximum, mean, median, minimum, mode, percentile, shortest, standard deviation, sum, summarize, variance
GROUP Standard.Base.Calculations
ICON transform4
@@ -2310,7 +2310,7 @@ type DB_Table
_ = [columns, format, locale, error_on_missing_columns, on_problems]
Error.throw (Unsupported_Database_Operation.Error "format")
## ALIAS tokenize, parse
## ALIAS parse, tokenize
GROUP Standard.Base.Conversions
ICON split
Splits a column of text into a set of new columns.
@@ -2336,7 +2336,7 @@ type DB_Table
_ = [column, delimiter, column_count.columns_to_split, on_problems]
Error.throw (Unsupported_Database_Operation.Error "split_to_columns")
## ALIAS tokenize, parse
## ALIAS parse, tokenize
GROUP Standard.Base.Conversions
ICON split
Splits a column of text into a set of new rows.
@@ -2352,7 +2352,7 @@ type DB_Table
_ = [column, delimiter]
Error.throw (Unsupported_Database_Operation.Error "split_to_rows")
## ALIAS split, parse, regex
## ALIAS parse, regex, split
GROUP Standard.Base.Conversions
ICON split
Tokenizes a column of text into a set of new columns using a regular
@@ -2381,7 +2381,7 @@ type DB_Table
_ = [column, pattern, case_sensitivity, column_count, on_problems]
Error.throw (Unsupported_Database_Operation.Error "tokenize_to_columns")
## ALIAS split, parse, regex
## ALIAS parse, regex, split
GROUP Standard.Base.Conversions
ICON split
Tokenizes a column of text into a set of new rows using a regular
@@ -2404,7 +2404,7 @@ type DB_Table
_ = [column, pattern, case_sensitivity, at_least_one_row]
Error.throw (Unsupported_Database_Operation.Error "tokenize_to_rows")
## ALIAS split, tokenize, regex
## ALIAS regex, split, tokenize
GROUP Standard.Base.Conversions
ICON split
Converts a Text column into new columns using a regular expression
@@ -2569,7 +2569,7 @@ type DB_Table
_ = [columns, shrink_types, error_on_missing_columns, on_problems]
Error.throw (Unsupported_Database_Operation.Error "auto_cast")
## ALIAS drop_missing_rows, dropna, remove_blank_rows, remove_empty_rows, remove_missing_rows, filter_empty_rows, drop_empty_rows
## ALIAS drop_empty_rows, drop_missing_rows, dropna, filter_empty_rows, remove_blank_rows, remove_empty_rows, remove_missing_rows
GROUP Standard.Base.Selections
ICON preparation
Remove rows which are all blank or containing blank values.
@@ -2605,7 +2605,7 @@ type DB_Table
table = self.connection.read_statement sql
table.at column_name . at 0
## ALIAS load, import
## ALIAS import, load
GROUP Standard.Base.Input
ICON data_input
Returns a materialized dataframe containing rows of this table.
@@ -2782,7 +2782,7 @@ type DB_Table
to_text : Text
to_text self = "(Database Table "+self.name.to_text+")"
## ALIAS export, save, output, to_file
## ALIAS export, output, save, to_file
GROUP Standard.Base.Output
ICON data_output
This function writes the table into a file.
@@ -3002,7 +3002,7 @@ type DB_Table
transformer col = col.text_cleanse remove
Table_Helpers.replace_columns_with_transformed_columns self from transformer
## ALIAS cumulative, count, sum, total, minimum, maximum, sum, mean, product, variance, standard deviation
## ALIAS count, cumulative, maximum, mean, minimum, product, standard deviation, sum, sum, total, variance
GROUP Standard.Base.Values
ICON data_input
Adds a new column to the table with a running calculation.

View File

@@ -1,6 +1,6 @@
import Standard.Base.Errors.Common.Missing_Argument
from Standard.Base import Text, Vector
from Standard.Base.Metadata import Display, make_single_choice, Widget
import Standard.Base.Errors.Common.Missing_Argument
## Specifies which columns to keep in a union operation.
type Columns_To_Keep

View File

@@ -891,9 +891,9 @@ type Mixing_Date_Time_Types
## Indicates that a table with no rows has been returned.
type No_Rows
## PRIVATE
The message should be used to provide additional context.
private Warning message:Text
## PRIVATE
The message should be used to provide additional context.
private Warning message:Text
## PRIVATE
to_display_text self -> Text = self.message
## PRIVATE
to_display_text self -> Text = self.message

View File

@@ -21,10 +21,10 @@ import project.Excel.Excel_Range.Excel_Range
import project.Headers.Headers
import project.Internal.Excel_Reader
import project.Internal.Excel_Section.Excel_Section
import project.Internal.Read_Many_As_Merged_Table_Strategy.Read_Many_As_Merged_Table_Strategy
import project.Internal.Read_Many_As_Merged_Table_Strategy.Read_Many_As_Table_Result
import project.Internal.Java_Problems
import project.Internal.Problem_Builder.Problem_Builder
import project.Internal.Read_Many_As_Merged_Table_Strategy.Read_Many_As_Merged_Table_Strategy
import project.Internal.Read_Many_As_Merged_Table_Strategy.Read_Many_As_Table_Result
import project.Match_Columns.Match_Columns
import project.Return_As_Table.Return_As_Table
import project.Rows_To_Read.Rows_To_Read
@@ -217,7 +217,7 @@ type Excel_Workbook
_ = [alias]
self.read query headers=headers
## ALIAS get, range, sheet, worksheet, import, load, open
## ALIAS get, import, load, open, range, sheet, worksheet
GROUP Standard.Base.Input
ICON data_input
Read a range into a Table.

View File

@@ -95,7 +95,7 @@ XML_Element.to_table self =
headers = Examples.simple_table_json_headers
Table.from_objects json headers
@fields (Widget.Vector_Editor item_editor=Widget.Text_Input item_default='""')
Table.from_objects : Any -> Vector | Nothing -> Boolean -> Table
Table.from_objects : Any -> Vector | Nothing -> Table
Table.from_objects value (fields : Vector | Nothing = Nothing) =
Expand_Objects_Helpers.create_table_from_objects value fields treat_dictionary_as_sequence=False
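
A brief sketch of the corrected two-argument `from_objects` signature in use. It is not part of this commit, and it assumes `JS_Object.from_pairs` from `Standard.Base` for building the example objects:

    from Standard.Base import all
    from Standard.Table import all

    main =
        ada = JS_Object.from_pairs [["name", "Ada"], ["age", 36]]
        alan = JS_Object.from_pairs [["name", "Alan"], ["age", 41]]
        # Only the listed fields become columns of the resulting table.
        Table.from_objects [ada, alan] ["name", "age"]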

View File

@@ -80,7 +80,7 @@ expand_column (table : Table) (column : Text | Integer) (fields : (Vector Text)
table = Table.new [["aaa", [1, 2]], ["bbb", [[30, 31], [40, 41]]]]
# => Table.new [["aaa", [1, 1, 2, 2]], ["bbb", [30, 31, 40, 41]]]
expand_to_rows : Table -> Text | Integer -> Boolean -> Table ! Type_Error | No_Such_Column | Index_Out_Of_Bounds
expand_to_rows : Table -> Text | Integer -> Boolean -> Boolean -> Table ! Type_Error | No_Such_Column | Index_Out_Of_Bounds
expand_to_rows table column:(Text | Integer) at_least_one_row=False sequences_only=False = if column.is_a Integer then expand_to_rows table (table.at column).name at_least_one_row else
row_expander : Any -> Vector
row_expander value:Convertible_To_Rows =

View File

@@ -8,9 +8,9 @@ import project.Blank_Selector.Blank_Selector
import project.Column.Column
import project.Expression.Expression
import project.Internal.Column_Naming_Helper.Column_Naming_Helper
import project.Internal.Match_Columns_Helpers.Column_Set
import project.Internal.Problem_Builder.Problem_Builder
import project.Internal.Value_Type_Helpers
import project.Internal.Match_Columns_Helpers.Column_Set
import project.Position.Position
import project.Set_Mode.Set_Mode
import project.Sort_Column.Sort_Column

View File

@@ -137,7 +137,7 @@ make_json_for_object_matrix current vector idx=0 = if idx == vector.length then
## PRIVATE
Render Dictionary to JSON
make_json_for_dictionary : Dictionary -> Integer -> Text -> JS_Object
make_json_for_dictionary : Dictionary -> Integer -> JS_Object
make_json_for_dictionary dict max_items =
header = ["header", ["key", "value"]]
all_rows = ["all_rows_count", dict.size]

View File

@@ -3,7 +3,9 @@ from Standard.Table import all
from Standard.Database import all
from Standard.AWS import all
from Standard.Google_Api import all
from Standard.Microsoft import all
from Standard.Snowflake import all
from Standard.Tableau import all
import Standard.Examples
import Standard.Visualization
@@ -13,5 +15,5 @@ main =
#### METADATA ####
[[{"index":{"value":270},"size":{"value":26}},"fcf199e6-d2b1-44b7-be92-a47545d6a593"]]
{"ide":{"node":{"fcf199e6-d2b1-44b7-be92-a47545d6a593":{"position":{"vector":[-388,282]},"visualization":{"show":true,"width":1027.671875,"height":193}}},"import":{}}}
[[{"index":{"value":338},"size":{"value":26}},"fcf199e6-d2b1-44b7-be92-a47545d6a593"]]
{"ide":{"node":{"fcf199e6-d2b1-44b7-be92-a47545d6a593":{"position":{"vector":[-388,282]},"visualization":{"show":true,"width":1129.671875,"height":252}}},"widget":{},"import":{},"snapshot":"eJyFkEEKwjAURPc5xT9BwQN0UVFcuaroskzbRD7+5JckpeLpRVBRpHU38B4DMy6qpzoj9Ih9sUayxH7QmAkixn3RA1pZwBtktIsF1amehzvVs9imGnje2XMXNanL80oddHKCy78dGD+FZ3wr2yv8IDb9gCOnEcI3ZNZgjAcHKg0RkWOxKyrpcUMRLXp6lRSTlU69NXf7xIOE"}}