Using conversions in a few places (#7859)
- Shuffle a few `from`s into correct places:
  - `Day_Of_Week.from`, removing the `Day_Of_Week_From` module.
- Added a shortcut for `http` and `https` paths in `Data.read` so it calls into `Data.fetch`, giving a single entry point.
- Moved `URI` extensions from the `Standard.Base.Data` module into `Standard.Base.Network.Extensions`.
- Added a `post` extension for `URI`.
- Added `contains_key` to `JS_Object`.
- Restored `into` in `JS_Object`:
  - Follows the old logic of populating a constructor.
  - Will use a conversion from `JS_Object` if one is present.
- Added automatic deserialization of `Date`, `Time_Of_Day` and `Date_Time` from JSON, using conversions from `JS_Object`.
- Added a conversion from `Text` to `HTTP_Method`, and type checking where `HTTP_Method` is used in public APIs.
- Added support for `Date`, `Time_Of_Day` and `Date_Time` in `Table.from_objects`.
- Added `expand_column` to `Table` to expand a `JS_Object` column into values.
- Added type checking for `Table` in `right` arguments (allowing `Column`s to be used).
- Used type checking in `Table.set` to allow conversion to a `Column`.
- Removed some unused imports.
- Fixed a bug in an S3 edge case.
commit fb50eb7595
parent 608ff2da09
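Taken together, these changes let a single `Data.read`/`Data.fetch` call pull JSON from the web and turn it into a table. A minimal sketch of the intended workflow; the endpoint URL and field names are made up for illustration:

    from Standard.Base import all
    from Standard.Table import all

    main =
        # `Data.read` now recognises http/https paths and delegates to `Data.fetch`.
        response = Data.read "https://example.com/api/users.json"
        # `Table.from_objects` turns the deserialized JS_Object rows into columns.
        Table.from_objects response ["name", "joined"]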
@ -578,6 +578,7 @@
- [Renamed `Decimal` to `Float`.][7807]
- [Implemented `Date_Time_Formatter` for more user-friendly date/time format
  parsing.][7826]
- [Added `Table.expand_column` and improved JSON deserialization.][7859]
- [Implemented `Table.auto_value_types` for in-memory tables.][7908]
- [Implemented Text.substring to easily select part of a Text field][7913]

@ -822,6 +823,7 @@
[7776]: https://github.com/enso-org/enso/pull/7776
[7807]: https://github.com/enso-org/enso/pull/7807
[7826]: https://github.com/enso-org/enso/pull/7826
[7859]: https://github.com/enso-org/enso/pull/7859
[7908]: https://github.com/enso-org/enso/pull/7908
[7913]: https://github.com/enso-org/enso/pull/7913
@ -12,7 +12,7 @@ type AWS_SDK_Error
    to_display_text self = "AWS SDK Error: " + self.message

## PRIVATE
handle_java_errors : AWS_SDK_Error
handle_java_errors : Any -> Any ! AWS_SDK_Error
handle_java_errors ~action =
    Panic.catch SdkClientException action caught_panic->
        Error.throw (AWS_SDK_Error.Error caught_panic.payload.getMessage)

@ -149,4 +149,4 @@ make_client credentials:(AWS_Credential | Nothing) =
   Utility method for running an action with Java exceptions mapping.
handle_io_errors uri:URI ~action =
    Panic.catch IOException action caught_panic->
        S3_Error.Error ("An IO error has occurred: " + caught_panic.payload.to_text) uri.to_text
        Error.throw (S3_Error.Error ("An IO error has occurred: " + caught_panic.payload.to_text) uri.to_text)
@ -12,12 +12,12 @@ import project.Network.HTTP.Header.Header
|
||||
import project.Network.HTTP.HTTP
|
||||
import project.Network.HTTP.HTTP_Method.HTTP_Method
|
||||
import project.Network.HTTP.Request.Request
|
||||
import project.Network.HTTP.Request_Body.Request_Body
|
||||
import project.Network.HTTP.Request_Error
|
||||
import project.Network.URI.URI
|
||||
import project.Nothing.Nothing
|
||||
import project.System.File.File
|
||||
from project.Data.Boolean import Boolean, False, True
|
||||
from project.Network.HTTP.Request_Body import all
|
||||
from project.System.File_Format import Auto_Detect, File_Format
|
||||
|
||||
## ALIAS load, open
@ -27,7 +27,8 @@ from project.System.File_Format import Auto_Detect, File_Format
   specified will use the file's extension to determine the file format.

   Arguments:
   - path: The path to the file to read.
   - path: The path to the file to read. If the path is a URI, then the data
     will be fetched if it is from a supported protocol.
   - format: A `File_Format` object used to read file into memory.
     If `Auto_Detect` is specified; the provided file determines the specific
     type and configures it appropriately. If there is no matching type then
@ -58,8 +59,10 @@ from project.System.File_Format import Auto_Detect, File_Format
       example_xls_to_table = Data.read Examples.xls (Excel (Worksheet 'Dates'))
@format File_Format.default_widget
read : Text | File -> File_Format -> Problem_Behavior -> Any ! File_Error
read path format=Auto_Detect (on_problems=Problem_Behavior.Report_Warning) =
    File.new path . read format on_problems
read path format=Auto_Detect (on_problems=Problem_Behavior.Report_Warning) = case path of
    _ : Text -> if (path.starts_with "http://") || (path.starts_with "https://") then fetch path else
        read (File.new path) format on_problems
    _ -> File.new path . read format on_problems
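With this shortcut in place, reading from an `http`/`https` path and calling `Data.fetch` are equivalent. A small sketch; the URL is hypothetical:

    from Standard.Base import all

    main =
        # Both calls hit the same endpoint; `Data.read` now simply delegates to `Data.fetch`.
        via_read = Data.read "https://example.com/data.csv"
        via_fetch = Data.fetch "https://example.com/data.csv"
        [via_read, via_fetch]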
## ALIAS load text, open text
   GROUP Input
@ -154,10 +157,9 @@ list_directory directory name_filter=Nothing recursive=False =
   - method: The HTTP method to use. Must be one of `HTTP_Method.Get`,
     `HTTP_Method.Head`, `HTTP_Method.Delete`, `HTTP_Method.Options`.
     Defaults to `HTTP_Method.Get`.

   - headers: The headers to send with the request. Defaults to an empty vector.
   - try_auto_parse: If successful should the body be attempted to be parsed to
     an Enso native object.
   - try_auto_parse_response: If successful should the body be attempted to be
     parsed to an Enso native object.

   > Example
     Read from an HTTP endpoint.
@ -177,7 +179,7 @@ fetch (uri:(URI | Text)) (method:HTTP_Method=HTTP_Method.Get) (headers:(Vector (
    if try_auto_parse_response.not then response.with_materialized_body else
        response.decode if_unsupported=response.with_materialized_body

## ALIAS upload, http post
## ALIAS http post, upload
   GROUP Input
   Writes the provided data to the provided URI. Returns the response,
   parsing the body if the content-type is recognised. Returns an error if the
@ -300,18 +302,3 @@ post (uri:(URI | Text)) (body:Request_Body=Request_Body.Empty) (method:HTTP_Meth
    response = HTTP.post uri body method headers
    if try_auto_parse_response.not then response.with_materialized_body else
        response.decode if_unsupported=response.with_materialized_body

## ALIAS download, http get
   GROUP Input
   Fetches from the URI and returns the response, parsing the body if the
   content-type is recognised. Returns an error if the status code does not
   represent a successful response.

   Arguments:
   - method: The HTTP method to use. Defaults to `GET`.
   - headers: The headers to send with the request. Defaults to an empty vector.
   - try_auto_parse: If successful should the body be attempted to be parsed to
     an Enso native object.
URI.fetch : HTTP_Method -> Vector (Header | Pair Text Text) -> Boolean -> Any
URI.fetch self method=HTTP_Method.Get headers=[] try_auto_parse=True =
    Data.fetch self method headers try_auto_parse
@ -275,7 +275,7 @@ sql_like_to_regex sql_pattern =
|
||||
Regex.compile regex_pattern
|
||||
|
||||
## PRIVATE
|
||||
unify_condition_or_predicate : Filter_Condition | (Any -> Boolean) -> (Any -> Boolean)
|
||||
unify_condition_or_predicate : Filter_Condition|(Any->Boolean) -> (Any -> Boolean)
|
||||
unify_condition_or_predicate (condition_or_predicate : Filter_Condition | Function) =
|
||||
case condition_or_predicate of
|
||||
condition : Filter_Condition -> condition.to_predicate
|
||||
|
@ -6,8 +6,12 @@ import project.Data.Numbers.Integer
|
||||
import project.Data.Numbers.Number
|
||||
import project.Data.Pair.Pair
|
||||
import project.Data.Text.Text
|
||||
import project.Data.Time.Date.Date
|
||||
import project.Data.Time.Date_Time.Date_Time
|
||||
import project.Data.Time.Time_Of_Day.Time_Of_Day
|
||||
import project.Data.Vector.Vector
|
||||
import project.Error.Error
|
||||
import project.Errors.Common.No_Such_Conversion
|
||||
import project.Errors.Common.Type_Error
|
||||
import project.Errors.Illegal_Argument.Illegal_Argument
|
||||
import project.Errors.No_Such_Key.No_Such_Key
|
||||
@ -131,6 +135,11 @@ type JS_Object
|
||||
## PRIVATE
|
||||
Value js_object
|
||||
|
||||
## GROUP Logical
|
||||
Returns True iff the object contains the given `key`.
|
||||
contains_key : Text -> Boolean
|
||||
contains_key self key:Text = has_property self.js_object key
|
||||
|
||||
## Get a value for a key of the object, or a default value if that key is not present.
|
||||
|
||||
Arguments:
|
||||
@ -138,7 +147,7 @@ type JS_Object
|
||||
- if_missing: The value to return if the key is not found.
|
||||
@key make_field_name_selector
|
||||
get : Text -> Any -> JS_Object | Boolean | Number | Nothing | Text | Vector
|
||||
get self key ~if_missing=Nothing =
|
||||
get self key:Text ~if_missing=Nothing =
|
||||
if (has_property self.js_object key) . not then if_missing else
|
||||
value = get_value self.js_object key
|
||||
make_enso value
|
||||
@ -151,7 +160,7 @@ type JS_Object
|
||||
- key: The key to get.
|
||||
@key make_field_name_selector
|
||||
at : Text -> JS_Object | Boolean | Number | Nothing | Text | Vector ! No_Such_Key
|
||||
at self key = self.get key (Error.throw (No_Such_Key.Error self key))
|
||||
at self key:Text = self.get key (Error.throw (No_Such_Key.Error self key))
|
||||
|
||||
## GROUP Metadata
|
||||
Get the keys of the object.
|
||||
@ -201,6 +210,31 @@ type JS_Object
    to_default_visualization_data self =
        render self

    ## Creates an Enso object from the JS_Object.
    into : Any -> Any
    into self target_type = case target_type of
        JS_Object -> self
        Vector -> self.to_vector
        Map -> Map.from_pairs self.to_vector
        _ ->
            ## First try a conversion
            Panic.catch No_Such_Conversion (self.to target_type) _->
                ## If that fails, try to construct the type
                meta = Meta.meta target_type
                meta_type = if meta.is_a Meta.Type then meta else Meta.meta (Meta.type_of target_type)
                constructors = meta_type.constructors
                filtered = if self.contains_key "constructor" then constructors.filter c-> c.name == self.get "constructor" else constructors
                case filtered.length of
                    0 -> Error.throw (Illegal_Argument.Error "Unable to build a "+target_type.to_text+" - cannot find the constructor.")
                    1 ->
                        constructor = filtered.at 0
                        field_names = constructor.fields
                        if field_names.all self.contains_key then constructor.new (field_names.map self.at) else
                            missing_fields = field_names.filter (f-> self.contains_key f . not)
                            Error.throw (Illegal_Argument.Error "Unable to build a "+target_type.to_text+" - the constructor "+constructor.name+"'s fields "+missing_fields.to_display_text+" not found in the source object.")
                    _ -> Error.throw (Illegal_Argument.Error "Unable to build a "+target_type.to_text+" cannot find unique constructor.")

## PRIVATE
type JS_Object_Comparator
    ## PRIVATE
@ -264,7 +298,18 @@ make_enso js_object =
        proxy = Array_Proxy.new js_object.length (i-> make_enso (js_object.at i))
        Vector.from_polyglot_array proxy
    _ : JS_Object -> js_object
    _ -> JS_Object.Value js_object
    _ ->
        wrapped = JS_Object.Value js_object

        ## Handle deserializing date and time types.
        type_name = wrapped.get "type"
        parsed = if type_name == "Date" then Date.from wrapped else
            if type_name == "Date_Time" then Date_Time.from wrapped else
                if type_name == "Time_Of_Day" then Time_Of_Day.from wrapped else
                    wrapped

        if parsed.is_error then wrapped else parsed

## PRIVATE
   Internal function to convert any JS_Objects into their native JS objects before passing to JS.
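A sketch of how the restored `into` can populate a user-defined constructor from parsed JSON; the `Person` type and the JSON text below are made up for illustration:

    from Standard.Base import all

    type Person
        Value first_name last_name

    main =
        json = '{"constructor": "Value", "first_name": "Ada", "last_name": "Lovelace"}'
        # `into` first tries a `Person.from JS_Object` conversion; since none exists,
        # it falls back to matching the constructor's fields against the object's keys.
        person = json.parse_json . into Person
        person.first_name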
@ -1,6 +1,6 @@
|
||||
import project.Any.Any
|
||||
import project.Data.Ordering.Comparable
|
||||
import project.Data.Locale.Locale
|
||||
import project.Data.Ordering.Comparable
|
||||
import project.Data.Text.Text
|
||||
import project.Error.Error
|
||||
import project.Errors.Common.Arithmetic_Error
|
||||
|
@ -10,7 +10,6 @@ import project.Data.Time.Date_Range.Date_Range
|
||||
import project.Data.Time.Date_Time.Date_Time
|
||||
import project.Data.Time.Date_Time_Formatter.Date_Time_Formatter
|
||||
import project.Data.Time.Day_Of_Week.Day_Of_Week
|
||||
import project.Data.Time.Day_Of_Week_From
|
||||
import project.Data.Time.Duration.Duration
|
||||
import project.Data.Time.Period.Period
|
||||
import project.Data.Time.Time_Of_Day.Time_Of_Day
|
||||
@ -827,3 +826,8 @@ is_weekend date =
## PRIVATE
fits_in_range start end date =
    (start <= date) && (date < end)

## PRIVATE
Date.from (that:JS_Object) =
    if that.get "type" == "Date" && ["year", "month", "day"].all that.contains_key then Date.new (that.get "year") (that.get "month") (that.get "day") else
        Error.throw (Illegal_Argument.Error "Invalid JS_Object for Date.from.")
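Under these conversions, a JSON object tagged with "type": "Date" now deserializes straight into a `Date`; a minimal sketch:

    from Standard.Base import all

    main =
        parsed = '{"type": "Date", "year": 2023, "month": 9, "day": 29}'.parse_json
        # `make_enso` notices the "Date" tag and applies `Date.from`, so `parsed` is a `Date`.
        parsed == (Date.new 2023 9 29)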
@ -9,7 +9,6 @@ import project.Data.Time.Date.Date
|
||||
import project.Data.Time.Date_Period.Date_Period
|
||||
import project.Data.Time.Date_Time_Formatter.Date_Time_Formatter
|
||||
import project.Data.Time.Day_Of_Week.Day_Of_Week
|
||||
import project.Data.Time.Day_Of_Week_From
|
||||
import project.Data.Time.Duration.Duration
|
||||
import project.Data.Time.Period.Period
|
||||
import project.Data.Time.Time_Of_Day.Time_Of_Day
|
||||
@ -18,6 +17,7 @@ import project.Data.Time.Time_Zone.Time_Zone
|
||||
import project.Data.Vector.Vector
|
||||
import project.Error.Error
|
||||
import project.Errors.Common.Type_Error
|
||||
import project.Errors.Illegal_Argument.Illegal_Argument
|
||||
import project.Errors.Time_Error.Time_Error
|
||||
import project.Meta
|
||||
import project.Nothing.Nothing
|
||||
@ -858,3 +858,13 @@ type Date_Time
    format : Date_Time_Formatter -> Text
    format self format:Date_Time_Formatter =
        format.format_date_time self

## PRIVATE
Date_Time.from (that:JS_Object) =
    ## Must have year, month, day, hour and minute. Optional for second, nanosecond and zone.
    case that.get "type" == "Date_Time" && ["year", "month", "day", "hour", "minute"].all that.contains_key of
        True ->
            time_zone = if that.contains_key "zone" . not then Time_Zone.system else that.get "zone" . into Time_Zone
            if that.contains_key "nanosecond" && (that.contains_key "second" . not) then Error.throw (Illegal_Argument.Error "Invalid JS_Object for Date_Time.from.") else
                Date_Time.new (that.get "year") (that.get "month") (that.get "day") (that.get "hour") (that.get "minute") (that.get "second" 0) nanosecond=(that.get "nanosecond" 0) zone=time_zone
        False -> Error.throw (Illegal_Argument.Error "Invalid JS_Object for Date_Time.from.")
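The `Date_Time` conversion works the same way, with `second`, `nanosecond` and `zone` being optional; a sketch:

    from Standard.Base import all

    main =
        json = '{"type": "Date_Time", "year": 2023, "month": 9, "day": 29, "hour": 12, "minute": 30}'
        dt = Date_Time.from (json.parse_json)
        # No "zone" key was supplied, so the system time zone is used and seconds default to 0.
        dt.hour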
@ -8,20 +8,19 @@ import project.Error.Error
|
||||
import project.Errors.Illegal_Argument.Illegal_Argument
|
||||
import project.Errors.Problem_Behavior.Problem_Behavior
|
||||
import project.Errors.Time_Error.Time_Error
|
||||
import project.Nothing.Nothing
|
||||
import project.Panic.Panic
|
||||
from project.Data.Boolean import Boolean, False, True
|
||||
|
||||
import project.Internal.Time.Format.Analyzer.Analyzer
|
||||
import project.Internal.Time.Format.As_Java_Formatter_Interpreter
|
||||
import project.Internal.Time.Format.Parser
|
||||
import project.Internal.Time.Format.Tokenizer.Tokenizer
|
||||
import project.Nothing.Nothing
|
||||
import project.Panic.Panic
|
||||
from project.Data.Boolean import Boolean, False, True
|
||||
|
||||
polyglot java import java.lang.Exception as JException
|
||||
polyglot java import java.time.format.DateTimeFormatter
|
||||
polyglot java import org.enso.base.time.EnsoDateTimeFormatter
|
||||
polyglot java import org.enso.base.time.FormatterCacheKey
|
||||
polyglot java import org.enso.base.time.FormatterCache
|
||||
polyglot java import org.enso.base.time.FormatterCacheKey
|
||||
polyglot java import org.enso.base.time.FormatterKind
|
||||
|
||||
type Date_Time_Formatter
|
||||
|
@ -1,4 +1,6 @@
|
||||
import project.Data.Numbers.Integer
|
||||
import project.Error.Error
|
||||
import project.Errors.Illegal_Argument.Illegal_Argument
|
||||
from project.Data.Boolean import Boolean, False, True
|
||||
from project.Data.Ordering import all
|
||||
|
||||
@ -76,3 +78,23 @@ type Day_Of_Week_Comparator

## PRIVATE
Comparable.from (_:Day_Of_Week) = Day_Of_Week_Comparator

## Convert from an integer to a Day_Of_Week

   Arguments:
   - `that`: The integer to convert.
   - `first_day`: The first day of the week.
   - `start_at_zero`: If True, the first day of the week is 0; otherwise it is 1.
Day_Of_Week.from (that : Integer) (first_day:Day_Of_Week=Day_Of_Week.Sunday) (start_at_zero:Boolean=False) =
    shifted = if start_at_zero then that else that - 1

    case (shifted < 0) || (shifted > 6) of
        True ->
            valid_range = if start_at_zero then "0-6" else "1-7"
            message = "Invalid day of week (must be " + valid_range + ")."
            Error.throw (Illegal_Argument.Error message)
        False ->
            day_number = if first_day == Day_Of_Week.Sunday then shifted else
                (shifted + (first_day.to_integer start_at_zero=True)) % 7

            [Day_Of_Week.Sunday, Day_Of_Week.Monday, Day_Of_Week.Tuesday, Day_Of_Week.Wednesday, Day_Of_Week.Thursday, Day_Of_Week.Friday, Day_Of_Week.Saturday].at day_number
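With the conversion now living next to the type, integers can be converted directly; a sketch of the two numbering schemes:

    from Standard.Base import all

    main =
        # Default: 1-based with the week starting on Sunday, so 1 is Sunday.
        sunday = Day_Of_Week.from 1
        # 0-based with the week starting on Monday, so 0 is Monday.
        monday = Day_Of_Week.from 0 first_day=Day_Of_Week.Monday start_at_zero=True
        [sunday, monday]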
@ -1,25 +0,0 @@
|
||||
import project.Data.Numbers.Integer
|
||||
import project.Data.Time.Day_Of_Week.Day_Of_Week
|
||||
import project.Error.Error
|
||||
import project.Errors.Illegal_Argument.Illegal_Argument
|
||||
from project.Data.Boolean import Boolean, False, True
|
||||
|
||||
## Convert from an integer to a Day_Of_Week
|
||||
|
||||
Arguments:
|
||||
- `that`: The first day of the week.
|
||||
- `first_day`: The first day of the week.
|
||||
- `start_at_zero`: If True, first day of the week is 0 otherwise is 1.
|
||||
Day_Of_Week.from (that : Integer) (first_day:Day_Of_Week=Day_Of_Week.Sunday) (start_at_zero:Boolean=False) =
|
||||
shifted = if start_at_zero then that else that - 1
|
||||
|
||||
case (shifted < 0) || (shifted > 6) of
|
||||
True ->
|
||||
valid_range = if start_at_zero then "0-6" else "1-7"
|
||||
message = "Invalid day of week (must be " + valid_range + ")."
|
||||
Error.throw (Illegal_Argument.Error message)
|
||||
False ->
|
||||
day_number = if first_day == Day_Of_Week.Sunday then shifted else
|
||||
(shifted + (first_day.to_integer start_at_zero=True)) % 7
|
||||
|
||||
[Day_Of_Week.Sunday, Day_Of_Week.Monday, Day_Of_Week.Tuesday, Day_Of_Week.Wednesday, Day_Of_Week.Thursday, Day_Of_Week.Friday, Day_Of_Week.Saturday].at day_number
|
@ -471,3 +471,12 @@ type Time_Of_Day
    format : Date_Time_Formatter -> Text
    format self format:Date_Time_Formatter =
        format.format_time self

## PRIVATE
Time_Of_Day.from (that:JS_Object) =
    ## Must have hour and minute but second and nanosecond are optional
    case that.get "type" == "Time_Of_Day" && ["hour", "minute"].all that.contains_key of
        True ->
            if that.contains_key "nanosecond" && (that.contains_key "second" . not) then Error.throw (Illegal_Argument.Error "Invalid JS_Object for Time_Of_Day.from.") else
                Time_Of_Day.new (that.get "hour") (that.get "minute") (that.get "second" 0) nanosecond=(that.get "nanosecond" 0)
        False -> Error.throw (Illegal_Argument.Error "Invalid JS_Object for Time_Of_Day.from.")
@ -4,6 +4,7 @@ import project.Data.Numbers.Integer
|
||||
import project.Data.Text.Text
|
||||
import project.Data.Time.Date_Time.Date_Time
|
||||
import project.Error.Error
|
||||
import project.Errors.Illegal_Argument.Illegal_Argument
|
||||
import project.Errors.Time_Error.Time_Error
|
||||
import project.Panic.Panic
|
||||
from project.Data.Boolean import Boolean, False, True
|
||||
@ -177,10 +178,16 @@ type Time_Zone
    to_js_object : JS_Object
    to_js_object self =
        type_pair = ["type", "Time_Zone"]
        cons_pair = ["constructor", "new"]
        cons_pair = ["constructor", "parse"]
        JS_Object.from_pairs [type_pair, cons_pair, ["id", self.zone_id]]

    ## PRIVATE
       Convert to a display representation of this Time_Zone.
    to_display_text : Text
    to_display_text self = self.to_text

## PRIVATE
Time_Zone.from (that:JS_Object) =
    if that.get "type" == "Time_Zone" && ["id"].all that.contains_key then Time_Zone.parse (that.get "id") else
        Error.throw (Illegal_Argument.Error "Invalid JS_Object for Time_Zone.from.")
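The serialization fix plus the new conversion let `Time_Zone` round-trip through JSON; a sketch:

    from Standard.Base import all

    main =
        zone = Time_Zone.parse "Europe/Paris"
        # `to_js_object` now records the "parse" constructor, and `Time_Zone.from`
        # rebuilds the zone from the "id" field.
        Time_Zone.from (zone.to_js_object)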
@ -3,12 +3,10 @@ import project.Data.Text.Text
|
||||
import project.Data.Time.Date.Date
|
||||
import project.Data.Vector.Vector
|
||||
import project.Errors.Illegal_Argument.Illegal_Argument
|
||||
import project.Internal.Time.Format.Analyzer.Analyzer
|
||||
import project.Panic.Panic
|
||||
from project.Data.Boolean import Boolean, False, True
|
||||
|
||||
import project.Internal.Time.Format.Analyzer.Analyzer
|
||||
from project.Internal.Time.Format.Parser import Common_Nodes, Standard_Date_Patterns, ISO_Week_Year_Patterns, Time_Patterns, Time_Zone_Patterns
|
||||
from project.Internal.Time.Format.Parser import Text_Representation, Numeric_Representation, Two_Digit_Year_Representation
|
||||
from project.Internal.Time.Format.Parser import Common_Nodes, ISO_Week_Year_Patterns, Numeric_Representation, Standard_Date_Patterns, Text_Representation, Time_Patterns, Time_Zone_Patterns, Two_Digit_Year_Representation
|
||||
|
||||
polyglot java import java.time.format.DateTimeFormatter
|
||||
polyglot java import java.time.format.DateTimeFormatterBuilder
|
||||
|
@ -7,14 +7,13 @@ import project.Data.Vector.Builder as Vector_Builder
|
||||
import project.Data.Vector.Vector
|
||||
import project.Error.Error
|
||||
import project.Errors.Illegal_State.Illegal_State
|
||||
import project.Internal.Time.Format.Tokenizer.Format_Token
|
||||
import project.Nothing.Nothing
|
||||
import project.Panic.Panic
|
||||
import project.Runtime.Ref.Ref
|
||||
from project.Data.Boolean import Boolean, False, True
|
||||
from project.Data.Text.Extensions import all
|
||||
|
||||
import project.Internal.Time.Format.Tokenizer.Format_Token
|
||||
|
||||
## PRIVATE
|
||||
type Text_Representation
|
||||
## PRIVATE
|
||||
@ -161,6 +160,7 @@ type Parser
|
||||
any_pattern
|
||||
_ -> Panic.throw (Illegal_State.Error "Unexpected (here) token type: "+token.to_text)
|
||||
|
||||
## PRIVATE
|
||||
fail_invalid_pattern self character count extra_message="" =
|
||||
Panic.throw (Date_Time_Format_Parse_Error.Error "The pattern "+(character*count)+" is not a valid pattern for the "+self.mode.pattern_format_name+" format."+extra_message)
|
||||
|
||||
|
@ -42,7 +42,6 @@ import project.Data.Time.Date_Range.Date_Range
|
||||
import project.Data.Time.Date_Time.Date_Time
|
||||
import project.Data.Time.Date_Time_Formatter.Date_Time_Formatter
|
||||
import project.Data.Time.Day_Of_Week.Day_Of_Week
|
||||
import project.Data.Time.Day_Of_Week_From
|
||||
import project.Data.Time.Duration.Duration
|
||||
import project.Data.Time.Period.Period
|
||||
import project.Data.Time.Time_Of_Day.Time_Of_Day
|
||||
@ -131,7 +130,6 @@ export project.Data.Time.Date_Range.Date_Range
|
||||
export project.Data.Time.Date_Time.Date_Time
|
||||
export project.Data.Time.Date_Time_Formatter.Date_Time_Formatter
|
||||
export project.Data.Time.Day_Of_Week.Day_Of_Week
|
||||
export project.Data.Time.Day_Of_Week_From
|
||||
export project.Data.Time.Duration.Duration
|
||||
export project.Data.Time.Period.Period
|
||||
export project.Data.Time.Time_Of_Day.Time_Of_Day
|
||||
|
@ -1,6 +1,14 @@
|
||||
import project.Any.Any
|
||||
import project.Data
|
||||
import project.Data.Pair.Pair
|
||||
import project.Data.Text.Text
|
||||
import project.Data.Vector.Vector
|
||||
import project.Errors.Common.Syntax_Error
|
||||
import project.Network.HTTP.Header.Header
|
||||
import project.Network.HTTP.HTTP_Method.HTTP_Method
|
||||
import project.Network.HTTP.Request_Body.Request_Body
|
||||
import project.Network.URI.URI
|
||||
from project.Data.Boolean import Boolean, False, True
|
||||
|
||||
## ALIAS parse_uri, uri from text
   GROUP Conversions
@ -16,3 +24,60 @@ import project.Network.URI.URI
       example_parse = "http://example.com".to_uri
Text.to_uri : URI ! Syntax_Error
Text.to_uri self = URI.parse self

## ALIAS download, http get
   GROUP Input
   Fetches from the URI and returns the response, parsing the body if the
   content-type is recognised. Returns an error if the status code does not
   represent a successful response.

   Arguments:
   - method: The HTTP method to use. Must be one of `HTTP_Method.Get`,
     `HTTP_Method.Head`, `HTTP_Method.Delete`, `HTTP_Method.Options`.
     Defaults to `HTTP_Method.Get`.
   - headers: The headers to send with the request. Defaults to an empty vector.
   - try_auto_parse_response: If successful should the body be attempted to be
     parsed to an Enso native object.
URI.fetch : HTTP_Method -> Vector (Header | Pair Text Text) -> Boolean -> Any
URI.fetch self (method:HTTP_Method=HTTP_Method.Get) headers=[] try_auto_parse_response=True =
    Data.fetch self method headers try_auto_parse_response

## ALIAS upload, http post
   GROUP Input
   Writes the provided data to the provided URI. Returns the response,
   parsing the body if the content-type is recognised. Returns an error if the
   status code does not represent a successful response.

   Arguments:
   - body: The data to write. See `Supported Body Types` below.
   - method: The HTTP method to use. Must be one of `HTTP_Method.Post`,
     `HTTP_Method.Put`, `HTTP_Method.Patch`. Defaults to `HTTP_Method.Post`.
   - headers: The headers to send with the request. Defaults to an empty vector.
   - try_auto_parse_response: If successful should the body be attempted to be
     parsed to an Enso native object.

   ! Specifying Content Types

     If the `body` parameter specifies an explicit content type, then it is an
     error to also specify additional `Content-Type` headers in the `headers`
     parameter. (It is not an error to specify multiple `Content-Type` values in
     `headers`, however.)

   ! Supported Body Types

     - Request_Body.Text: Sends a text string, with optional encoding and content
       type.
     - Request_Body.Json: Sends an Enso object, after converting it to JSON.
     - Request_Body.Binary: Sends a file.
     - Request_Body.Form_Data: Sends a form encoded as key/value pairs. The keys
       must be `Text`, and the values must be `Text` or `File`.
     - Request_Body.Empty: Sends an empty body.

     Additionally, the following types are allowed as the `body` parameter:

     - Text: shorthand for `Request_Body.Text that_text`.
     - File: shorthand for `Request_Body.Binary that_file`.
     - Any other Enso object: shorthand for `Request_Body.Json that_object`.
URI.post : Request_Body -> HTTP_Method -> Vector (Header | Pair Text Text) -> Boolean -> Any
URI.post self (body:Request_Body=Request_Body.Empty) (method:HTTP_Method=HTTP_Method.Post) (headers:(Vector (Header | Pair Text Text))=[]) (try_auto_parse_response:Boolean=True) =
    Data.post self body method headers try_auto_parse_response
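With the extensions moved here, requests can be chained straight off a `URI`; a sketch with a hypothetical endpoint:

    from Standard.Base import all

    main =
        uri = "https://example.com/api/items".to_uri
        # GET via the `fetch` extension...
        items = uri.fetch
        # ...and POST a JSON body via the new `post` extension; the object is sent
        # as `Request_Body.Json`.
        response = uri.post (JS_Object.from_pairs [["name", "widget"]])
        [items, response]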
@ -153,7 +153,7 @@ type HTTP
|
||||
|
||||
## PRIVATE
|
||||
Static helper for get-like methods
|
||||
fetch : (URI | Text) -> HTTP_Method -> Vector (Header | Pair Text Text) -> Boolean -> Any
|
||||
fetch : (URI | Text) -> HTTP_Method -> Vector (Header | Pair Text Text) -> Any
|
||||
fetch (uri:(URI | Text)) (method:HTTP_Method=HTTP_Method.Get) (headers:(Vector (Header | Pair Text Text))=[]) =
|
||||
check_method fetch_methods method <|
|
||||
request = Request.new method uri (parse_headers headers) Request_Body.Empty
|
||||
@ -161,7 +161,7 @@ type HTTP
|
||||
|
||||
## PRIVATE
|
||||
Static helper for post-like methods
|
||||
post : (URI | Text) -> Request_Body -> HTTP_Method -> Vector (Header | Pair Text Text) -> Boolean -> Any
|
||||
post : (URI | Text) -> Request_Body -> HTTP_Method -> Vector (Header | Pair Text Text) -> Any
|
||||
post (uri:(URI | Text)) (body:Request_Body=Request_Body.Empty) (method:HTTP_Method=HTTP_Method.Post) (headers:(Vector (Header | Pair Text Text))=[]) =
|
||||
check_method post_methods method <|
|
||||
request = Request.new method uri (parse_headers headers) body
|
||||
@ -280,7 +280,7 @@ post_methods : Set HTTP_Method
|
||||
post_methods = Set.from_vector [HTTP_Method.Post, HTTP_Method.Put, HTTP_Method.Patch, HTTP_Method.Delete]
|
||||
|
||||
## PRIVATE
|
||||
check_method : Set HTTP_Method -> Any -> Any ! Illegal_Argument
|
||||
check_method : Set HTTP_Method -> Any -> Any -> Any ! Illegal_Argument
|
||||
check_method allowed_methods method ~action =
|
||||
if allowed_methods.contains method then action else
|
||||
Error.throw (Illegal_Argument.Error ("Unsupported method " + method.to_display_text))
|
||||
|
@ -1,4 +1,7 @@
|
||||
import project.Data.Text.Case.Case
|
||||
import project.Data.Text.Text
|
||||
from project.Data.Text.Extensions import all
|
||||
|
||||
|
||||
type HTTP_Method
|
||||
## The HTTP method "OPTIONS".
|
||||
@ -46,3 +49,17 @@ type HTTP_Method
        HTTP_Method.Trace -> "TRACE"
        HTTP_Method.Connect -> "CONNECT"
        HTTP_Method.Custom verb -> verb

## PRIVATE
   Converts from Text to an HTTP_Method.
HTTP_Method.from (that:Text) = case that.to_case Case.Upper of
    "OPTIONS" -> HTTP_Method.Options
    "GET" -> HTTP_Method.Get
    "HEAD" -> HTTP_Method.Head
    "POST" -> HTTP_Method.Post
    "PUT" -> HTTP_Method.Put
    "PATCH" -> HTTP_Method.Patch
    "DELETE" -> HTTP_Method.Delete
    "TRACE" -> HTTP_Method.Trace
    "CONNECT" -> HTTP_Method.Connect
    _ -> HTTP_Method.Custom that
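Because of this conversion, public APIs that type-check their `method` argument can now also accept plain text; a sketch (the URL is hypothetical):

    from Standard.Base import all

    main =
        # The conversion is case-insensitive, so "get" resolves to `HTTP_Method.Get`...
        get = HTTP_Method.from "get"
        # ...and unknown verbs fall back to `HTTP_Method.Custom`.
        custom = HTTP_Method.from "PURGE"
        # Thanks to the `method:HTTP_Method` type check, Text can be passed directly.
        response = Data.fetch "https://example.com" method="HEAD"
        [get, custom, response]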
@ -184,6 +184,7 @@ type Header
|
||||
to_display_text : Text
|
||||
to_display_text self = self.name + ": " + self.value.to_display_text
|
||||
|
||||
## Gets the name for content_type
|
||||
content_type_header_name : Text
|
||||
content_type_header_name = "Content-Type"
|
||||
|
||||
|
@ -32,7 +32,7 @@ type Request
|
||||
import Standard.Base.Network.URI.URI
|
||||
|
||||
example_new = Request.new Method.Post (URI.parse "http://example.com")
|
||||
new : HTTP_Method -> (Text | URI) -> Vector -> Request_Body -> Request
|
||||
new : HTTP_Method -> (Text | URI) -> Vector Header -> Request_Body -> Request
|
||||
new (method:HTTP_Method) (url:(Text | URI)) (headers:(Vector Header)=[]) (body:Request_Body=Request_Body.Empty) =
|
||||
Panic.recover Any (Request.Value method (Panic.rethrow (url.to_uri)) headers body)
|
||||
|
||||
|
@ -53,6 +53,11 @@ type Request_Body
        Request_Body.Form_Data _ url_encoded -> if url_encoded then Header.application_x_www_form_urlencoded else Nothing
        Request_Body.Empty -> Nothing

## PRIVATE
Request_Body.from (that:Text) = Request_Body.Text that

## PRIVATE
Request_Body.from (that:File) = Request_Body.Binary that

## PRIVATE
Request_Body.from (that:Any) = Request_Body.Json that
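These conversions are what let `Data.post` and `URI.post` accept plain values as the body; a sketch of the shorthands (the file path is made up):

    from Standard.Base import all

    main =
        # Text becomes `Request_Body.Text`...
        text_body = Request_Body.from "hello"
        # ...a File becomes `Request_Body.Binary`...
        file_body = Request_Body.from (File.new "data.bin")
        # ...and any other object becomes `Request_Body.Json`.
        json_body = Request_Body.from (JS_Object.from_pairs [["id", 1]])
        [text_body, file_body, json_body]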
@ -1,10 +1,9 @@
|
||||
import project.Data.Locale.Locale
|
||||
import project.Data.Time.Date.Date
|
||||
import project.Data.Time.Date_Time.Date_Time
|
||||
import project.Data.Time.Time_Of_Day.Time_Of_Day
|
||||
import project.Data.Time.Date_Time_Formatter.Date_Time_Formatter
|
||||
import project.Data.Time.Time_Of_Day.Time_Of_Day
|
||||
import project.Meta
|
||||
|
||||
import project.Metadata.Widget
|
||||
from project.Metadata import make_single_choice
|
||||
|
||||
|
@ -1522,8 +1522,8 @@ type Column
|
||||
## GROUP Standard.Base.Conversions
|
||||
Formatting values is not supported in database columns.
|
||||
@locale Locale.default_widget
|
||||
format : Text | Date_Time_Formatter | Column -> Locale -> Column ! Illegal_Argument
|
||||
format self (format : Text | Date_Time_Formatter | Column | Nothing)=Nothing locale=Locale.default =
|
||||
format : Text | Date_Time_Formatter | Column | Nothing -> Locale -> Column ! Illegal_Argument
|
||||
format self format:(Text | Date_Time_Formatter | Column | Nothing)=Nothing locale=Locale.default =
|
||||
_ = [format, locale]
|
||||
Error.throw <| Unsupported_Database_Operation.Error "`Column.format` is not implemented yet for the Database backends."
|
||||
|
||||
@ -1754,3 +1754,8 @@ adapt_unified_column column expected_type =
|
||||
A shorthand to be able to share the implementations between in-memory and
|
||||
database.
|
||||
simple_unary_op column op_kind = column.make_unary_op op_kind
|
||||
|
||||
## PRIVATE
|
||||
Materialized_Column.from (that:Column) =
|
||||
_ = [that]
|
||||
Error.throw (Illegal_Argument.Error "Currently cross-backend operations are not supported. Materialize the column using `.read` before mixing it with an in-memory Table.")
|
||||
|
@ -2408,3 +2408,8 @@ default_join_condition : Table -> Join_Kind | Join_Kind_Cross -> Join_Condition
|
||||
default_join_condition table join_kind = case join_kind of
|
||||
Join_Kind_Cross.Cross -> []
|
||||
_ -> [Join_Condition.Equals table.column_names.first]
|
||||
|
||||
## PRIVATE
|
||||
Materialized_Table.from (that:Table) =
|
||||
_ = [that]
|
||||
Error.throw (Illegal_Argument.Error "Currently cross-backend operations are not supported. Materialize the table using `.read` before mixing it with an in-memory Table.")
|
||||
|
@ -0,0 +1,21 @@
from Standard.Base import all
import Standard.Base.Errors.Common.Type_Error

import Standard.Table.Internal.Widget_Helpers

import project.Data.Table.Table
import project.Errors.Unsupported_Database_Operation

## GROUP Standard.Base.Conversions
   Expand a column of objects to a new set of columns.

   Arguments:
   - column: The column to expand.
   - fields: The set of fields to expand. If `Nothing` then all fields are added.
   - prefix: Prefix to add to the column names. If `Nothing` then the column
     name is used.
@column Widget_Helpers.make_column_name_selector
Table.expand_column : Text | Integer -> Vector | Nothing -> Text | Table -> Table ! Type_Error
Table.expand_column self column fields=Nothing prefix=Nothing =
    _ = [column, fields, prefix]
    Error.throw (Unsupported_Database_Operation.Error "Table.expand_column is currently not implemented for the Database backend. You may download the table to memory using `.read` to use this feature.")
@ -129,7 +129,7 @@ Table.select_into_database_table self connection (table_name : Text) primary_key
|
||||
More expensive checks, like clashing keys or unmatched rows are checked
|
||||
only on a sample of rows, so errors may still occur when the output action
|
||||
is enabled.
|
||||
Table.update_rows : Table -> Update_Action -> Vector Text | Nothing -> Boolean -> Problem_Behavior -> Table ! Table_Not_Found | Unmatched_Columns | Missing_Input_Columns | Column_Type_Mismatch | SQL_Error | Illegal_Argument
|
||||
Table.update_rows : Table | In_Memory_Table -> Update_Action -> Vector Text | Nothing -> Boolean -> Problem_Behavior -> Table ! Table_Not_Found | Unmatched_Columns | Missing_Input_Columns | Column_Type_Mismatch | SQL_Error | Illegal_Argument
|
||||
Table.update_rows self (source_table : Table | In_Memory_Table) (update_action : Update_Action = Update_Action.Update_Or_Insert) (key_columns : Vector | Nothing = default_key_columns self) (error_on_missing_columns : Boolean = False) (on_problems : Problem_Behavior = Problem_Behavior.Report_Warning) =
|
||||
common_update_table source_table self update_action key_columns error_on_missing_columns on_problems
|
||||
|
||||
@ -188,6 +188,6 @@ Table.update_rows self (source_table : Table | In_Memory_Table) (update_action :
|
||||
columns, and returns the count of rows that would have been deleted by this
|
||||
operation, with a `Dry_Run_Operation` warning attached.
|
||||
@key_columns Widget_Helpers.make_column_name_vector_selector
|
||||
Table.delete_rows : Table -> Vector Text | Nothing -> Boolean -> Integer ! Missing_Input_Columns | SQL_Error
|
||||
Table.delete_rows self (key_values_to_delete : Table | In_Memory_Table) (key_columns : Vector Text = default_key_columns_required self) (allow_duplicate_matches : Boolean = False) =
|
||||
Table.delete_rows : Table | In_Memory_Table -> Vector Text | Nothing -> Boolean -> Integer ! Missing_Input_Columns | SQL_Error
|
||||
Table.delete_rows self (key_values_to_delete : Table | In_Memory_Table) (key_columns : Vector Text | Nothing = default_key_columns_required self) (allow_duplicate_matches : Boolean = False) =
|
||||
common_delete_rows self key_values_to_delete key_columns allow_duplicate_matches
|
||||
|
@ -120,8 +120,8 @@ Table.select_into_database_table self connection (table_name : Text) primary_key
|
||||
More expensive checks, like clashing keys or unmatched rows are checked
|
||||
only on a sample of rows, so errors may still occur when the output action
|
||||
is enabled.
|
||||
Table.update_rows : Table | Database_Table -> Update_Action -> Vector Text | Nothing -> Boolean -> Problem_Behavior -> Database_Table ! Table_Not_Found | Unmatched_Columns | Missing_Input_Columns | Column_Type_Mismatch | SQL_Error | Illegal_Argument
|
||||
Table.update_rows self (source_table : Table | Database_Table) (update_action : Update_Action = Update_Action.Update_Or_Insert) (key_columns : Vector | Nothing = Nothing) (error_on_missing_columns : Boolean = False) (on_problems : Problem_Behavior = Problem_Behavior.Report_Warning) =
|
||||
Table.update_rows : Database_Table | Table -> Update_Action -> Vector Text | Nothing -> Boolean -> Problem_Behavior -> Database_Table ! Table_Not_Found | Unmatched_Columns | Missing_Input_Columns | Column_Type_Mismatch | SQL_Error | Illegal_Argument
|
||||
Table.update_rows self (source_table : Database_Table | Table) (update_action : Update_Action = Update_Action.Update_Or_Insert) (key_columns : Vector | Nothing = Nothing) (error_on_missing_columns : Boolean = False) (on_problems : Problem_Behavior = Problem_Behavior.Report_Warning) =
|
||||
_ = [source_table, update_action, key_columns, error_on_missing_columns, on_problems]
|
||||
Error.throw (Illegal_Argument.Error "Table.update_rows modifies the underlying table, so it is only supported for Database tables - in-memory tables are immutable.")
|
||||
|
||||
@ -180,7 +180,7 @@ Table.update_rows self (source_table : Table | Database_Table) (update_action :
|
||||
columns, and returns the count of rows that would have been deleted by this
|
||||
operation, with a `Dry_Run_Operation` warning attached.
|
||||
@key_columns Widget_Helpers.make_column_name_vector_selector
|
||||
Table.delete_rows : Table -> Vector Text | Nothing -> Boolean -> Integer ! Missing_Input_Columns | SQL_Error
|
||||
Table.delete_rows : Table | Database_Table -> Vector Text | Nothing -> Boolean -> Integer ! Missing_Input_Columns | SQL_Error
|
||||
Table.delete_rows self (key_values_to_delete : Table | Database_Table) (key_columns=[]) (allow_duplicate_matches : Boolean = False) =
|
||||
_ = [key_values_to_delete, key_columns, allow_duplicate_matches]
|
||||
Error.throw (Illegal_Argument.Error "Table.delete_rows modifies the underlying table, so it is only supported for Database tables - in-memory tables are immutable. If you want to create a copy of this table with some rows removed based on a key from another table, you can use a Left Exclusive join, e.g. `table.join key_values_to_delete join_kind=Join_Kind.Left_Exclusive on=key_columns`.")
|
||||
|
@ -297,7 +297,7 @@ make_batched_insert_template connection table_name column_names =
|
||||
template
|
||||
|
||||
## PRIVATE
|
||||
common_update_table (source_table : In_Memory_Table | Database_Table) (target_table : Database_Table) update_action key_columns error_on_missing_columns on_problems =
|
||||
common_update_table (source_table : Database_Table | In_Memory_Table) (target_table : Database_Table) update_action key_columns error_on_missing_columns on_problems =
|
||||
check_target_table_for_update target_table <|
|
||||
connection = target_table.connection
|
||||
Panic.recover SQL_Error <| handle_upload_errors <|
|
||||
|
@ -10,6 +10,7 @@ import project.Connection.SSL_Mode.SSL_Mode
|
||||
import project.Data.Column_Description.Column_Description
|
||||
import project.Data.SQL_Query.SQL_Query
|
||||
import project.Data.Update_Action.Update_Action
|
||||
import project.Extensions.Table_Conversions
|
||||
import project.Extensions.Upload_Database_Table
|
||||
import project.Extensions.Upload_In_Memory_Table
|
||||
from project.Connection.Postgres_Details.Postgres_Details import Postgres
|
||||
@ -27,6 +28,7 @@ export project.Connection.SSL_Mode.SSL_Mode
|
||||
export project.Data.Column_Description.Column_Description
|
||||
export project.Data.SQL_Query.SQL_Query
|
||||
export project.Data.Update_Action.Update_Action
|
||||
export project.Extensions.Table_Conversions
|
||||
export project.Extensions.Upload_Database_Table
|
||||
export project.Extensions.Upload_In_Memory_Table
|
||||
from project.Connection.Postgres_Details.Postgres_Details export Postgres
|
||||
|
@ -1,6 +1,5 @@
|
||||
from Standard.Base import all
|
||||
|
||||
import project.Data.Column.Column
|
||||
import project.Data.Sort_Column.Sort_Column
|
||||
|
||||
## Defines an Aggregate Column
|
||||
@ -11,7 +10,7 @@ type Aggregate_Column
|
||||
- column: the column (specified by name, expression or index) to group
|
||||
by.
|
||||
- new_name: name of new column.
|
||||
Group_By (column:Text|Integer|Column|Any) (new_name:Text="") # Column needed because of 6866
|
||||
Group_By (column:Text|Integer|Any) (new_name:Text="") # Any needed because of 6866
|
||||
|
||||
## Creates a new column with the row count of each group. If no rows,
|
||||
evaluates to 0.
|
||||
@ -31,7 +30,7 @@ type Aggregate_Column
|
||||
multiple selection.
|
||||
- new_name: name of new column.
|
||||
- ignore_nothing: if all values are Nothing won't be included.
|
||||
Count_Distinct (columns:(Text | Integer | Regex | Vector (Integer | Text | Regex | Column))=0) (new_name:Text="") (ignore_nothing:Boolean=False) # Column needed because of 6866
|
||||
Count_Distinct (columns:(Text | Integer | Regex | Vector (Integer | Text | Regex | Any))=0) (new_name:Text="") (ignore_nothing:Boolean=False) # Any needed because of 6866
|
||||
|
||||
## ALIAS Count_Not_Null
|
||||
|
||||
@ -41,7 +40,7 @@ type Aggregate_Column
|
||||
Arguments:
|
||||
- column: the column (specified by name, expression or index) to count.
|
||||
- new_name: name of new column.
|
||||
Count_Not_Nothing (column:Text|Integer|Column|Any=0) (new_name:Text="") # Column needed because of 6866
|
||||
Count_Not_Nothing (column:Text|Integer|Any=0) (new_name:Text="") # Any needed because of 6866
|
||||
|
||||
## ALIAS Count_Null, Count_Missing
|
||||
|
||||
@ -51,7 +50,7 @@ type Aggregate_Column
|
||||
Arguments:
|
||||
- column: the column (specified by name, expression or index) to count.
|
||||
- new_name: name of new column.
|
||||
Count_Nothing (column:Text|Integer|Column|Any=0) (new_name:Text="") # Column needed because of 6866
|
||||
Count_Nothing (column:Text|Integer|Any=0) (new_name:Text="") # Any needed because of 6866
|
||||
|
||||
## Creates a new column with the count of not `Nothing` (null) and non-empty
|
||||
("") values of the column within each group. If no rows, evaluates to 0.
|
||||
@ -59,7 +58,7 @@ type Aggregate_Column
|
||||
Arguments:
|
||||
- column: the column (specified by name, expression or index) to count.
|
||||
- new_name: name of new column.
|
||||
Count_Not_Empty (column:Text|Integer|Column|Any=0) (new_name:Text="") # Column needed because of 6866
|
||||
Count_Not_Empty (column:Text|Integer|Any=0) (new_name:Text="") # Any needed because of 6866
|
||||
|
||||
## Creates a new column with the count of `Nothing` (null) or empty ("")
|
||||
text values of the column within each group. If no rows, evaluates to 0.
|
||||
@ -67,7 +66,7 @@ type Aggregate_Column
|
||||
Arguments:
|
||||
- column: the column (specified by name, expression or index) to count.
|
||||
- new_name: name of new column.
|
||||
Count_Empty (column:Text|Integer|Column|Any=0) (new_name:Text="") # Column needed because of 6866
|
||||
Count_Empty (column:Text|Integer|Any=0) (new_name:Text="") # Any needed because of 6866
|
||||
|
||||
## Creates a new column with the sum of values (ignoring missing values) of
|
||||
the column within each group. If no rows, evaluates to `Nothing`.
|
||||
@ -75,7 +74,7 @@ type Aggregate_Column
|
||||
Arguments:
|
||||
- column: the column (specified by name, expression or index) to total.
|
||||
- new_name: name of new column.
|
||||
Sum (column:Text|Integer|Column|Any=0) (new_name:Text="") # Column needed because of 6866
|
||||
Sum (column:Text|Integer|Any=0) (new_name:Text="") # Any needed because of 6866
|
||||
|
||||
## Creates a new column with the mean of values (ignoring missing values) of
|
||||
the column within each group. If no rows, evaluates to `Nothing`.
|
||||
@ -83,7 +82,7 @@ type Aggregate_Column
|
||||
Arguments:
|
||||
- column: the column (specified by name, expression or index) to average.
|
||||
- new_name: name of new column.
|
||||
Average (column:Text|Integer|Column|Any=0) (new_name:Text="") # Column needed because of 6866
|
||||
Average (column:Text|Integer|Any=0) (new_name:Text="") # Any needed because of 6866
|
||||
|
||||
## Creates a new column with the median of values (ignoring missing values)
|
||||
of the column within each group. If no rows, evaluates to `Nothing`.
|
||||
@ -92,7 +91,7 @@ type Aggregate_Column
|
||||
- column: column (specified by name, expression or index) to calculate
|
||||
median on.
|
||||
- new_name: name of new column.
|
||||
Median (column:Text|Integer|Column|Any=0) (new_name:Text="") # Column needed because of 6866
|
||||
Median (column:Text|Integer|Any=0) (new_name:Text="") # Any needed because of 6866
|
||||
|
||||
## Creates a new column with the median of values (ignoring missing values)
|
||||
of the column within each group. If no rows, evaluates to `Nothing`.
|
||||
@ -102,7 +101,7 @@ type Aggregate_Column
|
||||
- column: column (specified by name, expression or index) to compute
|
||||
percentile.
|
||||
- new_name: name of new column.
|
||||
Percentile (percentile:Number=0.5) (column:Text|Integer|Column|Any=0) (new_name:Text="") # Column needed because of 6866
|
||||
Percentile (percentile:Number=0.5) (column:Text|Integer|Any=0) (new_name:Text="") # Any needed because of 6866
|
||||
|
||||
## Creates a new column with the mode of values (ignoring missing values)
|
||||
of the column within each group. If no rows, evaluates to `Nothing`.
|
||||
@ -111,7 +110,7 @@ type Aggregate_Column
|
||||
- column: column (specified by name, expression or index) to find the
|
||||
most common value.
|
||||
- new_name: name of new column.
|
||||
Mode (column:Text|Integer|Column|Any=0) (new_name:Text="") # Column needed because of 6866
|
||||
Mode (column:Text|Integer|Any=0) (new_name:Text="") # Any needed because of 6866
|
||||
|
||||
## Creates a new column with the standard deviation of values (ignoring
|
||||
missing values) of the column within each group. If no rows, evaluates to
|
||||
@ -122,7 +121,7 @@ type Aggregate_Column
|
||||
standard deviation.
|
||||
- new_name: name of new column.
|
||||
- population: specifies if group is a sample or the population
|
||||
Standard_Deviation (column:Text|Integer|Column|Any=0) (new_name:Text="") (population:Boolean=False) # Column needed because of 6866
|
||||
Standard_Deviation (column:Text|Integer|Any=0) (new_name:Text="") (population:Boolean=False) # Any needed because of 6866
|
||||
|
||||
## Creates a new column with the values concatenated together. `Nothing`
|
||||
values will become an empty string. If no rows, evaluates to `Nothing`.
|
||||
@ -135,7 +134,7 @@ type Aggregate_Column
|
||||
- suffix: added at the end of the result.
|
||||
- quote_char: character used to quote the values if the value is `Empty`
|
||||
or contains the separator.
|
||||
Concatenate (column:Text|Integer|Column|Any=0) (new_name:Text="") (separator:Text="") (prefix:Text="") (suffix:Text="") (quote_char:Text="") # Column needed because of 6866
|
||||
Concatenate (column:Text|Integer|Any=0) (new_name:Text="") (separator:Text="") (prefix:Text="") (suffix:Text="") (quote_char:Text="") # Any needed because of 6866
|
||||
|
||||
## Creates a new column with the first value in each group. If no rows,
|
||||
evaluates to `Nothing`.
|
||||
@ -148,7 +147,7 @@ type Aggregate_Column
|
||||
not missing value returned.
|
||||
- order_by: required for database tables. Specifies how to order the
|
||||
results within the group.
|
||||
First (column:Text|Integer|Column|Any=0) (new_name:Text="") (ignore_nothing:Boolean=True) (order_by:(Text | Vector (Text | Sort_Column) | Nothing)=Nothing) # Column needed because of 6866
|
||||
First (column:Text|Integer|Any=0) (new_name:Text="") (ignore_nothing:Boolean=True) (order_by:(Text | Vector (Text | Sort_Column) | Nothing)=Nothing) # Any needed because of 6866
|
||||
|
||||
## Creates a new column with the last value in each group. If no rows,
|
||||
evaluates to `Nothing`.
|
||||
@ -161,7 +160,7 @@ type Aggregate_Column
|
||||
not missing value returned.
|
||||
- order_by: required for database tables. Specifies how to order the
|
||||
results within the group.
|
||||
Last (column:Text|Integer|Column|Any=0) (new_name:Text="") (ignore_nothing:Boolean=True) (order_by:(Text | Vector (Text | Sort_Column) | Nothing)=Nothing) # Column needed because of 6866
|
||||
Last (column:Text|Integer|Any=0) (new_name:Text="") (ignore_nothing:Boolean=True) (order_by:(Text | Vector (Text | Sort_Column) | Nothing)=Nothing) # Any needed because of 6866
|
||||
|
||||
## Creates a new column with the maximum value in each group. If no rows,
|
||||
evaluates to `Nothing`.
|
||||
@ -170,7 +169,7 @@ type Aggregate_Column
|
||||
- column: column (specified by name, expression or index) to find the
|
||||
group maximum.
|
||||
- new_name: name of new column.
|
||||
Maximum (column:Text|Integer|Column|Any=0) (new_name:Text="") # Column needed because of 6866
|
||||
Maximum (column:Text|Integer|Any=0) (new_name:Text="") # Any needed because of 6866
|
||||
|
||||
## Creates a new column with the maximum value in each group. If no rows,
|
||||
evaluates to `Nothing`.
|
||||
@ -179,7 +178,7 @@ type Aggregate_Column
|
||||
- column: column (specified by name, expression or index) to find the
|
||||
group minimum.
|
||||
- new_name: name of new column.
|
||||
Minimum (column:Text|Integer|Column|Any=0) (new_name:Text="") # Column needed because of 6866
|
||||
Minimum (column:Text|Integer|Any=0) (new_name:Text="") # Any needed because of 6866
|
||||
|
||||
## Creates a new column with the shortest text in each group. If no rows,
|
||||
evaluates to `Nothing`.
|
||||
@ -188,7 +187,7 @@ type Aggregate_Column
|
||||
- column: column (specified by name, expression or index) to find the
|
||||
group shortest value.
|
||||
- new_name: name of new column.
|
||||
Shortest (column:Text|Integer|Column|Any=0) (new_name:Text="") # Column needed because of 6866
|
||||
Shortest (column:Text|Integer|Any=0) (new_name:Text="") # Any needed because of 6866
|
||||
|
||||
## Creates a new column with the longest text in each group. If no rows,
|
||||
evaluates to `Nothing`.
|
||||
@ -197,4 +196,4 @@ type Aggregate_Column
|
||||
- column: column (specified by name, expression or index) to find the
|
||||
group longest value.
|
||||
- new_name: name of new column.
|
||||
Longest (column:Text|Integer|Column|Any=0) (new_name:Text="") # Column needed because of 6866
|
||||
Longest (column:Text|Integer|Any=0) (new_name:Text="") # Any needed because of 6866
|
||||
|
@ -59,7 +59,7 @@ type Column
|
||||
|
||||
example_from_vector =
|
||||
Column.from_vector "My Column" [1, 2, 3, 4, 5]
|
||||
from_vector : Text -> Vector -> Value_Type | Auto -> Column ! Invalid_Value_Type
|
||||
from_vector : Text -> Vector -> Auto | Value_Type -> Column ! Invalid_Value_Type
|
||||
from_vector (name : Text) (items : Vector) (value_type : Auto | Value_Type = Auto) =
|
||||
## If the type does not accept date-time-like values, we can skip the
|
||||
additional logic for polyglot conversions that would normally be used,
|
||||
@ -1692,8 +1692,8 @@ type Column
|
||||
input.format "#,##0.00" locale=(Locale.new "fr")
|
||||
# ==> ["100 000 000,00", "2 222,00", "3,00"]
|
||||
@locale Locale.default_widget
|
||||
format : Text | Date_Time_Formatter | Column -> Locale -> Column ! Illegal_Argument
|
||||
format self (format : Text | Date_Time_Formatter | Column | Nothing)=Nothing locale=Locale.default =
|
||||
format : Text | Date_Time_Formatter | Column | Nothing -> Locale -> Column ! Illegal_Argument
|
||||
format self format:(Text | Date_Time_Formatter | Column | Nothing)=Nothing locale=Locale.default =
|
||||
new_column = case format of
|
||||
format_column : Column -> Value_Type.expect_text format_column <|
|
||||
formatter = make_value_formatter_for_value_type self.value_type locale
|
||||
@ -2527,3 +2527,15 @@ cast_if_needed column value_type = if column.value_type == value_type then colum
## PRIVATE
naming_helper : Column_Naming_Helper
naming_helper = Column_Naming_Helper.in_memory

## PRIVATE
   Conversion method to a Column from a Vector.
Column.from (that:Vector) (name:Text="Vector") = Column.from_vector name that

## PRIVATE
   Conversion method to a Column from a Range.
Column.from (that:Range) (name:Text="Range") = Column.from_vector name that.to_vector

## PRIVATE
   Conversion method to a Column from a Date_Range.
Column.from (that:Date_Range) (name:Text="Date Range") = Column.from_vector name that.to_vector
|
||||
from Standard.Base import all
|
||||
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
|
||||
|
||||
import Standard.Base.Metadata.Display
|
||||
from Standard.Base.Metadata.Widget import Vector_Editor
|
||||
from Standard.Base.Widget_Helpers import make_date_format_selector, make_time_format_selector, make_date_time_format_selector
|
||||
from Standard.Base.Widget_Helpers import make_date_format_selector, make_date_time_format_selector, make_time_format_selector
|
||||
|
||||
import project.Data.Type.Storage
|
||||
import project.Internal.Java_Problems
|
||||
@ -115,8 +114,8 @@ type Data_Formatter
|
||||
@datetime_formats (make_vector_widget make_date_time_format_selector)
|
||||
@date_formats (make_vector_widget make_date_format_selector)
|
||||
@time_formats (make_vector_widget make_time_format_selector)
|
||||
with_datetime_formats : ((Vector Date_Time_Formatter) | Date_Time_Formatter) -> ((Vector Date_Time_Formatter) | Date_Time_Formatter) -> ((Vector Date_Time_Formatter) | Date_Time_Formatter) -> Data_Formatter
|
||||
with_datetime_formats self (datetime_formats:Vector|Date_Time_Formatter = self.datetime_formats) (date_formats:Vector|Date_Time_Formatter = self.date_formats) (time_formats:Vector|Date_Time_Formatter = self.time_formats) =
|
||||
with_datetime_formats : (Vector Date_Time_Formatter | Date_Time_Formatter) -> (Vector Date_Time_Formatter | Date_Time_Formatter) -> (Vector Date_Time_Formatter | Date_Time_Formatter) -> Data_Formatter
|
||||
with_datetime_formats self datetime_formats:(Vector | Date_Time_Formatter)=self.datetime_formats date_formats:(Vector | Date_Time_Formatter)=self.date_formats time_formats:(Vector | Date_Time_Formatter)=self.time_formats =
|
||||
convert_formats formats =
|
||||
vector = case formats of
|
||||
v : Vector -> v
|
||||
@ -173,7 +172,7 @@ type Data_Formatter
## PRIVATE
Clone the instance with some properties overridden.
clone : Boolean -> Boolean -> Text -> Text -> Boolean -> Vector Text -> Vector Text -> Vector Text -> Locale -> Vector Text -> Vector Text -> Data_Formatter
clone : Boolean -> Boolean -> Text -> Text -> Boolean -> Vector Text -> Vector Text -> Vector Text -> Vector Text -> Vector Text -> Data_Formatter
clone self (trim_values=self.trim_values) (allow_leading_zeros=self.allow_leading_zeros) (decimal_point=self.decimal_point) (thousand_separator=self.thousand_separator) (allow_exponential_notation=self.allow_exponential_notation) (datetime_formats=self.datetime_formats) (date_formats=self.date_formats) (time_formats=self.time_formats) (true_values=self.true_values) (false_values=self.false_values) =
Data_Formatter.Value trim_values=trim_values allow_leading_zeros=allow_leading_zeros decimal_point=decimal_point thousand_separator=thousand_separator allow_exponential_notation=allow_exponential_notation datetime_formats=datetime_formats date_formats=date_formats time_formats=time_formats true_values=true_values false_values=false_values

@ -33,6 +33,8 @@ import project.Delimited.Delimited_Format.Delimited_Format
import project.Internal.Add_Row_Number
import project.Internal.Aggregate_Column_Helper
import project.Internal.Column_Naming_Helper.Column_Naming_Helper
import project.Internal.Delimited_Reader
import project.Internal.Delimited_Writer
import project.Internal.Java_Problems
import project.Internal.Join_Helpers
import project.Internal.Parse_Values_Helper
@ -40,7 +42,6 @@ import project.Internal.Problem_Builder.Problem_Builder
import project.Internal.Split_Tokenize
import project.Internal.Table_Helpers
import project.Internal.Table_Helpers.Table_Column_Helper
import project.Internal.Unique_Name_Strategy.Unique_Name_Strategy
import project.Internal.Widget_Helpers
from project.Data.Column import get_item_string, normalize_string_for_display
from project.Data.Type.Value_Type import Auto, Value_Type
@ -1407,15 +1408,11 @@ type Table
table.set double_inventory new_name="total_stock"
table.set "2 * [total_stock]" new_name="total_stock_expr"
@new_name Widget_Helpers.make_column_name_selector
set : Column | Text | Array | Vector | Range | Date_Range -> Text | Nothing -> Set_Mode -> Problem_Behavior -> Table ! Existing_Column | Missing_Column | No_Such_Column | Expression_Error
set self column new_name=Nothing set_mode=Set_Mode.Add_Or_Update on_problems=Report_Warning =
set : Text | Column -> Text | Nothing -> Set_Mode -> Problem_Behavior -> Table ! Existing_Column | Missing_Column | No_Such_Column | Expression_Error
set self column:(Text | Column) new_name=Nothing set_mode=Set_Mode.Add_Or_Update on_problems=Report_Warning =
resolved = case column of
_ : Text -> self.evaluate_expression column on_problems
_ : Column -> column
_ : Vector -> Column.from_vector (new_name.if_nothing "Vector") column
_ : Array -> Column.from_vector (new_name.if_nothing "Vector") column.to_vector
_ : Range -> Column.from_vector (new_name.if_nothing "Range") column.to_vector
_ : Date_Range -> Column.from_vector (new_name.if_nothing "Date Range") column.to_vector
_ -> Error.throw (Illegal_Argument.Error "Unsupported type for `Table.set`.")

renamed = case new_name of
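
With the `column:(Text | Column)` type check above, the explicit `Vector`, `Array`, `Range` and `Date_Range` branches are no longer needed: any value with a `Column.from` conversion should be accepted through the type check. A sketch (the table and values are illustrative):

    table = Table.new [["total_stock", [10, 20, 30]]]
    table.set [1, 2, 3] new_name="ints"                  # the Vector is converted via Column.from
    table.set "2 * [total_stock]" new_name="doubled"     # a Text argument is still parsed as an expression
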
@ -1603,7 +1600,7 @@ type Table
table.join other on=[Join_Condition.Equals "A" "A", Join_Condition.Equals "B" "B"]
@on Widget_Helpers.make_join_condition_selector
join : Table -> Join_Kind -> Vector (Join_Condition | Text) | Text -> Text -> Problem_Behavior -> Table
join self right (join_kind : Join_Kind = Join_Kind.Left_Outer) on=[Join_Condition.Equals self.column_names.first] right_prefix="Right " on_problems=Report_Warning = Out_Of_Memory.handle_java_exception "join" <|
join self right:Table (join_kind : Join_Kind = Join_Kind.Left_Outer) on=[Join_Condition.Equals self.column_names.first] right_prefix="Right " on_problems=Report_Warning = Out_Of_Memory.handle_java_exception "join" <|
if check_table "right" right then
# [left_unmatched, matched, right_unmatched]
rows_to_keep = case join_kind of
@ -1665,7 +1662,7 @@ type Table
example, by sorting the table; in-memory tables will keep the memory
layout order while for database tables the order may be unspecified).
cross_join : Table -> Integer | Nothing -> Text -> Problem_Behavior -> Table
cross_join self right right_row_limit=100 right_prefix="Right " on_problems=Report_Warning = Out_Of_Memory.handle_java_exception "cross_join" <|
cross_join self right:Table right_row_limit=100 right_prefix="Right " on_problems=Report_Warning = Out_Of_Memory.handle_java_exception "cross_join" <|
if check_table "right" right then
limit_problems = case right_row_limit.is_nothing.not && (right.row_count > right_row_limit) of
True ->
@ -1727,7 +1724,7 @@ type Table
The ordering of rows in the resulting table is not specified.
@keep_unmatched (make_single_choice [["True", "Boolean.True"], ["False", "Boolean.False"], ["Report", Meta.get_qualified_type_name Report_Unmatched]])
zip : Table -> Boolean | Report_Unmatched -> Text -> Problem_Behavior -> Table
zip self right keep_unmatched=Report_Unmatched right_prefix="Right " on_problems=Report_Warning =
zip self right:Table keep_unmatched=Report_Unmatched right_prefix="Right " on_problems=Report_Warning =
if check_table "right" right then
keep_unmatched_bool = case keep_unmatched of
Report_Unmatched -> True
@ -1837,6 +1834,7 @@ type Table
union self tables match_columns=Match_Columns.By_Name keep_unmatched_columns=Report_Unmatched allow_type_widening=True on_problems=Report_Warning =
all_tables = case tables of
v : Vector -> [self] + v
single_column : Column -> [self, single_column.to_table]
single_table -> [self, single_table]
all_tables.all (check_table "tables") . if_not_error <|
problem_builder = Problem_Builder.new
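
The `right:Table` checks above mean that a non-table `right` argument now fails with `Type_Error`, while a `Column` is still accepted because it converts to a one-column `Table` via `Table.from`. A sketch (the values are illustrative):

    t = Table.new [["key", [1, 2, 3]]]
    c = Column.from_vector "key" [2, 3, 4]
    t.join c on="key"    # the Column is converted to a single-column Table
    t.join 42            # fails with Type_Error
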
@ -2393,3 +2391,36 @@ concat_columns column_set all_tables result_type result_row_count on_problems =
storage = parent_table.at i . java_column . getStorage
storage_builder.appendBulkStorage storage
Column.from_storage column_set.name storage_builder.seal

## PRIVATE
Conversion method to a Table from a Column.
Table.from (that:Column) = that.to_table

## Converts a Text value into a Table.

The format of the text is determined by the `format` argument.

Arguments:
- that: The text to convert.
- format: The format of the text.
- on_problems: What to do if there are problems reading the text.
Table.from (that : Text) (format:Delimited_Format = Delimited_Format.Delimited '\t') (on_problems:Problem_Behavior=Report_Warning) =
case format of
_ : Delimited_Format -> Delimited_Reader.read_text that format on_problems
_ -> Unimplemented.throw "Table.from is currently only implemented for Delimited_Format."

## Converts a Table into a Text value.

The format of the text is determined by the `format` argument.

Arguments:
- that: The table to convert.
- format: The format of the text.
Text.from (that : Table) (format:Delimited_Format = Delimited_Format.Delimited '\t') =
case format of
_ : Delimited_Format -> Delimited_Writer.write_text that format
_ -> Unimplemented.throw "Text.from is currently only implemented for Delimited_Format."

## PRIVATE
Conversion method to a Table from a Vector.
Table.from (that:Vector) (fields : (Vector | Nothing) = Nothing) = that.to_table fields

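A sketch of the `Text` round trip through the two conversions above (tab-separated by default; the sample text is illustrative):

    t = Table.from 'a\tb\n1\t2\n3\t4\n'    # parse tab-separated text into a Table
    Text.from t                            # render the Table back as tab-separated text
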
@ -1,41 +1,56 @@
from Standard.Base import all
import Standard.Base.Data.Text.Regex.Regex_Syntax_Error
import Standard.Base.Errors.Common.Index_Out_Of_Bounds
import Standard.Base.Errors.Common.Type_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Errors.Unimplemented.Unimplemented

import project.Data.Match_Columns.Match_Columns
import project.Data.Table.Table
import project.Delimited.Delimited_Format.Delimited_Format
import project.Errors.Invalid_JSON_Format
import project.Internal.Delimited_Reader
import project.Internal.Delimited_Writer
import project.Errors.No_Such_Column
import project.Internal.Parse_To_Table
import project.Internal.Widget_Helpers

## Converts a Text value into a Table.
## GROUP Standard.Base.Conversions
Expand a column of objects to a new set of columns.

The format of the text is determined by the `format` argument.
Arguments:
- column: The column to expand.
- fields: The set of fields to expand. If `Nothing` then all fields are added.
- prefix: Prefix to add to the column names. If `Nothing` then the column
name is used.
@column Widget_Helpers.make_column_name_selector
Table.expand_column : Text | Integer -> Vector | Nothing -> Text | Table -> Table ! Type_Error | No_Such_Column | Index_Out_Of_Bounds
Table.expand_column self column fields=Nothing prefix=Nothing =
column_object = self.at column
expanded = Table.from_objects column_object.to_vector fields

output_builder = Vector.new_builder self.column_count+expanded.column_count-1

## Resolve names
resolved_prefix = if prefix.is_nothing then column_object.name+" " else prefix
naming_strategy = self.column_naming_helper.create_unique_name_strategy
naming_strategy.mark_used (self.column_names.filter (c->c!=column_object.name))
new_names = naming_strategy.make_all_unique (expanded.column_names.map n-> resolved_prefix+n)
new_columns = new_names.zip expanded.columns (n->c-> c.rename n)

## Create Merged Columns
self.columns.each c->
if c.name != column_object.name then output_builder.append c else
output_builder.append_vector_range new_columns

Table.new output_builder.to_vector

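A sketch of `Table.expand_column` on a column of `JS_Object` values (the data is illustrative). Each field becomes its own column; names are prefixed with the source column's name unless `prefix` is given:

    points = ['{"x": 1, "y": 2}', '{"x": 3, "y": 4}'] . map Json.parse
    table = Table.new [["id", [1, 2]], ["point", points]]
    table.expand_column "point"              # columns: id, "point x", "point y"
    table.expand_column "point" prefix=""    # columns: id, x, y
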
## GROUP Standard.Base.Conversions
Converts this `Vector` into a `Table`.

Arguments:
- that: The text to convert.
- format: The format of the text.
- on_problems: What to do if there are problems reading the text.
Table.from (that : Text) (format:Delimited_Format = Delimited_Format.Delimited '\t') (on_problems:Problem_Behavior=Report_Warning) =
case format of
_ : Delimited_Format -> Delimited_Reader.read_text that format on_problems
_ -> Unimplemented.throw "Table.from is currently only implemented for Delimited_Format."

## Converts a Table into a Text value.

The format of the text is determined by the `format` argument.

Arguments:
- that: The table to convert.
- format: The format of the text.
Text.from (that : Table) (format:Delimited_Format = Delimited_Format.Delimited '\t') =
case format of
_ : Delimited_Format -> Delimited_Writer.write_text that format
_ -> Unimplemented.throw "Text.from is currently only implemented for Delimited_Format."
- fields: a Vector of Text representing the names of fields to look up.
If `Nothing` then all fields found are added.
Vector.to_table : Vector | Nothing -> Table ! Type_Error
Vector.to_table self fields=Nothing =
Table.from_objects self fields

## GROUP Standard.Base.Constants
Converts an object or a Vector of objects into a Table, by looking up the
@ -70,6 +85,9 @@ Table.from_objects value fields=Nothing =
_ : Number -> ["Value"]
_ : Boolean -> ["Value"]
_ : Text -> ["Value"]
_ : Date -> ["Value"]
_ : Time_Of_Day -> ["Value"]
_ : Date_Time -> ["Value"]
_ -> Error.throw (Illegal_Argument.Error "Invalid item within Vector for Table.from_objects. Currently only JS_Object, Number, Boolean, Text and Nothing are supported (got "+(Meta.get_simple_type_name v)+").")

get_value v field = case v of
@ -81,6 +99,9 @@ Table.from_objects value fields=Nothing =
_ : Number -> table_for_value value
_ : Boolean -> table_for_value value
_ : Text -> table_for_value value
_ : Date -> table_for_value value
_ : Time_Of_Day -> table_for_value value
_ : Date_Time -> table_for_value value
_ : JS_Object ->
field_names = fields.if_nothing value.field_names
values = field_names.map value.get
@ -107,7 +128,7 @@ Table.from_objects value fields=Nothing =

Table.new (used_fields.zip used_values)
_ : Array -> Table.from_objects (Vector.from_polyglot_array value) fields
_ -> Error.throw (Illegal_Argument.Error "Invalid value for Table.from_objects. Currently must be one of JS_Object, Vector, Array, Number, Boolean, Text and Nothing are supported (got "+(Meta.get_simple_type_name value)+").")
_ -> Error.throw (Illegal_Argument.Error "Invalid value for Table.from_objects. Currently must be one of JS_Object, Vector, Array, Number, Boolean, Text, date/time and Nothing are supported (got "+(Meta.get_simple_type_name value)+").")

## GROUP Standard.Base.Conversions
Converts a Text into a Table using a regular expression pattern.
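
With the added `Date`, `Time_Of_Day` and `Date_Time` cases above, date and time values are accepted by `Table.from_objects` (and therefore by `Vector.to_table`). A sketch:

    Table.from_objects [Date.new 2023 9 29, Date.new 2023 9 30]    # a single "Value" column of dates
    [Time_Of_Day.new 12 30, Time_Of_Day.new 18 45] . to_table      # the same, via the Vector extension
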
@ -8,7 +8,6 @@ import project.Internal.Multi_Value_Key.Ordered_Multi_Value_Key
import project.Internal.Multi_Value_Key.Unordered_Multi_Value_Key
import project.Internal.Problem_Builder.Problem_Builder
import project.Internal.Table_Helpers
import project.Internal.Unique_Name_Strategy.Unique_Name_Strategy
from project.Errors import Duplicate_Output_Column_Names
from project.Internal.Java_Exports import make_long_builder

@ -8,7 +8,6 @@ import project.Data.Type.Value_Type.Value_Type
import project.Internal.Column_Naming_Helper.Column_Naming_Helper
import project.Internal.Problem_Builder.Problem_Builder
import project.Internal.Table_Helpers
import project.Internal.Unique_Name_Strategy.Unique_Name_Strategy
from project.Data.Aggregate_Column.Aggregate_Column import all
from project.Errors import Duplicate_Output_Column_Names, Invalid_Aggregation, Invalid_Column_Names, No_Output_Columns

@ -33,7 +33,7 @@ make_value_formatter_for_value_type value_type locale = case value_type of
Create a formatter for the given format string.
The `value` parameter has to have a `format` method that takes a format and
locale.
make_value_formatter : Locale -> (Text -> Any -> Text)
make_value_formatter : Locale -> Text | Nothing -> (Text -> Any -> Text)
make_value_formatter locale (format : Text | Nothing) =
if format.is_nothing || format.is_empty then .to_text else
value->
@ -42,14 +42,14 @@ make_value_formatter locale (format : Text | Nothing) =
## PRIVATE
Create a `Boolean` formatter that takes the format string as the second
parameter.
make_boolean_formatter : (Text -> Boolean -> Text)
make_boolean_formatter : Text | Nothing -> (Text -> Boolean -> Text)
make_boolean_formatter (format : Text | Nothing) =
if format.is_nothing || format.is_empty then .to_text else
data_formatter = Data_Formatter.Value.with_format Value_Type.Boolean format
bool -> data_formatter.format bool

## PRIVATE
make_datetime_formatter : Locale -> Date_Time_Formatter | Text | Nothing -> (Any -> Text)
make_datetime_formatter : Locale -> Text | Date_Time_Formatter | Nothing -> (Any -> Text)
make_datetime_formatter (locale_override : Locale) (format : Text | Date_Time_Formatter | Nothing) =
use_default = format.is_nothing || (format == "")
if use_default then .to_text else

@ -4,7 +4,6 @@ import project.Data.Column.Column
import project.Data.Table.Table
import project.Data.Type.Value_Type.Value_Type
import project.Internal.Problem_Builder.Problem_Builder
import project.Internal.Unique_Name_Strategy.Unique_Name_Strategy

from project.Errors import Column_Count_Exceeded, Column_Count_Mismatch, Duplicate_Output_Column_Names, Invalid_Value_Type, Missing_Input_Columns
from project.Internal.Java_Exports import make_string_builder

@ -12,7 +12,6 @@ import project.Data.Type.Value_Type.Value_Type
import project.Data.Type.Value_Type_Helpers
import project.Internal.Column_Naming_Helper.Column_Naming_Helper
import project.Internal.Problem_Builder.Problem_Builder
import project.Internal.Unique_Name_Strategy.Unique_Name_Strategy
from project.Errors import Ambiguous_Column_Rename, Column_Type_Mismatch, Invalid_Aggregate_Column, Missing_Input_Columns, No_Common_Type, No_Input_Columns_Selected, No_Output_Columns, Too_Many_Column_Names_Provided

polyglot java import java.util.HashSet

@ -910,7 +910,7 @@ spec =
Test.group "[In-Memory-specific] Table.join" <|
Test.specify "should correctly report unsupported cross-backend joins" <|
t = Table.new [["X", [1, 2, 3]]]
t.join 42 . should_fail_with Type_Error
Panic.recover Type_Error (t.join 42) . should_fail_with Type_Error

db_connection = Database.connect (SQLite In_Memory)
db_table = (Table.new [["Y", [4, 5, 6]]]).select_into_database_table db_connection "test"

@ -66,6 +66,42 @@ spec =
deep_err.should_fail_parsing_with "closing quote ] expected at position 34"
"123 4".should_fail_parsing_with "JSON cannot be fully parsed at position 4"

Test.specify "should be able to deserialize using into via conversion" <|
Json.parse '{"type":"Time_Zone","constructor":"parse","id":"Europe/Moscow"}' . into Time_Zone . should_equal (Time_Zone.parse "Europe/Moscow")

Test.specify "should be able to deserialize using into for single constructor" <|
Json.parse '{"first": 1, "second": 2}' . into Pair . should_equal (Pair.Value 1 2)
Json.parse '{"start": 15, "end": 20, "step": 3}' . into Range . should_equal (Range.Between 15 20 3)

Test.specify "should be able to deserialize using into for multiple constructors" <|
Json.parse '{"than": 2}' . into Filter_Condition . should_fail_with Illegal_Argument
Json.parse '{"constructor": "Less", "than": 2}' . into Filter_Condition . should_equal (Filter_Condition.Less 2)
Json.parse '{"constructor": "NotARealOne", "than": 2}' . into Filter_Condition . should_fail_with Illegal_Argument

Test.specify "should be able to deserialize Date" <|
'{"type": "Date", "constructor": "new", "year": 2018, "month": 7, "day": 3}'.should_parse_as (Date.new 2018 7 3)
'{"type": "Date", "year": 2025, "month": 5, "day": 12}'.should_parse_as (Date.new 2025 5 12)
'{"type": "Date", "month": 5, "day": 12}' . should_parse_as (JS_Object.from_pairs [["type", "Date"], ["month", 5], ["day", 12]])
'{"type": "Date", "year": 2019, "day": 12}' . should_parse_as (JS_Object.from_pairs [["type", "Date"], ["year", 2019], ["day", 12]])

Test.specify "should be able to deserialize Time_Of_Day" <|
'{"type": "Time_Of_Day", "constructor": "new", "hour": 22, "minute": 14, "second": 47}'.should_parse_as (Time_Of_Day.new 22 14 47)
'{"type": "Time_Of_Day", "hour": 12, "minute": 30}'.should_parse_as (Time_Of_Day.new 12 30 0)
'{"type": "Time_Of_Day", "hour": 18, "minute": 6, "second": 13, "nanosecond": 1234568}'.should_parse_as (Time_Of_Day.new 18 6 13 nanosecond=1234568)
'{"type": "Time_Of_Day", "minute": 14, "second": 47}' . should_parse_as (JS_Object.from_pairs [["type", "Time_Of_Day"], ["minute", 14], ["second", 47]])
'{"type": "Time_Of_Day", "hour": 14, "second": 47}' . should_parse_as (JS_Object.from_pairs [["type", "Time_Of_Day"], ["hour", 14], ["second", 47]])
'{"type": "Time_Of_Day", "hour": 18, "minute": 6, "nanosecond": 1234568}'.should_parse_as (JS_Object.from_pairs [["type", "Time_Of_Day"], ["hour", 18], ["minute", 6], ["nanosecond", 1234568]])

Test.specify "should be able to deserialize Date_Time" <|
tz = Time_Zone.parse "Europe/Moscow"
'{"type":"Date_Time","constructor":"new","year":2023,"month":9,"day":29,"hour":11,"minute":52,"second":33,"nanosecond":572104300,"zone":{"type":"Time_Zone","constructor":"parse","id":"Europe/Moscow"}}'.should_parse_as (Date_Time.new 2023 9 29 11 52 33 nanosecond=572104300 zone=tz)
'{"type":"Date_Time","constructor":"new","year":2023,"month":9,"day":29,"hour":11,"minute":52,"second":33,"zone":{"type":"Time_Zone","constructor":"parse","id":"Europe/Moscow"}}'.should_parse_as (Date_Time.new 2023 9 29 11 52 33 zone=tz)
'{"type":"Date_Time","constructor":"new","year":2023,"month":9,"day":29,"hour":11,"minute":52,"second":33,"nanosecond":572104300}'.should_parse_as (Date_Time.new 2023 9 29 11 52 33 nanosecond=572104300)
'{"type":"Date_Time","constructor":"new","year":2023,"month":9,"day":29,"hour":11,"minute":52,"second":33}'.should_parse_as (Date_Time.new 2023 9 29 11 52 33)
'{"type":"Date_Time","constructor":"new","year":2023,"month":9,"day":29,"hour":11,"minute":52}'.should_parse_as (Date_Time.new 2023 9 29 11 52 00)
'{"type":"Date_Time","constructor":"new","year":2023,"month":9,"day":29,"hour":11,"second":52}'.should_parse_as (JS_Object.from_pairs [["type", "Date_Time"], ["constructor", "new"], ["year", 2023], ["month", 9], ["day", 29], ["hour", 11], ["second", 52]])
'{"type":"Date_Time","constructor":"new","year":2023,"month":9,"day":29,"hour":11,"minute":52,"nanosecond":572104300}'.should_parse_as (JS_Object.from_pairs [["type", "Date_Time"], ["constructor", "new"], ["year", 2023], ["month", 9], ["day", 29], ["hour", 11], ["minute", 52], ["nanosecond", 572104300]])

Test.group "JSON Serialization" <|
Test.specify "should print JSON structures to valid json" <|
"0".should_render_itself
@ -100,6 +136,10 @@ spec =
JS_Object.from_pairs [["foo", "bar"]] . to_json . should_equal '{"foo":"bar"}'
JS_Object.from_pairs [["foo", "bar"], ["baz", Nothing]] . to_json . should_equal '{"foo":"bar","baz":null}'

Test.specify "should be buildable from pairs" <|
JS_Object.from_pairs [["foo", "bar"]] . to_json . should_equal '{"foo":"bar"}'
JS_Object.from_pairs [["foo", "bar"], ["baz", Nothing]] . to_json . should_equal '{"foo":"bar","baz":null}'

Test.specify "should be handle equality on a key level" <|
JS_Object.from_pairs [["a", 42]] . should_equal <| Json.parse '{"a": 42}'
JS_Object.from_pairs [["a", 42]] . should_not_equal <| JS_Object.from_pairs [["a", 43]]
@ -125,6 +165,11 @@ spec =
Json.parse "null" . get "foo" . should_equal Nothing
Json.parse "null" . get "foo" 1 . should_equal 1

Test.specify "should allow checking for fields" <|
object = Json.parse '{ "foo": "bar", "baz": ["foo", "x", false],"y": {"z": null, "w": null} }'
object.contains_key "foo" . should_equal True
object.contains_key "bar" . should_equal False

Test.specify "should be able to get field_names" <|
Json.parse '{ "foo": "bar", "baz": ["foo", "x", false] }' . field_names . should_equal ["foo", "baz"]
Json.parse '{}' . field_names . should_equal []

@ -32,9 +32,9 @@ spec =
Test.specify "should convert to Json" <|
zone = Time_Zone.new 1 2 3
zone.to_json.should_equal <|
JS_Object.from_pairs [["type", "Time_Zone"], ["constructor", "new"], ["id", "+01:02:03"]] . to_text
JS_Object.from_pairs [["type", "Time_Zone"], ["constructor", "parse"], ["id", "+01:02:03"]] . to_text
Time_Zone.utc.to_json.should_equal <|
JS_Object.from_pairs [["type", "Time_Zone"], ["constructor", "new"], ["id", "UTC"]] . to_text
JS_Object.from_pairs [["type", "Time_Zone"], ["constructor", "parse"], ["id", "UTC"]] . to_text
Test.specify "should throw error when parsing invalid zone id" <|
case Time_Zone.parse "foo" . catch of
Time_Error.Error msg _ ->
@ -66,9 +66,9 @@ spec =
Test.specify "should convert to Json" <|
zone = ZoneOffset.ofHoursMinutesSeconds 1 2 3
zone.to_json.should_equal <|
JS_Object.from_pairs [["type", "Time_Zone"], ["constructor", "new"], ["id", "+01:02:03"]] . to_text
JS_Object.from_pairs [["type", "Time_Zone"], ["constructor", "parse"], ["id", "+01:02:03"]] . to_text
(ZoneId.of "UTC").to_json.should_equal <|
JS_Object.from_pairs [["type", "Time_Zone"], ["constructor", "new"], ["id", "UTC"]] . to_text
JS_Object.from_pairs [["type", "Time_Zone"], ["constructor", "parse"], ["id", "UTC"]] . to_text
Test.specify "should correctly determine the type of zone" <|
zone = ZoneId.systemDefault
Meta.type_of zone . should_equal_type Time_Zone

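Since the serialized form now uses the `parse` constructor, the generic `into` deserialization can rebuild a zone from its own JSON. A sketch of the round trip (assuming `Time_Zone.parse` accepts the offset id that `to_json` emits):

    zone = Time_Zone.new 1 2 3
    Json.parse zone.to_json . into Time_Zone    # an equivalent Time_Zone
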
@ -32,6 +32,22 @@ spec =
pending_has_url = if base_url != Nothing then Nothing else
"The HTTP tests only run when the `ENSO_HTTP_TEST_HTTPBIN_URL` environment variable is set to URL of the httpbin server"

Test.group "HTTP_Method parse" <|
Test.specify "should be able to parse a string value into a method" <|
"GET" . to HTTP_Method . should_equal (HTTP_Method.Get)
"POST" . to HTTP_Method . should_equal (HTTP_Method.Post)
"PUT" . to HTTP_Method . should_equal (HTTP_Method.Put)
"PATCH" . to HTTP_Method . should_equal (HTTP_Method.Patch)

Test.specify "should be case insensitive" <|
"get" . to HTTP_Method . should_equal (HTTP_Method.Get)
"pOst" . to HTTP_Method . should_equal (HTTP_Method.Post)
"puT" . to HTTP_Method . should_equal (HTTP_Method.Put)
"PATCH" . to HTTP_Method . should_equal (HTTP_Method.Patch)

Test.specify "should make a custom method" <|
"CUSTOM" . to HTTP_Method . should_equal (HTTP_Method.Custom "CUSTOM")

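Because public APIs now type check their `method` argument against `HTTP_Method`, the `Text` conversion exercised above lets the method be written as a plain string (a sketch; `url_get` as used in the tests below):

    Data.fetch url_get method="GET"    # equivalent to method=HTTP_Method.Get, via the Text conversion
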
Test.group "HTTP client" pending=pending_has_url <|
Test.specify "should create HTTP client with timeout setting" <|
http = HTTP.new (timeout = (Duration.new seconds=30))
@ -72,16 +88,27 @@ spec =
}
response . should_equal expected_response

uri_response = url_get.to_uri.fetch
uri_response . should_equal expected_response

Test.specify "Can perform a HEAD" <|
response = Data.fetch url_head method=HTTP_Method.Head
response.code.code . should_equal 200
response.decode_as_text . should_equal ''

uri_response = url_head.to_uri.fetch method=HTTP_Method.Head
uri_response.code.code . should_equal 200
uri_response.decode_as_text . should_equal ''

Test.specify "Can perform an OPTIONS" <|
response = Data.fetch url_options method=HTTP_Method.Options
response.code.code . should_equal 200
response.decode_as_text . should_equal ''

uri_response = url_head.to_uri.fetch method=HTTP_Method.Options
uri_response.code.code . should_equal 200
uri_response.decode_as_text . should_equal ''

Test.specify "Can perform auto-parse" <|
response = Data.fetch url_get
response.at "headers" . at "Content-Length" . should_equal "0"
@ -102,6 +129,10 @@ spec =
}
response.decode_as_json . should_equal expected_response

uri_response = url_get.to_uri.fetch try_auto_parse_response=False
uri_response.code.code . should_equal 200
uri_response.decode_as_json . should_equal expected_response

Test.specify "Can still perform request when output context is disabled" <|
run_with_and_without_output <|
Data.fetch url_get try_auto_parse_response=False . code . code . should_equal 200
@ -149,6 +180,9 @@ spec =
}
response . should_equal expected_response

url_response = url_post.to_uri.post (Request_Body.Text "hello world")
url_response . should_equal expected_response

Test.specify "Can perform a Request_Body.Json JSON POST" <|
json = Json.parse '{"a": "asdf", "b": 123}'
response = Data.post url_post (Request_Body.Json json)