Tidy Standard.Base part 3 of n ... (#3893)

Here we go again...
- Tidied up `Pair` and stopped exporting `Pair_Data`. Adjusted so the type is exported.
- Tidy imports for `Json`, `Json.Internal`, `Locale`.
- Tidied imports for `Ordering.*`. Export `Sort_Direction` and `Case_Sensitivity` as types.
- Move methods of `Statistics` into `Statistic`. Publishing the types not the module.
- Added a `compute` method to `Rank_Method`.
- Tidied the `Regression` module.
- Moved methods of `Date`, `Date_Time`, `Duration`, `Time_Of_Day` and `Time_Zone` into their types. Publishing types not modules.
- Added exporting `Period`, `Date_Period` and `Time_Period` as types. Static methods moved into types.

# Important Notes
- Moved `compare_to_ignore_case`, `equals_ignore_case` and `to_case_insensitive_key` from Extensions into `Text`.
- Hid polyglot Java imports from the export-all in `Main.enso`.
This commit is contained in:
James Dunkerley 2022-11-21 15:30:18 +00:00 committed by GitHub
parent 79329ef00f
commit 93fee3a51f
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
84 changed files with 1551 additions and 1525 deletions

View File

@ -81,19 +81,19 @@ type Any
== self that = if Meta.is_same_object self that then True else
self_meta = Meta.meta self
that_meta = Meta.meta that
case Pair.Pair_Data self_meta that_meta of
Pair.Pair_Data (Meta.Atom_Data _) (Meta.Atom_Data _) ->
case Pair.Value self_meta that_meta of
Pair.Value (Meta.Atom_Data _) (Meta.Atom_Data _) ->
c_1 = self_meta.constructor ...
c_2 = that_meta.constructor ...
if Meta.is_same_object c_1 c_2 . not then False else
f_1 = self_meta.fields
f_2 = that_meta.fields
0.up_to f_1.length . all i-> (f_1.at i) == (f_2.at i)
Pair.Pair_Data (Meta.Error_Data _) (Meta.Error_Data _) -> self_meta.payload == that_meta.payload
Pair.Pair_Data (Meta.Polyglot_Data o_1) (Meta.Polyglot_Data o_2) ->
Pair.Value (Meta.Error_Data _) (Meta.Error_Data _) -> self_meta.payload == that_meta.payload
Pair.Value (Meta.Polyglot_Data o_1) (Meta.Polyglot_Data o_2) ->
langs_match = (self_meta.get_language == Meta.Java) && (that_meta.get_language == Meta.Java)
if langs_match.not then False else o_1.equals o_2
Pair.Pair_Data (Meta.Unresolved_Symbol_Data _) (Meta.Unresolved_Symbol_Data _) ->
Pair.Value (Meta.Unresolved_Symbol_Data _) (Meta.Unresolved_Symbol_Data _) ->
(self_meta.name == that_meta.name) && (self_meta.scope == that_meta.scope)
## Constructor comparison is covered by the identity equality.
Primitive objects should define their own equality.
@ -323,7 +323,7 @@ type Any
Applying a function over a value 10.
10.map_nothing *2
map_nothing : (a -> b) -> b | Nothing
map_nothing : (Any -> Any) -> Any | Nothing
map_nothing self f = case self of
Nothing -> Nothing
a -> f a

View File

@ -4,8 +4,8 @@ import project.Data.Vector.Vector
import project.Math
import project.Random
import project.Runtime.Ref
## Imported this way as then imports the extension methods.
from project.Data.Range import Range
from project.Data.Range import all
from project.Data.Boolean import Boolean, True, False
from project.Error.Common import Error, Panic, Index_Out_Of_Bounds_Error_Data, Illegal_Argument_Error_Data

View File

@ -1,7 +1,15 @@
from Standard.Base import all
import project.Data.Map.No_Value_For_Key
import project.Data.Any.Any
import project.Data.Json.Internal
import project.Data.Map.Map
import project.Data.Map.No_Value_For_Key
import project.Data.Text.Text
import project.Data.Vector.Vector
import project.Meta
import project.Nothing.Nothing
from project.Data.Boolean import Boolean, True, False
from project.Data.Range import all
from project.Error.Common import Panic, Error, Illegal_Argument_Error_Data
## Represents a JSON structure.
type Json

View File

@ -1,5 +1,17 @@
from Standard.Base import all
from project.Data.Json import Marshalling_Error
import project.Data.Any.Any
import project.Data.List.List
import project.Data.Map.Map
import project.Data.Numbers.Number
import project.Data.Numbers.Decimal
import project.Data.Numbers.Integer
import project.Data.Text.Text
import project.Data.Vector.Vector
import project.Meta
import project.Nothing.Nothing
from project.Data.Boolean import Boolean, True, False
from project.Data.Json import Json, Marshalling_Error
from project.Error.Common import Panic
import project.Runtime.Ref

View File

@ -2,6 +2,7 @@ import project.Data.Any.Any
import project.Data.Filter_Condition.Filter_Condition
import project.Data.Numbers.Number
import project.Data.Numbers.Integer
import project.Data.Text.Text
import project.Data.Vector.Vector
import project.Function.Function
import project.Nothing.Nothing

View File

@ -1,4 +1,10 @@
from Standard.Base import all
import project.Data.Any.Any
import project.Data.Json.Json
import project.Data.Text.Text
import project.Data.Vector.Vector
import project.Nothing.Nothing
from project.Data.Boolean import Boolean, False
polyglot java import java.util.Locale as JavaLocale

View File

@ -454,7 +454,7 @@ type Map
first : Pair
first self =
first p m = case m of
Map.Bin _ k v l _ -> @Tail_Call first (Pair.Pair_Data k v) l
Map.Bin _ k v l _ -> @Tail_Call first (Pair.Value k v) l
Map.Tip -> p
first Nothing self
@ -463,7 +463,7 @@ type Map
last : Pair
last self =
last p m = case m of
Map.Bin _ k v _ r -> @Tail_Call last (Pair.Pair_Data k v) r
Map.Bin _ k v _ r -> @Tail_Call last (Pair.Value k v) r
Map.Tip -> p
last Nothing self

View File

@ -162,4 +162,3 @@ size : Map -> Integer
size m = case m of
Map.Bin s _ _ _ _ -> s
_ -> 0

View File

@ -1,4 +1,6 @@
import project.Data.Json.Json
import project.Data.Ordering.Ordering
import project.Data.Text.Text
from project.Data.Boolean import Boolean, True, False
from project.Error.Common import Panic,Error,Illegal_Argument_Error

View File

@ -1,4 +1,11 @@
from Standard.Base import all
import project.Data.Any.Any
import project.Data.Ordering.Natural_Order
import project.Data.Ordering.Ordering
import project.Data.Text.Case_Sensitivity.Case_Sensitivity
import project.Data.Text.Text_Ordering.Text_Ordering
import project.Nothing.Nothing
from project.Data.Boolean import True, False
from project.Data.Vector import handle_incomparable_value
polyglot java import org.enso.base.ObjectComparator

View File

@ -1,4 +1,10 @@
from Standard.Base import all
import project.Data.Numbers.Integer
import project.Data.Ordering.Ordering
import project.Data.Pair.Pair
import project.Data.Text.Text
import project.Data.Text.Case_Sensitivity.Case_Sensitivity
from project.Data.Boolean import True, False
polyglot java import org.enso.base.Text_Utils
polyglot java import com.ibm.icu.text.BreakIterator
@ -38,10 +44,10 @@ compare text1 text2 case_sensitivity=Case_Sensitivity.Sensitive =
## Find end of number and return pair of index and flag if reached end
loop text next iter =
new_next = iter.next
if (new_next == -1) then (Pair_Data next True) else
if (new_next == -1) then (Pair.Value next True) else
substring = Text_Utils.substring text next new_next
character = Text_Utils.get_chars substring . at 0
if (is_digit character).not then (Pair_Data next False) else
if (is_digit character).not then (Pair.Value next False) else
@Tail_Call loop text new_next iter
pair = loop text next iter
@ -60,18 +66,18 @@ compare text1 text2 case_sensitivity=Case_Sensitivity.Sensitive =
prev2 - index to start of current character in text2.
next2 - index to start of next character (or -1 if finished) in text2.
order prev1 next1 prev2 next2 =
case (Pair_Data (next1 == -1) (next2 == -1)) of
Pair_Data True True -> Ordering.Equal
Pair_Data True False -> Ordering.Less
Pair_Data False True -> Ordering.Greater
Pair_Data False False ->
case (Pair.Value (next1 == -1) (next2 == -1)) of
Pair.Value True True -> Ordering.Equal
Pair.Value True False -> Ordering.Less
Pair.Value False True -> Ordering.Greater
Pair.Value False False ->
substring1 = Text_Utils.substring text1 prev1 next1
first_char_1 = Text_Utils.get_chars substring1 . at 0
substring2 = Text_Utils.substring text2 prev2 next2
first_char_2 = Text_Utils.get_chars substring2 . at 0
tmp = Pair_Data (is_digit first_char_1) (is_digit first_char_2)
tmp = Pair.Value (is_digit first_char_1) (is_digit first_char_2)
## ToDo: Move to case on second block
Appears to be an issue using a nested case statement on a pair
https://www.pivotaltracker.com/story/show/181280737

View File

@ -1,8 +1,4 @@
from Standard.Base import all
# TODO Dubious constructor export
from project.Data.Ordering.Sort_Direction.Sort_Direction import all
from project.Data.Ordering.Sort_Direction.Sort_Direction export all
import project.Data.Numbers.Integer
## Specifies the direction of ordering.
type Sort_Direction
@ -26,5 +22,5 @@ type Sort_Direction
## Convert into the sign of the direction
to_sign : Integer
to_sign self = case self of
Ascending -> 1
Descending -> -1
Sort_Direction.Ascending -> 1
Sort_Direction.Descending -> -1

View File

@ -1,4 +1,8 @@
from Standard.Base import all
import project.Data.Any.Any
import project.Data.Ordering.Ordering
import project.Data.Vector.Vector
from project.Data.Boolean import True, False
## Compares two Vectors according to the lexicographic ordering.
@ -46,4 +50,3 @@ compare vector1 vector2 (element_comparator = _.compare_to _) =
length that means they must have been equal.
vector1.length . compare_to vector2.length
go 0

View File

@ -1,12 +1,7 @@
from Standard.Base import all
# TODO Dubious constructor export
from project.Data.Pair.Pair import all
from project.Data.Pair.Pair export all
import project.Data.Any.Any
## A pair of elements.
type Pair
## UNSTABLE
A simple pair of elements.
@ -14,7 +9,7 @@ type Pair
Arguments:
- first: The first element.
- second: The second element.
Pair_Data first second
Value first second
## UNSTABLE
@ -26,4 +21,4 @@ type Pair
(Pair 1 2).map (+1) == (Pair 2 3)
map : (Any -> Any) -> Pair
map self fun =
Pair_Data (fun self.first) (fun self.second)
Pair.Value (fun self.first) (fun self.second)

View File

@ -1,106 +1,100 @@
from Standard.Base import all
import project.Data.Numbers.Number
import project.Data.Statistics.Statistic
import project.Data.Text.Text
import project.Data.Vector.Vector
import project.Nothing.Nothing
from project.Error.Common import Error, Panic, Illegal_Argument_Error_Data, Illegal_Argument_Error
polyglot java import org.enso.base.statistics.Regression
polyglot java import org.enso.base.statistics.FitError
# TODO Dubious constructor export
from project.Data.Regression.Model import all
from project.Data.Regression.Model export all
type Model
## Fit a line (y = A x + B) to the data with an optional fixed intercept.
Linear_Model (intercept:Number|Nothing=Nothing)
Linear (intercept:Number|Nothing=Nothing)
## Fit a exponential line (y = A exp(B x)) to the data with an optional fixed intercept.
Exponential_Model (intercept:Number|Nothing=Nothing)
Exponential (intercept:Number|Nothing=Nothing)
## Fit a logarithmic line (y = A log x + B) to the data.
Logarithmic_Model
Logarithmic
## Fit a power series (y = A x ^ B) to the data.
Power_Model
Power
## PRIVATE
Computes the R Squared value for a model and returns a new instance.
fitted_model_with_r_squared : Any -> Number -> Number -> Vector -> Vector -> Fitted_Model
fitted_model_with_r_squared constructor a b known_xs known_ys =
model = constructor a b
r_squared = Statistic.compute known_ys (Statistic.R_Squared (known_xs.map model.predict))
constructor a b r_squared
## PRIVATE
Computes the natural log series as long as all values are positive.
ln_series : Vector -> Vector ! Illegal_Argument_Error
ln_series xs series_name="Values" =
ln_with_panic x = if x.is_nothing then Nothing else
if x <= 0 then Panic.throw (Illegal_Argument_Error_Data (series_name + " must be positive.")) else x.ln
Panic.recover Illegal_Argument_Error_Data <| xs.map ln_with_panic
## Use Least Squares to fit a line to the data.
fit_least_squares : Vector -> Vector -> Model -> Fitted_Model ! Illegal_Argument_Error | Fit_Error
fit_least_squares known_xs known_ys model=Linear_Model =
fit_least_squares known_xs known_ys model=Model.Linear =
Illegal_Argument_Error.handle_java_exception <| Fit_Error.handle_java_exception <| case model of
Linear_Model intercept ->
Model.Linear intercept ->
fitted = if intercept.is_nothing then Regression.fit_linear known_xs.to_array known_ys.to_array else
Regression.fit_linear known_xs.to_array known_ys.to_array intercept
Fitted_Linear_Model fitted.slope fitted.intercept fitted.rSquared
Exponential_Model intercept ->
log_ys = ln_series known_ys "Y-values"
Fitted_Model.Linear fitted.slope fitted.intercept fitted.rSquared
Model.Exponential intercept ->
log_ys = Model.ln_series known_ys "Y-values"
fitted = if intercept.is_nothing then Regression.fit_linear known_xs.to_array log_ys.to_array else
Regression.fit_linear known_xs.to_array log_ys.to_array intercept.ln
fitted_model_with_r_squared Fitted_Exponential_Model fitted.intercept.exp fitted.slope known_xs known_ys
Logarithmic_Model ->
log_xs = ln_series known_xs "X-values"
Model.fitted_model_with_r_squared Fitted_Model.Exponential fitted.intercept.exp fitted.slope known_xs known_ys
Model.Logarithmic ->
log_xs = Model.ln_series known_xs "X-values"
fitted = Regression.fit_linear log_xs.to_array known_ys.to_array
fitted_model_with_r_squared Fitted_Logarithmic_Model fitted.slope fitted.intercept known_xs known_ys
Power_Model ->
log_xs = ln_series known_xs "X-values"
log_ys = ln_series known_ys "Y-values"
Model.fitted_model_with_r_squared Fitted_Model.Logarithmic fitted.slope fitted.intercept known_xs known_ys
Model.Power ->
log_xs = Model.ln_series known_xs "X-values"
log_ys = Model.ln_series known_ys "Y-values"
fitted = Regression.fit_linear log_xs.to_array log_ys.to_array
fitted_model_with_r_squared Fitted_Power_Model fitted.intercept.exp fitted.slope known_xs known_ys
Model.fitted_model_with_r_squared Fitted_Model.Power fitted.intercept.exp fitted.slope known_xs known_ys
_ -> Error.throw (Illegal_Argument_Error_Data "Unsupported model.")
# TODO Dubious constructor export
from project.Data.Regression.Fitted_Model import all
from project.Data.Regression.Fitted_Model export all
type Fitted_Model
## Fitted line (y = slope x + intercept).
Fitted_Linear_Model slope:Number intercept:Number r_squared:Number=0.0
Linear slope:Number intercept:Number r_squared:Number=0.0
## Fitted exponential line (y = a exp(b x)).
Fitted_Exponential_Model a:Number b:Number r_squared:Number=0.0
Exponential a:Number b:Number r_squared:Number=0.0
## Fitted logarithmic line (y = a log x + b).
Fitted_Logarithmic_Model a:Number b:Number r_squared:Number=0.0
Logarithmic a:Number b:Number r_squared:Number=0.0
## Fitted power series (y = a x ^ b).
Fitted_Power_Model a:Number b:Number r_squared:Number=0.0
Power a:Number b:Number r_squared:Number=0.0
## Display the fitted line.
to_text : Text
to_text self =
equation = case self of
Fitted_Linear_Model slope intercept _ -> slope.to_text + " * X + " + intercept.to_text
Fitted_Exponential_Model a b _ -> a.to_text + " * (" + b.to_text + " * X).exp"
Fitted_Logarithmic_Model a b _ -> a.to_text + " * X.ln + " + b.to_text
Fitted_Power_Model a b _ -> a.to_text + " * X ^ " + b.to_text
Fitted_Model.Linear slope intercept _ -> slope.to_text + " * X + " + intercept.to_text
Fitted_Model.Exponential a b _ -> a.to_text + " * (" + b.to_text + " * X).exp"
Fitted_Model.Logarithmic a b _ -> a.to_text + " * X.ln + " + b.to_text
Fitted_Model.Power a b _ -> a.to_text + " * X ^ " + b.to_text
"Fitted_Model(" + equation + ")"
## Use the model to predict a value.
predict : Number -> Number
predict self x = case self of
Fitted_Linear_Model slope intercept _ -> slope * x + intercept
Fitted_Exponential_Model a b _ -> a * (b * x).exp
Fitted_Logarithmic_Model a b _ -> a * x.ln + b
Fitted_Power_Model a b _ -> a * (x ^ b)
Fitted_Model.Linear slope intercept _ -> slope * x + intercept
Fitted_Model.Exponential a b _ -> a * (b * x).exp
Fitted_Model.Logarithmic a b _ -> a * x.ln + b
Fitted_Model.Power a b _ -> a * (x ^ b)
_ -> Error.throw (Illegal_Argument_Error_Data "Unsupported model.")
## PRIVATE
Computes the R Squared value for a model and returns a new instance.
fitted_model_with_r_squared : Any -> Number -> Number -> Vector -> Vector -> Fitted_Model
fitted_model_with_r_squared constructor a b known_xs known_ys =
model = constructor a b
r_squared = known_ys.compute (Statistics.R_Squared (known_xs.map model.predict))
constructor a b r_squared
## PRIVATE
Computes the natural log series as long as all values are positive.
ln_series : Vector -> Vector ! Illegal_Argument_Error
ln_series xs series_name="Values" =
ln_with_panic x = if x.is_nothing then Nothing else
if x <= 0 then Panic.throw (Illegal_Argument_Error_Data (series_name + " must be positive.")) else x.ln
Panic.recover Illegal_Argument_Error_Data <| xs.map ln_with_panic
# TODO Dubious constructor export
from project.Data.Regression.Fit_Error import all
from project.Data.Regression.Fit_Error export all
## PRIVATE
An error thrown when the linear regression cannot be computed.
@ -108,7 +102,7 @@ from project.Data.Regression.Fit_Error export all
Arguments:
- message: The error message.
type Fit_Error
Fit_Error_Data message
Error message
## PRIVATE
@ -118,4 +112,4 @@ type Fit_Error
## PRIVATE
handle_java_exception =
Panic.catch_java FitError handler=(java_exception-> Error.throw (Fit_Error_Data java_exception.getMessage))
Panic.catch_java FitError handler=(java_exception-> Error.throw (Fit_Error.Error java_exception.getMessage))

View File

@ -1,10 +1,16 @@
from Standard.Base import Boolean, True, False, Nothing, Vector, Number, Any, Error, Array, Panic, Illegal_Argument_Error_Data, Illegal_Argument_Error, Unsupported_Argument_Types, Unsupported_Argument_Types_Data, Incomparable_Values_Error
import project.Data.Any.Any
import project.Data.Array.Array
import project.Data.Ordering.Comparator
import project.Data.Numbers.Number
import project.Data.Vector.Vector
import project.Meta
import project.Nothing.Nothing
from Standard.Base.Data.Vector import Empty_Error
from project.Data.Boolean import Boolean, True, False
from project.Data.Range import all
from project.Data.Vector import Empty_Error
import Standard.Base.Data.Ordering.Comparator
import Standard.Base.Data.Statistics.Rank_Method
from project.Error.Common import Error, Panic, Illegal_Argument_Error_Data, Illegal_Argument_Error, Unsupported_Argument_Types, Unsupported_Argument_Types_Data, Incomparable_Values_Error
polyglot java import org.enso.base.statistics.Moments
polyglot java import org.enso.base.statistics.CountMinMax
@ -14,23 +20,46 @@ polyglot java import org.enso.base.statistics.Rank
polyglot java import java.lang.ClassCastException
polyglot java import java.lang.NullPointerException
# TODO Dubious constructor export
from project.Data.Statistics.Statistic import all
from project.Data.Statistics.Statistic export all
## Specifies how to handle ranking of equal values.
type Rank_Method
## Use the mean of all ranks for equal values.
Average
## Use the lowest of all ranks for equal values.
Minimum
## Use the highest of all ranks for equal values.
Maximum
## Use same rank value for equal values and next group is the immediate
following ranking number.
Dense
## Equal values are assigned the next rank in order that they occur.
Ordinal
## Assigns a rank to each value of data, dealing with equal values according to the method.
Arguments:
- data: Input data to rank.
compute : Vector -> Vector
compute self input =
java_method = case self of
Rank_Method.Minimum -> Rank.Method.MINIMUM
Rank_Method.Maximum -> Rank.Method.MAXIMUM
Rank_Method.Average -> Rank.Method.AVERAGE
Rank_Method.Ordinal -> Rank.Method.ORDINAL
Rank_Method.Dense -> Rank.Method.DENSE
report_nullpointer caught_panic = Error.throw (Illegal_Argument_Error_Data caught_panic.payload.cause.getMessage)
handle_nullpointer = Panic.catch NullPointerException handler=report_nullpointer
handle_classcast = Panic.catch ClassCastException handler=(_ -> Error.throw Incomparable_Values_Error)
handle_classcast <| handle_nullpointer <|
java_ranks = Rank.rank input.to_array Comparator.new java_method
Vector.from_polyglot_array java_ranks
type Statistic
## PRIVATE
Convert the Enso Statistic into Java equivalent.
to_moment_statistic : SingleValue
to_moment_statistic self = case self of
Sum -> Moments.SUM
Mean -> Moments.MEAN
Variance p -> if p then Moments.VARIANCE_POPULATION else Moments.VARIANCE
Standard_Deviation p -> if p then Moments.STANDARD_DEVIATION_POPULATION else Moments.STANDARD_DEVIATION
Skew p -> if p then Moments.SKEW_POPULATION else Moments.SKEW
Kurtosis -> Moments.KURTOSIS
_ -> Nothing
## Count the number of non-Nothing and non-NaN values.
Count
@ -89,98 +118,113 @@ type Statistic
- predicted: the series to compute the r_squared with.
R_Squared (predicted:Vector)
## Compute a single statistic on a vector like object.
## Compute a single statistic on a vector like object.
Arguments:
- data: Vector like object which has a `to_array` method.
- statistic: Statistic to calculate.
compute : Vector -> Statistic -> Any
compute data statistic=Statistic.Count =
Statistic.compute_bulk data [statistic] . first
Arguments:
- data: Vector like object which has a `to_array` method.
- statistic: Statistic to calculate.
compute : Vector -> Statistic -> Any
compute data statistic=Count =
compute_bulk data [statistic] . first
## Compute a set of statistics on a vector like object.
Arguments:
- data: Vector like object which has a `to_array` method.
- statistics: Set of statistics to calculate.
compute_bulk : Vector -> Vector Statistic -> Vector Any
compute_bulk data statistics=[Statistic.Count, Statistic.Sum] =
count_min_max = statistics.any s->((s.is_a Statistic.Count) || (s.is_a Statistic.Minimum) || (s.is_a Statistic.Maximum))
## Compute a set of statistics on a vector like object.
java_stats = statistics.map to_moment_statistic
skip_java_stats = java_stats.all s->s.is_nothing
Arguments:
- data: Vector like object which has a `to_array` method.
- statistics: Set of statistics to calculate.
compute_bulk : Vector -> [Statistic] -> [Any]
compute_bulk data statistics=[Count, Sum] =
count_min_max = statistics.any s->((s.is_a Count) || (s.is_a Minimum) || (s.is_a Maximum))
report_invalid _ =
statistics.map_with_index i->v->
if java_stats.at i . is_nothing then Nothing else
Error.throw (Illegal_Argument_Error_Data ("Can only compute " + v.to_text + " on numerical data sets."))
handle_unsupported = Panic.catch Unsupported_Argument_Types_Data handler=report_invalid
java_stats = statistics.map .to_moment_statistic
skip_java_stats = java_stats.all s->s.is_nothing
empty_map s = if (s == Statistic.Count) || (s == Statistic.Sum) then 0 else
if (s == Statistic.Minimum) || (s == Statistic.Maximum) then Error.throw Empty_Error else
Number.nan
report_invalid _ =
statistics.map_with_index i->v->
if java_stats.at i . is_nothing then Nothing else
Error.throw (Illegal_Argument_Error_Data ("Can only compute " + v.to_text + " on numerical data sets."))
handle_unsupported = Panic.catch Unsupported_Argument_Types_Data handler=report_invalid
if data.length == 0 then statistics.map empty_map else
count_min_max_values = if count_min_max then CountMinMax.new (CountMinMax.toObjectStream data.to_array) Comparator.new else Nothing
stats_array = if skip_java_stats then Nothing else
handle_unsupported <| Moments.compute data.to_array java_stats.to_array
empty_map s = if (s == Count) || (s == Sum) then 0 else
if (s == Minimum) || (s == Maximum) then Error.throw Empty_Error else
Number.nan
statistics.map_with_index i->s->case s of
Statistic.Count -> count_min_max_values.count
Statistic.Minimum ->
if count_min_max_values.comparatorError then (Error.throw Incomparable_Values_Error) else
count_min_max_values.minimum
Statistic.Maximum ->
if count_min_max_values.comparatorError then (Error.throw Incomparable_Values_Error) else
count_min_max_values.maximum
Statistic.Covariance s -> calculate_correlation_statistics data s . covariance
Statistic.Pearson s -> calculate_correlation_statistics data s . pearsonCorrelation
Statistic.Spearman s -> calculate_spearman_rank data s
Statistic.R_Squared s -> calculate_correlation_statistics data s . rSquared
_ -> stats_array.at i
if data.length == 0 then statistics.map empty_map else
count_min_max_values = if count_min_max then CountMinMax.new (CountMinMax.toObjectStream data.to_array) Comparator.new else Nothing
stats_array = if skip_java_stats then Nothing else
handle_unsupported <| Moments.compute data.to_array java_stats.to_array
## Calculate a variance-covariance matrix between the input series.
statistics.map_with_index i->s->case s of
Count -> count_min_max_values.count
Minimum ->
if count_min_max_values.comparatorError then (Error.throw Incomparable_Values_Error) else
count_min_max_values.minimum
Maximum ->
if count_min_max_values.comparatorError then (Error.throw Incomparable_Values_Error) else
count_min_max_values.maximum
Covariance s -> calculate_correlation_statistics data s . covariance
Pearson s -> calculate_correlation_statistics data s . pearsonCorrelation
Spearman s -> calculate_spearman_rank data s
R_Squared s -> calculate_correlation_statistics data s . rSquared
_ -> stats_array.at i
Arguments:
- data: The input data sets
covariance_matrix : Vector Vector -> Vector Vector
covariance_matrix data =
stats_vectors = calculate_correlation_statistics_matrix data
stats_vectors.map v->(v.map .covariance)
## Calculate a Pearson correlation matrix between the input series.
## Calculate a variance-covariance matrix between the input series.
Arguments:
- data: The input data sets
pearson_correlation : Vector Vector -> Vector Vector
pearson_correlation data =
stats_vectors = calculate_correlation_statistics_matrix data
stats_vectors.map v->(v.map .pearsonCorrelation)
Arguments:
- data: The input data sets
covariance_matrix : [Vector] -> [Vector]
covariance_matrix data =
stats_vectors = calculate_correlation_statistics_matrix data
stats_vectors.map v->(v.map .covariance)
## Calculate a Spearman Rank correlation matrix between the input series.
Arguments:
- data: The input data sets
spearman_correlation : Vector Vector -> Vector Vector
spearman_correlation data =
Panic.handle_wrapped_dataflow_error <|
output = Vector.new_builder data.length
## Calculate a Pearson correlation matrix between the input series.
0.up_to data.length . each i->
output.append <|
Vector.new data.length j->
if j == i then 1 else
if j < i then (output.at j . at i) else
Panic.throw_wrapped_if_error <|
calculate_spearman_rank (data.at i) (data.at j)
Arguments:
- data: The input data sets
pearson_correlation : [Vector] -> [Vector]
pearson_correlation data =
stats_vectors = calculate_correlation_statistics_matrix data
stats_vectors.map v->(v.map .pearsonCorrelation)
output.to_vector
## Assigns a rank to each value of data, dealing with equal values according to the method.
## Calculate a Spearman Rank correlation matrix between the input series.
Arguments:
- data: The input data sets
spearman_correlation : [Vector] -> [Vector]
spearman_correlation data =
Panic.handle_wrapped_dataflow_error <|
output = Vector.new_builder data.length
0.up_to data.length . each i->
output.append <|
Vector.new data.length j->
if j == i then 1 else
if j < i then (output.at j . at i) else
Panic.throw_wrapped_if_error <|
calculate_spearman_rank (data.at i) (data.at j)
output.to_vector
Arguments:
- data: Input data to rank.
- method: Method used to deal with equal values.
rank_data : Vector -> Rank_Method -> Vector
rank_data input method=Rank_Method.Average =
method.compute input
## PRIVATE
Convert the Enso Statistic into Java equivalent.
to_moment_statistic : Statistic -> MomentStatistic
to_moment_statistic s = case s of
Statistic.Sum -> Moments.SUM
Statistic.Mean -> Moments.MEAN
Statistic.Variance p -> if p then Moments.VARIANCE_POPULATION else Moments.VARIANCE
Statistic.Standard_Deviation p -> if p then Moments.STANDARD_DEVIATION_POPULATION else Moments.STANDARD_DEVIATION
Statistic.Skew p -> if p then Moments.SKEW_POPULATION else Moments.SKEW
Statistic.Kurtosis -> Moments.KURTOSIS
_ -> Nothing
## PRIVATE
wrap_java_call : Any -> Any
@ -190,66 +234,38 @@ wrap_java_call ~function =
handle_unsupported <| Illegal_Argument_Error.handle_java_exception <| function
## PRIVATE
Given two series, get a computed CorrelationStatistics object
calculate_correlation_statistics : Vector -> Vector -> CorrelationStatistics
calculate_correlation_statistics x_data y_data =
wrap_java_call <| CorrelationStatistics.compute x_data.to_array y_data.to_array
## PRIVATE
Given two series, get a compute the Spearman Rank correlation
calculate_spearman_rank : Vector -> Vector -> Decimal
calculate_spearman_rank x_data y_data =
wrap_java_call <| CorrelationStatistics.spearmanRankCorrelation x_data.to_array y_data.to_array
## PRIVATE
Given a set of series get CorrelationStatistics objects
calculate_correlation_statistics_matrix : [Vector] -> [CorrelationStatistics]
calculate_correlation_statistics_matrix : Vector Vector -> Vector CorrelationStatistics
calculate_correlation_statistics_matrix data =
data_array = Vector.new data.length i->(data.at i).to_array . to_array
stats_array = wrap_java_call <| CorrelationStatistics.computeMatrix data_array
Vector.new stats_array.length i->(Vector.from_polyglot_array (stats_array.at i))
## Compute a single statistic on the vector.
Arguments:
- statistic: Statistic to calculate.
Vector.compute : Statistic -> Any
Vector.compute self statistic=Count =
Vector.compute self statistic=Statistic.Count =
self.compute_bulk [statistic] . first
## Compute statistics on the vector.
Arguments:
- statistics: Set of statistics to calculate.
Vector.compute_bulk : [Statistic] -> [Any]
Vector.compute_bulk self statistics=[Count, Sum] =
compute_bulk self statistics
## Assigns a rank to each value of data, dealing with equal values according to the method.
Arguments:
- data: Input data to rank.
- method: Method used to deal with equal values.
rank_data : Vector -> Rank_Method -> Vector
rank_data input method=Rank_Method.Average =
java_method = case method of
Rank_Method.Minimum -> Rank.Method.MINIMUM
Rank_Method.Maximum -> Rank.Method.MAXIMUM
Rank_Method.Average -> Rank.Method.AVERAGE
Rank_Method.Ordinal -> Rank.Method.ORDINAL
Rank_Method.Dense -> Rank.Method.DENSE
report_nullpointer caught_panic = Error.throw (Illegal_Argument_Error_Data caught_panic.payload.cause.getMessage)
handle_nullpointer = Panic.catch NullPointerException handler=report_nullpointer
handle_classcast = Panic.catch ClassCastException handler=(_ -> Error.throw Incomparable_Values_Error)
handle_classcast <| handle_nullpointer <|
java_ranks = Rank.rank input.to_array Comparator.new java_method
Vector.from_polyglot_array java_ranks
Vector.compute_bulk : Vector Statistic -> Vector Any
Vector.compute_bulk self statistics=[Statistic.Count, Statistic.Sum] =
Statistic.compute_bulk self statistics

View File

@ -1,22 +0,0 @@
# TODO Dubious constructor export
from project.Data.Statistics.Rank_Method.Rank_Method import all
from project.Data.Statistics.Rank_Method.Rank_Method export all
## Specifies how to handle ranking of equal values.
type Rank_Method
## Use the mean of all ranks for equal values.
Average
## Use the lowest of all ranks for equal values.
Minimum
## Use the highest of all ranks for equal values.
Maximum
## Use same rank value for equal values and next group is the immediate
following ranking number.
Dense
## Equal values are assigned the next rank in order that they occur.
Ordinal

View File

@ -1,4 +1,5 @@
import project.Data.Any.Any
import project.Data.Locale.Locale
import project.Data.Ordering.Ordering
import project.Meta
@ -69,6 +70,56 @@ type Text
comparison_result = Text_Utils.compare_normalized self that
Ordering.from_sign comparison_result
## Checks whether `self` is equal to `that`, ignoring the case of the texts.
Arguments:
- that: The text to compare `self` for case-insensitive equality with.
Two texts are considered equal ignoring case if they are of the same length
and corresponding characters are equal ignoring case.
! Unicode Equality
The definition of equality includes Unicode canonicalization. I.e. two
texts are equal if they are identical after canonical decomposition. This
ensures that different ways of expressing the same character in the
underlying binary representation are considered equal.
> Example
The string 'É' (i.e. the character U+00C9, LATIN CAPITAL LETTER E WITH
ACUTE) is equal ignoring case to the string 'é' (i.e. the character U+00E9,
LATIN SMALL LETTER E WITH ACUTE), which is canonically the same as the
string 'e\u0301' (i.e. the letter `e` followed by U+0301, COMBINING ACUTE
ACCENT). Therefore:
(('É' . equals_ignore_case 'é') && ('é' . equals_ignore_case 'e\u0301')) == True
equals_ignore_case : Text -> Locale -> Boolean
equals_ignore_case self that locale=Locale.default =
Text_Utils.equals_ignore_case self that locale.java_locale
## ADVANCED
PRIVATE
UNSTABLE
Unifies the case of all letters in the text, generating a key which can be
used to perform case-insensitive comparisons.
to_case_insensitive_key : Locale -> Text
to_case_insensitive_key self locale=Locale.default =
Text_Utils.case_insensitive_key self locale.java_locale
## Compare two texts to discover their ordering.
Arguments:
- that: The text to order `self` with respect to.
> Example
Checking how "a" orders in relation to "b".
"a".compare_to_ignore_case "b"
compare_to_ignore_case : Text -> Locale -> Ordering
compare_to_ignore_case self that locale=Locale.default =
if that.is_nothing then Error.throw (Type_Error_Data Text that "that") else
comparison_result = Text_Utils.compare_normalized_ignoring_case self that locale.java_locale
Ordering.from_sign comparison_result
## ALIAS Check Emptiness
Check if `self` is empty.

View File

@ -1,9 +1,5 @@
from Standard.Base import all
# TODO Dubious constructor export
from project.Data.Text.Case_Sensitivity.Case_Sensitivity import all
from project.Data.Text.Case_Sensitivity.Case_Sensitivity export all
type Case_Sensitivity
## Represents a case-sensitive comparison mode.
Sensitive

View File

@ -423,58 +423,6 @@ Text.lines : Boolean -> Vector Text
Text.lines self keep_endings=False =
Vector.from_polyglot_array (Text_Utils.split_on_lines self keep_endings)
## Checks whether `self` is equal to `that`, ignoring the case of the texts.
Arguments:
- that: The text to compare `self` for case-insensitive equality with.
Two texts are considered equal ignoring case if they are of the same length
and corresponding characters are equal ignoring case.
! Unicode Equality
The definition of equality includes Unicode canonicalization. I.e. two
texts are equal if they are identical after canonical decomposition. This
ensures that different ways of expressing the same character in the
underlying binary representation are considered equal.
> Example
The string 'É' (i.e. the character U+00C9, LATIN CAPITAL LETTER E WITH
ACUTE) is equal ignoring case to the string 'é' (i.e. the character U+00E9,
LATIN SMALL LETTER E WITH ACUTE), which is canonically the same as the
string 'e\u0301' (i.e. the letter `e` followed by U+0301, COMBINING ACUTE
ACCENT). Therefore:
(('É' . equals_ignore_case 'é') && ('é' . equals_ignore_case 'e\u0301')) == True
Text.equals_ignore_case : Text -> Locale -> Boolean
Text.equals_ignore_case self that locale=Locale.default =
Text_Utils.equals_ignore_case self that locale.java_locale
## ADVANCED
PRIVATE
UNSTABLE
Unifies the case of all letters in the text, generating a key which can be
used to perform case-insensitive comparisons.
Text.to_case_insensitive_key : Locale -> Text
Text.to_case_insensitive_key self locale=Locale.default =
Text_Utils.case_insensitive_key self locale.java_locale
## Compare two texts to discover their ordering.
Arguments:
- that: The text to order `self` with respect to.
> Example
Checking how "a" orders in relation to "b".
"a".compare_to_ignore_case "b"
Text.compare_to_ignore_case : Text -> Locale -> Ordering
Text.compare_to_ignore_case self that locale=Locale.default =
if that.is_nothing then Error.throw (Type_Error_Data Text that "that") else
comparison_result = Text_Utils.compare_normalized_ignoring_case self that locale.java_locale
if comparison_result == 0 then Ordering.Equal else
if comparison_result < 0 then Ordering.Less else
Ordering.Greater
## Inserts text value at the specified index.
Arguments:

View File

@ -55,7 +55,7 @@ type Match_Matrix
unmatched_criteria self =
checked_criteria = self.criteria.map_with_index j-> criterion->
has_matches = self.does_criterion_match_anything j
Pair_Data has_matches criterion
Pair.Value has_matches criterion
checked_criteria.filter (p -> p.first.not) . map .second
## PRIVATE
@ -109,4 +109,4 @@ internal_match_criteria_implementation matcher objects criteria reorder=False na
select_matching_indices match_matrix.is_object_matched_by_anything
result = selected_indices.map objects.at
Pair_Data result unmatched_criteria
Pair.Value result unmatched_criteria

View File

@ -101,6 +101,6 @@ type Regex_Matcher
Selects pairs matching their first element with the provided criteria and
ordering the result according to the order of criteria that matched them.
Text_Matcher.match_criteria [Pair_Data "foo" 42, Pair_Data "bar" 33, Pair_Data "baz" 10, Pair_Data "foo" 0, Pair_Data 10 10] ["bar", "foo"] reorder=True name_mapper=_.name == [Pair_Data "bar" 33, Pair_Data "foo" 42, Pair_Data "foo" 0]
Text_Matcher.match_criteria [Pair.Value "foo" 42, Pair.Value "bar" 33, Pair.Value "baz" 10, Pair.Value "foo" 0, Pair.Value 10 10] ["bar", "foo"] reorder=True name_mapper=_.name == [Pair.Value "bar" 33, Pair.Value "foo" 42, Pair.Value "foo" 0]
match_criteria : Vector Any -> Vector Text -> Boolean -> (Any -> Text) -> Problem_Behavior -> Vector Any ! No_Matches_Found
match_criteria self = match_criteria_implementation self

View File

@ -174,10 +174,10 @@ range_to_char_indices text range = if range.step != 1 then Error.throw (Illegal_
end = if range.end == Nothing then len else (if range.end < 0 then range.end + len else range.end)
is_valid = (Range_Data 0 len+1).contains
case (Pair_Data (is_valid start) (is_valid end)) of
Pair_Data False _ -> Error.throw (Index_Out_Of_Bounds_Error_Data range.start len)
Pair_Data True False -> Error.throw (Index_Out_Of_Bounds_Error_Data range.end len)
Pair_Data True True ->
case (Pair.Value (is_valid start) (is_valid end)) of
Pair.Value False _ -> Error.throw (Index_Out_Of_Bounds_Error_Data range.start len)
Pair.Value True False -> Error.throw (Index_Out_Of_Bounds_Error_Data range.end len)
Pair.Value True True ->
if start>=end then (Range_Data 0 0) else
iterator = BreakIterator.getCharacterInstance
iterator.setText text

View File

@ -67,6 +67,6 @@ type Text_Matcher
Selects pairs matching their first element with the provided criteria and
ordering the result according to the order of criteria that matched them.
Text_Matcher.match_criteria [Pair_Data "foo" 42, Pair_Data "bar" 33, Pair_Data "baz" 10, Pair_Data "foo" 0, Pair_Data 10 10] ["bar", "foo"] reorder=True name_mapper=_.name == [Pair_Data "bar" 33, Pair_Data "foo" 42, Pair_Data "foo" 0]
Text_Matcher.match_criteria [Pair.Value "foo" 42, Pair.Value "bar" 33, Pair.Value "baz" 10, Pair.Value "foo" 0, Pair.Value 10 10] ["bar", "foo"] reorder=True name_mapper=_.name == [Pair.Value "bar" 33, Pair.Value "foo" 42, Pair.Value "foo" 0]
match_criteria : Vector Any -> Vector Text -> Boolean -> (Any -> Text) -> Problem_Behavior -> Vector Any ! No_Matches_Found
match_criteria self = match_criteria_implementation self

View File

@ -137,8 +137,8 @@ find_sub_range_end text predicate =
iterator.setText text
loop index start end =
if end == -1 then (Pair.Pair_Data Nothing start) else
if predicate index start end then (Pair.Pair_Data start end) else
if end == -1 then (Pair.Value Nothing start) else
if predicate index start end then (Pair.Value start end) else
@Tail_Call loop (index + 1) end iterator.next
loop 0 0 iterator.next

View File

@ -1,11 +1,25 @@
from Standard.Base import all
import project.Data.Any.Any
import project.Data.Json.Json
import project.Data.Locale.Locale
import project.Data.Numbers.Integer
import project.Data.Ordering.Ordering
import project.Data.Text.Text
import project.Data.Time.Date_Period.Date_Period
import project.Data.Time.Date_Time
import project.Data.Time.Day_Of_Week.Day_Of_Week
import project.Data.Time.Day_Of_Week_From
import project.Data.Time.Duration.Duration
import project.Data.Time.Period.Period
import project.Data.Time.Time_Of_Day.Time_Of_Day
import project.Data.Time.Time_Zone.Time_Zone
import project.Data.Vector.Vector
import project.Math
import project.Meta
import project.Nothing.Nothing
import project.Polyglot
import project.Data.Time.Duration
import project.Data.Time.Period
import project.Data.Time.Date_Period
import Standard.Base.Polyglot
from Standard.Base.Error.Common import Error, Panic, Time_Error, Time_Error_Data, Polyglot_Error_Data, unimplemented
from project.Data.Boolean import Boolean, True, False
from project.Error.Common import Error, Panic, Type_Error_Data, Time_Error, Time_Error_Data, Polyglot_Error_Data, Illegal_Argument_Error, Illegal_Argument_Error_Data, unimplemented
polyglot java import org.enso.base.Time_Utils
polyglot java import java.time.temporal.ChronoField
@ -13,61 +27,6 @@ polyglot java import java.time.temporal.IsoFields
polyglot java import java.time.DateTimeException
polyglot java import java.lang.ArithmeticException
## Obtains the current date from the system clock in the system timezone.
> Example
Get the current date.
example_now = Date.now
now : Date
now = @Builtin_Method "Date.now"
## ALIAS Current Date
Obtains the current date from the system clock in the system timezone.
> Example
Get the current date.
example_today = Date.today
today : Date
today = now
## Constructs a new Date from a year, month, and day.
Arguments:
- year: The year to represent.
- month: The month-of-year to represent, from 1 (January) to 12 (December).
- day: The day-of-month to represent, from 1 to 31. It must be valid for the
year and month.
Returns a `Time_Error` if the provided time is not valid.
> Example
Create a new local date at Unix epoch.
from Standard.Base import Date
example_new = Date.new 1970
> Example
Get the local date of 5th August 1986.
example_new = Date.new 1986 8 5
new : Integer -> Integer -> Integer -> Date ! Time_Error
new year (month = 1) (day = 1) =
## TODO This is left using the old error handling approach, because
magically, changing this to the `catch_java` (which is now preferred way
of catching Polyglot_Errors) lead to the "should format local date using
provided pattern" test failing because it called the `LocalDate.format`
instead of Enso format. Hopefully this will be fixed with
https://github.com/enso-org/enso/pull/3559
Then this should be switched to use `Panic.catch_java`.
Panic.recover Any (new_builtin year month day) . catch Any e-> case e of
Polyglot_Error_Data err -> Error.throw (Time_Error_Data err.getMessage)
ex -> ex
## PRIVATE
Constructs a new Date from a year, month, and day.
@ -81,78 +40,6 @@ new year (month = 1) (day = 1) =
new_builtin : Integer -> Integer -> Integer -> Date
new_builtin year month day = @Builtin_Method "Date.new_builtin"
## ALIAS Date from Text
Converts text containing a date into a Date object.
Arguments:
- text: The text to try and parse as a date.
- pattern: An optional pattern describing how to parse the text.
Returns a `Time_Error` if the provided `text` cannot be parsed using the
provided `pattern`.
? Pattern Syntax
Patterns are based on a simple sequence of letters and symbols. For
example, "d MMM yyyy" will format "2011-12-03" as "3 Dec 2011".
? Default Date Formatting
Unless you provide a custom format, the text must represent a valid date
that can be parsed using the ISO-8601 extended local date format. The
format consists of:
- Four digits or more for the year. Years in the range 0000 to 9999
will be pre-padded by zero to ensure four digits. Years outside
that range will have a prefixed positive or negative symbol.
- A dash
- Two digits for the month-of-year. This is pre-padded by zero to ensure
two digits.
- A dash
- Two digits for the day-of-month. This is pre-padded by zero to ensure two
digits.
> Example
Parse the date of 23rd December 2020.
from Standard.Base import Date
example_parse = Date.parse "2020-12-23"
> Example
Recover from an error due to a wrong format.
from Standard.Base import Date
from Standard.Base.Error.Common import Time_Error
example_parse_err = Date.parse "my birthday" . catch Time_Error _->
Date.new 2000 1 1
> Example
Parse "1999-1-1" as Date using a custom format.
from Standard.Base import Date
example_parse = Date.parse "1999-1-1" "yyyy-M-d"
> Example
Recover from the parse error.
from Standard.Base import Date
from Standard.Base.Error.Common import Time_Error
example_parse_err =
date = Date.parse "1999-1-1" "yyyy-MM-dd"
date.catch Time_Error (_->Date.new 2000 1 1)
parse : Text -> (Text | Nothing) -> Date ! Time_Error
parse text pattern=Nothing =
result = Panic.recover Any <| case pattern of
Nothing -> parse_builtin text 0
_ : Text -> parse_builtin text pattern
_ -> Panic.throw (Time_Error_Data "An invalid pattern was provided.")
result . map_error <| case _ of
Polyglot_Error_Data err -> Time_Error_Data err.getMessage
ex -> ex
## PRIVATE
Converts text containing a date into a Date object.
@ -193,6 +80,132 @@ parse_builtin text pattern = @Builtin_Method "Date.parse_builtin"
offset or timezone.
@Builtin_Type
type Date
## Obtains the current date from the system clock in the system timezone.
> Example
Get the current date.
example_now = Date.now
now : Date
now = @Builtin_Method "Date.now"
## ALIAS Current Date
Obtains the current date from the system clock in the system timezone.
> Example
Get the current date.
example_today = Date.today
today : Date
today = Date.now
## Constructs a new Date from a year, month, and day.
Arguments:
- year: The year to represent.
- month: The month-of-year to represent, from 1 (January) to 12 (December).
- day: The day-of-month to represent, from 1 to 31. It must be valid for the
year and month.
Returns a `Time_Error` if the provided time is not valid.
> Example
Create a new local date at Unix epoch.
from Standard.Base import Date
example_new = Date.new 1970
> Example
Get the local date of 5th August 1986.
example_new = Date.new 1986 8 5
new : Integer -> Integer -> Integer -> Date ! Time_Error
new year (month = 1) (day = 1) =
## TODO This is left using the old error handling approach, because
magically, changing this to the `catch_java` (which is now preferred way
of catching Polyglot_Errors) lead to the "should format local date using
provided pattern" test failing because it called the `LocalDate.format`
instead of Enso format. Hopefully this will be fixed with
https://github.com/enso-org/enso/pull/3559
Then this should be switched to use `Panic.catch_java`.
Panic.recover Any (new_builtin year month day) . catch Any e-> case e of
Polyglot_Error_Data err -> Error.throw (Time_Error_Data err.getMessage)
ex -> ex
## ALIAS Date from Text
Converts text containing a date into a Date object.
Arguments:
- text: The text to try and parse as a date.
- pattern: An optional pattern describing how to parse the text.
Returns a `Time_Error` if the provided `text` cannot be parsed using the
provided `pattern`.
? Pattern Syntax
Patterns are based on a simple sequence of letters and symbols. For
example, "d MMM yyyy" will format "2011-12-03" as "3 Dec 2011".
? Default Date Formatting
Unless you provide a custom format, the text must represent a valid date
that can be parsed using the ISO-8601 extended local date format. The
format consists of:
- Four digits or more for the year. Years in the range 0000 to 9999
will be pre-padded by zero to ensure four digits. Years outside
that range will have a prefixed positive or negative symbol.
- A dash
- Two digits for the month-of-year. This is pre-padded by zero to ensure
two digits.
- A dash
- Two digits for the day-of-month. This is pre-padded by zero to ensure two
digits.
> Example
Parse the date of 23rd December 2020.
from Standard.Base import Date
example_parse = Date.parse "2020-12-23"
> Example
Recover from an error due to a wrong format.
from Standard.Base import Date
from Standard.Base.Error.Common import Time_Error
example_parse_err = Date.parse "my birthday" . catch Time_Error _->
Date.new 2000 1 1
> Example
Parse "1999-1-1" as Date using a custom format.
from Standard.Base import Date
example_parse = Date.parse "1999-1-1" "yyyy-M-d"
> Example
Recover from the parse error.
from Standard.Base import Date
from Standard.Base.Error.Common import Time_Error
example_parse_err =
date = Date.parse "1999-1-1" "yyyy-MM-dd"
date.catch Time_Error (_->Date.new 2000 1 1)
parse : Text -> (Text | Nothing) -> Date ! Time_Error
parse text pattern=Nothing =
result = Panic.recover Any <| case pattern of
Nothing -> parse_builtin text 0
_ : Text -> parse_builtin text pattern
_ -> Panic.throw (Time_Error_Data "An invalid pattern was provided.")
result . map_error <| case _ of
Polyglot_Error_Data err -> Time_Error_Data err.getMessage
ex -> ex
## Get the year field.
@ -315,14 +328,14 @@ type Date
end-exclusive manner), by default the end date is not included in the
count. This has the nice property that for example to count the work
days within the next week you can do
`date.work_days_until (date + (Period.days 7))` and it will look at the 7 days
starting from the current `date` and not 8 days. This also gives us a
property that `date.work_days_until (date.add_work_days N) == N` for
any non-negative N. On the other hand, sometimes we may want the end
date to be included in the count, so we provide the `include_end_date`
argument for that purpose. Setting it to `True` should make the result
consistent with the `NETWORKDAYS` function in Excel and similar
products.
`date.work_days_until (date + (Period.new days=7))` and it will look at
the 7 days starting from the current `date` and not 8 days. This also
gives us a property that
`date.work_days_until (date.add_work_days N) == N` for any non-negative
N. On the other hand, sometimes we may want the end date to be included
in the count, so we provide the `include_end_date` argument for that
purpose. Setting it to `True` should make the result consistent with
the `NETWORKDAYS` function in Excel and similar products.
> Example
Count the number of workdays between two dates.
@ -333,7 +346,7 @@ type Date
work_days_until : Date -> Vector Date -> Boolean -> Integer
work_days_until self end holidays=[] include_end_date=False =
Date_Time.ensure_in_epoch self <|
if include_end_date then self.work_days_until (end + (Period.days 1)) holidays include_end_date=False else
if include_end_date then self.work_days_until (end + (Period.new days=1)) holidays include_end_date=False else
weekdays = week_days_between self end
## We count holidays that occurred within the period, but not on the
weekends (as weekend days have already been excluded from the count).
@ -357,7 +370,7 @@ type Date
from Standard.Base import Date, Time_Of_Day, Time_Zone
example_to_time = Date.new 2020 2 3 . to_date_time Time_Of_Day.new Time_Zone.utc
to_date_time : Time_Of_Day -> Time_Zone -> Date_Time
to_date_time : Time_Of_Day -> Time_Zone -> Date_Time.Date_Time
to_date_time self (time_of_day=Time_Of_Day.new) (zone=Time_Zone.system) = self.to_time_builtin time_of_day zone
## Add the specified amount of time to this instant to get another date.
@ -370,13 +383,13 @@ type Date
import Standard.Base.Data.Time.Duration
example_add = Date.new 2020 + (Period.months 6)
example_add = Date.new 2020 + (Period.new months=6)
+ : Period -> Date ! (Time_Error | Illegal_Argument_Error)
+ self period =
case period of
_ : Period.Period ->
_ : Period ->
Time_Utils.date_adjust self Time_Utils.AdjustOp.PLUS period.internal_period
_ : Duration.Duration ->
_ : Duration ->
Error.throw (Time_Error_Data "Date does not support adding/subtracting Duration. Use Period instead.")
_ ->
Error.throw (Illegal_Argument_Error_Data "Illegal period argument")
@ -462,7 +475,7 @@ type Date
as long as needed to fall on a non-weekend non-holiday
workday.
go end_date =
if holidays.contains end_date || is_weekend end_date then @Tail_Call go (end_date + (Period.days 1)) else end_date
if holidays.contains end_date || is_weekend end_date then @Tail_Call go (end_date + (Period.new days=1)) else end_date
go end
False ->
## We shift a bit so that if shifting by N full weeks, the 'last'
@ -500,7 +513,7 @@ type Date
holiday, we need to ensure that we move it - this time
backwards - to the first workday.
go end_date =
if holidays.contains end_date || is_weekend end_date then @Tail_Call go (end_date - (Period.days 1)) else end_date
if holidays.contains end_date || is_weekend end_date then @Tail_Call go (end_date - (Period.new days=1)) else end_date
go end
## Subtract the specified amount of time from this instant to get another
@ -515,14 +528,14 @@ type Date
from Standard.Base import Date
import Standard.Base.Data.Time.Duration
example_subtract = Date.new 2020 - (Period.days 7)
example_subtract = Date.new 2020 - (Period.new days=7)
- : Period -> Date ! (Time_Error | Illegal_Argument_Error)
- self period =
case period of
_ : Period.Period ->
_ : Period ->
new_java_date = Time_Utils.date_adjust self Time_Utils.AdjustOp.MINUS period.internal_period
new new_java_date.year new_java_date.month new_java_date.day
_ : Duration.Duration ->
Date.new new_java_date.year new_java_date.month new_java_date.day
_ : Duration ->
Error.throw (Time_Error_Data "Date does not support adding/subtracting Duration. Use Period instead.")
_ ->
Error.throw (Illegal_Argument_Error_Data "Illegal period argument")
@ -593,6 +606,7 @@ type Date
## Compares two Dates for equality.
== : Date -> Boolean
== self that = case that of
Date -> Meta.is_same_object self Date
_ : Date ->
sign = Time_Utils.compare_to_localdate self that
0 == sign
@ -604,7 +618,7 @@ week_days_between start end =
starting point), the last week (containing the end point), and the full
weeks in between those. In some cases there may be no weeks in-between
and the first and last week can be the same week.
start_of_first_full_week = (start.start_of Date_Period.Week) + (Period.days 7)
start_of_first_full_week = (start.start_of Date_Period.Week) + (Period.new days=7)
start_of_last_week = end.start_of Date_Period.Week
full_weeks_between = (Time_Utils.days_between start_of_first_full_week start_of_last_week).div 7
case full_weeks_between < 0 of
@ -620,7 +634,7 @@ week_days_between start end =
_ -> days_between
False ->
# We count the days in the first week up until Friday - the weekend is not counted.
first_week_days = Math.max 0 (Time_Utils.days_between start (start_of_first_full_week - (Period.days 2)))
first_week_days = Math.max 0 (Time_Utils.days_between start (start_of_first_full_week - (Period.new days=2)))
# We count the days in the last week, not including the weekend.
last_week_days = Math.min (Time_Utils.days_between start_of_last_week end) 5
full_weeks_between * 5 + first_week_days + last_week_days

View File

@ -1,18 +1,19 @@
from Standard.Base import all
import project.Data.Time.Date.Date
import project.Data.Time.Date_Time.Date_Time
import project.Data.Time.Day_Of_Week.Day_Of_Week
from project.Data.Boolean import Boolean, True, False
polyglot java import org.enso.base.Time_Utils
polyglot java import org.enso.base.time.Date_Period_Utils
polyglot java import java.time.temporal.TemporalAdjuster
polyglot java import java.time.temporal.TemporalAdjusters
# TODO Dubious constructor export
from project.Data.Time.Date_Period.Date_Period import all
from project.Data.Time.Date_Period.Date_Period export all
## Represents a period of time longer on the scale of days (longer than a day).
## Represents a unit of time longer on the scale of days (longer than a day).
type Date_Period
Year
Quarter
Month
## Represents a 7-day week starting at a given day.
@ -31,18 +32,18 @@ type Date_Period
adjust_start : (Date | Date_Time) -> (Date | Date_Time)
adjust_start self date =
adjuster = case self of
Year -> TemporalAdjusters.firstDayOfYear
Quarter -> Date_Period_Utils.quarter_start
Month -> TemporalAdjusters.firstDayOfMonth
Week first_day -> TemporalAdjusters.previousOrSame first_day.to_java
Date_Period.Year -> TemporalAdjusters.firstDayOfYear
Date_Period.Quarter -> Date_Period_Utils.quarter_start
Date_Period.Month -> TemporalAdjusters.firstDayOfMonth
Date_Period.Week first_day -> TemporalAdjusters.previousOrSame first_day.to_java
(Time_Utils.utils_for date).apply_adjuster date adjuster
## PRIVATE
adjust_end : (Date | Date_Time) -> (Date | Date_Time)
adjust_end self date =
adjuster = case self of
Year -> TemporalAdjusters.lastDayOfYear
Quarter -> Date_Period_Utils.quarter_end
Month -> TemporalAdjusters.lastDayOfMonth
Week first_day -> Date_Period_Utils.end_of_week first_day.to_java
Date_Period.Year -> TemporalAdjusters.lastDayOfYear
Date_Period.Quarter -> Date_Period_Utils.quarter_end
Date_Period.Month -> TemporalAdjusters.lastDayOfMonth
Date_Period.Week first_day -> Date_Period_Utils.end_of_week first_day.to_java
(Time_Utils.utils_for date).apply_adjuster date adjuster

View File

@ -1,7 +1,25 @@
from Standard.Base import all
import project.Data.Any.Any
import project.Data.Json.Json
import project.Data.Locale.Locale
import project.Data.Numbers.Integer
import project.Data.Ordering.Ordering
import project.Data.Text.Text
import project.Data.Time.Date.Date
import project.Data.Time.Date_Period.Date_Period
import project.Data.Time.Day_Of_Week.Day_Of_Week
import project.Data.Time.Day_Of_Week_From
import project.Data.Time.Duration.Duration
import project.Data.Time.Period.Period
import project.Data.Time.Time_Of_Day.Time_Of_Day
import project.Data.Time.Time_Period.Time_Period
import project.Data.Time.Time_Zone.Time_Zone
import project.Data.Vector.Vector
import project.Meta
import project.Nothing.Nothing
import project.Warning
from Standard.Base.Data.Time import Duration, Period, Date_Period, Time_Period
from Standard.Base.Error.Common import Time_Error
from project.Data.Boolean import Boolean, True, False
from project.Error.Common import Error, Panic, Time_Error, Time_Error_Data, Type_Error_Data
polyglot java import java.time.format.DateTimeFormatter
polyglot java import java.time.temporal.ChronoField
@ -11,21 +29,6 @@ polyglot java import java.lang.ArithmeticException
polyglot java import org.enso.base.Time_Utils
## Obtains the start of the epoch for Enso.
? Start of the epoch
For Enso, start of the epoch is equal to the start of the Gregorian calendar,
which is on 15th October 1582.
Invoking some Gregorian calendar related functionalities, like `is_leap_year`,
is computed for all the date times, including those before an epoch start,
with today's rules. Trying to get some Gregorian calendar related properties
for a historical date time that is defined before the epoch is likely an error,
as the Gregorian calendar had not yet been introduced. Therefore, for such
historical date times, a warning is attached to the result.
enso_epoch_start : Date_Time
enso_epoch_start = @Builtin_Method "Date_Time.epoch_start"
## PRIVATE
unix_epoch_start : Date_Time
unix_epoch_start = Date_Time.new 1970
@ -35,63 +38,13 @@ ensure_in_epoch : (Date_Time | Date) -> (Any -> Any) -> Any
ensure_in_epoch date ~action =
datetime = case date of
x : Date_Time -> x
x : Date.Date -> x.to_date_time
x : Date -> x.to_date_time
ret_value = action
case enso_epoch_start <= datetime of
case Date_Time.enso_epoch_start <= datetime of
True -> ret_value
False ->
Warning.attach (Time_Error_Data ("Date time '" + datetime.to_text + "' start before Enso epoch")) ret_value
## ALIAS Current Time
Obtains the current date-time from the system clock in the system timezone.
> Example
Get the current time
from Standard.Base import Date_Time
example_now = Date_Time.now
now : Date_Time
now = @Builtin_Method "Date_Time.now"
## Obtains an instance of `Date_Time` from a year, month, day, hour, minute,
second, nanosecond and timezone.
Arguments:
- year: The year to represent, any Integer is valid.
- month: the month-of-year to represent, from 1 (January) to 12 (December)
- day: the day-of-month to represent, from 1 to 31 and must be valid for the
year and month
- hour: the hour-of-day to represent, from 0 to 23
- minute: the minute-of-hour to represent, from 0 to 59
- second: the second-of-minute to represent, from 0 to 59
- millisecond: the millisecond-of-second to represent, from 0 to 999.
- microsecond: the microsecond-of-second to represent, from 0 to 999,999.
- nanosecond: The nano-of-second to represent, from 0 to 999,999,999.
- zone: the timezone
Returns a `Time_Error` if the provided time cannot be represented.
> Example
Create a new zoned date time at Unix epoch.
from Standard.Base import Date_Time, Time_Zone
example_new = Date_Time.new 1970 (zone = Time_Zone.utc)
> Example
Get the 5 August 1986 at midnight.
from Standard.Base import Date_Time, Time_Zone
example_new = Date_Time.new 1986 8 5
new : Integer -> Integer -> Integer -> Integer -> Integer -> Integer -> Integer -> Integer -> Integer -> Time_Zone -> Date_Time ! Time_Error
new year (month = 1) (day = 1) (hour = 0) (minute = 0) (second = 0) (millisecond = 0) (microsecond = 0) (nanosecond = 0) (zone = Time_Zone.system) =
total_nanoseconds = nanosecond + microsecond * 1000 + millisecond * 1000000
Panic.catch_java Any (new_builtin year month day hour minute second total_nanoseconds zone) java_exception->
Error.throw (Time_Error_Data java_exception.getMessage)
## PRIVATE
Obtains an instance of `Date_Time` from a year, month, day, hour, minute,
@ -112,91 +65,6 @@ new year (month = 1) (day = 1) (hour = 0) (minute = 0) (second = 0) (millisecond
new_builtin : Integer -> Integer -> Integer -> Integer -> Integer -> Integer -> Integer -> Time_Zone -> Date_Time
new_builtin year month day hour minute second nanosecond zone = @Builtin_Method "Date_Time.new_builtin"
## ALIAS Time from Text
Obtains an instance of `Time` from a text such as
"2007-12-03T10:15:30+01:00 Europe/Paris".
Arguments:
- text: The text representing the time to be parsed.
- pattern: The pattern to use for parsing the input text.
- locale: The locale in which the pattern should be interpreted.
? Pattern Syntax
For the list of accepted symbols in pattern refer to `Time.format` doc.
? Default Date_Time Format
The text must represent a valid date-time and is parsed using the ISO-8601
extended offset date-time format to add the timezone. The section in square
brackets is not part of the ISO-8601 standard. The format consists of:
- The ISO offset date time.
- If the zone ID is not available or is a zone offset then the format is
complete.
- An open square bracket '['.
- The zone ID. This is not part of the ISO-8601 standard. Parsing is case
sensitive.
- A close square bracket ']'.
This method will return a `Time_Error` if the provided time cannot be parsed
using the above format.
> Example
Parse UTC time.
from Standard.Base import Date_Time
example_parse = Date_Time.parse "2020-10-01T04:11:12Z"
> Example
Parse UTC-04:00 time.
from Standard.Base import Date_Time
example_parse = Date_Time.parse "2020-10-01T04:11:12-04:00"
> Example
Parse UTC-04:00 time specifying New York timezone.
from Standard.Base import Date_Time
example_parse = Date_Time.parse "2020-10-01T04:11:12-04:00[America/New_York]"
> Example
Parse UTC-04:00 time with nanoseconds.
from Standard.Base import Date_Time
example_parse = Date_Time.parse "2020-10-01T04:11:12.177528-04:00"
> Example
Recover from the parse error.
from Standard.Base import Date_Time
example_parse = Date_Time.parse "2020-10-01" . catch Time_Error (_->Date_Time.now)
> Example
Parse "2020-05-06 04:30:20" as Date_Time
from Standard.Base import Date_Time
example_parse = Date_Time.parse "2020-05-06 04:30:20" "yyyy-MM-dd HH:mm:ss"
> Example
Parse "06 of May 2020 at 04:30AM" as Date_Time
from Standard.Base import Date_Time
example_parse =
Date_Time.parse "06 of May 2020 at 04:30AM" "dd 'of' MMMM yyyy 'at' hh:mma"
parse : Text -> Text | Nothing -> Locale -> Date_Time ! Time_Error
parse text pattern=Nothing locale=Locale.default =
Panic.catch_java Any handler=(java_exception -> Error.throw (Time_Error_Data java_exception.getMessage)) <|
case pattern of
Nothing -> parse_builtin text
_ : Text -> Time_Utils.parse_datetime_format text pattern locale.java_locale
## PRIVATE
Obtains an instance of `Time` from a text such as
@ -224,6 +92,156 @@ parse_builtin text = @Builtin_Method "Date_Time.parse_builtin"
the Europe/Paris timezone" can be stored as `Time`.
@Builtin_Type
type Date_Time
## Obtains the start of the epoch for Enso.
? Start of the epoch
For Enso, start of the epoch is equal to the start of the Gregorian calendar,
which is on 15th October 1582.
Invoking some Gregorian calendar related functionalities, like `is_leap_year`,
is computed for all the date times, including those before an epoch start,
with today's rules. Trying to get some Gregorian calendar related properties
for historical date times that are defined before the epoch is likely an error,
as the Gregorian calendar had not yet been introduced. Therefore, for such
historical date times, a warning is attached to the result.
enso_epoch_start : Date_Time
enso_epoch_start = @Builtin_Method "Date_Time.epoch_start"
## ALIAS Current Time
Obtains the current date-time from the system clock in the system timezone.
> Example
Get the current time
from Standard.Base import Date_Time
example_now = Date_Time.now
now : Date_Time
now = @Builtin_Method "Date_Time.now"
## Obtains an instance of `Date_Time` from a year, month, day, hour, minute,
second, nanosecond and timezone.
Arguments:
- year: The year to represent, any Integer is valid.
- month: the month-of-year to represent, from 1 (January) to 12 (December)
- day: the day-of-month to represent, from 1 to 31 and must be valid for the
year and month
- hour: the hour-of-day to represent, from 0 to 23
- minute: the minute-of-hour to represent, from 0 to 59
- second: the second-of-minute to represent, from 0 to 59
- millisecond: the millisecond-of-second to represent, from 0 to 999.
- microsecond: the microsecond-of-second to represent, from 0 to 999,999.
- nanosecond: The nano-of-second to represent, from 0 to 999,999,999.
- zone: the timezone
Returns a `Time_Error` if the provided time cannot be represented.
> Example
Create a new zoned date time at Unix epoch.
from Standard.Base import Date_Time, Time_Zone
example_new = Date_Time.new 1970 (zone = Time_Zone.utc)
> Example
Get the 5 August 1986 at midnight.
from Standard.Base import Date_Time, Time_Zone
example_new = Date_Time.new 1986 8 5
new : Integer -> Integer -> Integer -> Integer -> Integer -> Integer -> Integer -> Integer -> Integer -> Time_Zone -> Date_Time ! Time_Error
new year (month = 1) (day = 1) (hour = 0) (minute = 0) (second = 0) (millisecond = 0) (microsecond = 0) (nanosecond = 0) (zone = Time_Zone.system) =
total_nanoseconds = nanosecond + microsecond * 1000 + millisecond * 1000000
Panic.catch_java Any (new_builtin year month day hour minute second total_nanoseconds zone) java_exception->
Error.throw (Time_Error_Data java_exception.getMessage)
## ALIAS Time from Text
Obtains an instance of `Time` from a text such as
"2007-12-03T10:15:30+01:00 Europe/Paris".
Arguments:
- text: The text representing the time to be parsed.
- pattern: The pattern to use for parsing the input text.
- locale: The locale in which the pattern should be interpreted.
? Pattern Syntax
For the list of accepted symbols in pattern refer to `Time.format` doc.
? Default Date_Time Format
The text must represent a valid date-time and is parsed using the ISO-8601
extended offset date-time format to add the timezone. The section in square
brackets is not part of the ISO-8601 standard. The format consists of:
- The ISO offset date time.
- If the zone ID is not available or is a zone offset then the format is
complete.
- An open square bracket '['.
- The zone ID. This is not part of the ISO-8601 standard. Parsing is case
sensitive.
- A close square bracket ']'.
This method will return a `Time_Error` if the provided time cannot be parsed
using the above format.
> Example
Parse UTC time.
from Standard.Base import Date_Time
example_parse = Date_Time.parse "2020-10-01T04:11:12Z"
> Example
Parse UTC-04:00 time.
from Standard.Base import Date_Time
example_parse = Date_Time.parse "2020-10-01T04:11:12-04:00"
> Example
Parse UTC-04:00 time specifying New York timezone.
from Standard.Base import Date_Time
example_parse = Date_Time.parse "2020-10-01T04:11:12-04:00[America/New_York]"
> Example
Parse UTC-04:00 time with nanoseconds.
from Standard.Base import Date_Time
example_parse = Date_Time.parse "2020-10-01T04:11:12.177528-04:00"
> Example
Recover from the parse error.
from Standard.Base import Date_Time
example_parse = Date_Time.parse "2020-10-01" . catch Time_Error (_->Date_Time.now)
> Example
Parse "2020-05-06 04:30:20" as Date_Time
from Standard.Base import Date_Time
example_parse = Date_Time.parse "2020-05-06 04:30:20" "yyyy-MM-dd HH:mm:ss"
> Example
Parse "06 of May 2020 at 04:30AM" as Date_Time
from Standard.Base import Date_Time
example_parse =
Date_Time.parse "06 of May 2020 at 04:30AM" "dd 'of' MMMM yyyy 'at' hh:mma"
parse : Text -> Text | Nothing -> Locale -> Date_Time ! Time_Error
parse text pattern=Nothing locale=Locale.default =
Panic.catch_java Any handler=(java_exception -> Error.throw (Time_Error_Data java_exception.getMessage)) <|
case pattern of
Nothing -> parse_builtin text
_ : Text -> Time_Utils.parse_datetime_format text pattern locale.java_locale
## Get the year portion of the time.
@ -346,14 +364,14 @@ type Date_Time
See `Date_Time.enso_epoch_start`.
to_enso_epoch_seconds : Integer
to_enso_epoch_seconds self =
(Duration.between enso_epoch_start self).total_seconds.floor
(Duration.between Date_Time.enso_epoch_start self).total_seconds.floor
## Return the number of milliseconds from the Enso epoch start.
See `Date_Time.enso_epoch_start`.
to_enso_epoch_milliseconds : Integer
to_enso_epoch_milliseconds self =
(Duration.between enso_epoch_start self).total_milliseconds.floor
(Duration.between Date_Time.enso_epoch_start self).total_milliseconds.floor
## Convert this point in time to time of day, discarding the time zone
information.
@ -483,16 +501,16 @@ type Date_Time
> Example
Add 15 years and 3 hours to a zoned date time.
from Standard.Base import Date_Time
from Standard.Base.Data.Time import Duration, Period
from Standard.Base import Date_Time, Period
from Standard.Base.Data.Time import Duration
example_plus = Date_Time.new 2020 + (Period.years 15) + (Duration.hours 3)
example_plus = Date_Time.new 2020 + (Period.new years=15) + (Duration.new hours=3)
+ : (Duration | Period) -> Date_Time ! Time_Error
+ self amount =
case amount of
duration : Duration.Duration ->
duration : Duration ->
Panic.catch ArithmeticException (self.plus_builtin duration) (err -> Error.throw (Time_Error_Data err.getMessage))
period : Period.Period ->
period : Period ->
Time_Utils.datetime_adjust self Time_Utils.AdjustOp.PLUS period.internal_period
## Shift the date by the specified amount of business days.
@ -552,13 +570,13 @@ type Date_Time
from Standard.Base import Date_Time
import Standard.Base.Data.Time.Duration
example_minus = Date_Time.new 2020 - (Period.years 1) - (Period.months 9) - (Duration.hours 5)
example_minus = Date_Time.new 2020 - (Period.new years=1) - (Period.new months=9) - (Duration.new hours=5)
- : (Duration | Period) -> Date_Time ! Time_Error
- self amount =
result = case amount of
duration : Duration.Duration ->
duration : Duration ->
Panic.catch ArithmeticException (self.minus_builtin duration) (err -> Error.throw (Time_Error_Data err.getMessage))
period : Period.Period ->
period : Period ->
Time_Utils.datetime_adjust self Time_Utils.AdjustOp.MINUS period.internal_period
ensure_in_epoch result result
@ -672,7 +690,7 @@ type Date_Time
Compare two times for their ordering.
(Date_Time.new 2000).compare_to (Date_Time.new 2001)
compare_to : Time -> Ordering
compare_to : Date_Time -> Ordering
compare_to self that = case that of
_ : Date_Time ->
sign = Time_Utils.compare_to_zoneddatetime self that
@ -682,6 +700,7 @@ type Date_Time
## Compares two Date_Time for equality.
== : Date_Time -> Boolean
== self that = case that of
Date_Time -> Meta.is_same_object self Date_Time
_ : Date_Time ->
sign = Time_Utils.compare_to_zoneddatetime self that
0 == sign

View File

@ -1,4 +1,5 @@
from Standard.Base import all
import project.Data.Numbers.Integer
from project.Data.Boolean import Boolean, True, False
polyglot java import java.time.DayOfWeek

View File

@ -1,6 +1,8 @@
from Standard.Base import all
import project.Data.Numbers.Integer
import project.Data.Time.Day_Of_Week.Day_Of_Week
from project.Data.Boolean import Boolean, True, False
from project.Error.Common import Error, Illegal_Argument_Error_Data
## Convert from an integer to a Day_Of_Week

View File

@ -1,31 +1,23 @@
from Standard.Base import all
import Standard.Base.System
from Standard.Base.Data.Time import Period
import project.Data.Any.Any
import project.Data.Json.Json
import project.Data.Numbers.Integer
import project.Data.Ordering.Ordering
import project.Data.Pair.Pair
import project.Data.Time.Date_Time.Date_Time
import project.Data.Time.Period.Period
import project.Data.Vector.Vector
import project.Meta
import project.Runtime
import project.System
from project.Data.Boolean import Boolean, True, False
from project.Error.Common import Error, Panic, Type_Error_Data, Time_Error, Time_Error_Data, Illegal_Argument_Error, Illegal_Argument_Error_Data, Illegal_State_Error, Illegal_State_Error_Data
polyglot java import java.time.Duration as Java_Duration
polyglot java import java.time.Period as Java_Period
polyglot java import org.enso.base.Time_Utils
polyglot java import java.lang.ArithmeticException
## Create an interval representing the duration between two points in time.
Arguments:
- start_inclusive: The start datetime of the duration, included.
- end_exclusive: The end datetime of the duration, excluded.
- timezone_aware: Whether the duration between two given times should be
aware of the timezone, that can be set for start or end times.
> Example
An hour interval between two points in time.
from Standard.Base import Date_Time
import Standard.Base.Data.Time.Duration
example_between = Duration.between Date_Time.now (Date_Time.new 2010 10 20)
between : Date_Time -> Date_Time -> Boolean -> Duration
between start_inclusive end_exclusive timezone_aware=True =
between_builtin start_inclusive end_exclusive timezone_aware
## PRIVATE
Create an interval representing the duration between two points in time.
@ -39,25 +31,6 @@ between start_inclusive end_exclusive timezone_aware=True =
between_builtin : Date_Time -> Date_Time -> Boolean -> Duration
between_builtin start_inclusive end_exclusive timezone_aware = @Builtin_Method "Duration.between_builtin"
## Create a duration from time units.
Arguments:
- hours: hours
- minutes: minutes
- seconds: seconds
- milliseconds: milliseconds
- nanoseconds: nanoseconds
> Example
Duration of 2 hours.
import Standard.Base.Data.Time.Duration
example_duration = Duration.new hours=2
new : Integer -> Integer -> Integer -> Integer -> Integer -> Integer -> Duration
new hours=0 minutes=0 seconds=0 milliseconds=0 nanoseconds=0 =
new_builtin hours minutes seconds milliseconds nanoseconds
## PRIVATE
Create a duration from time units.
@ -72,63 +45,80 @@ new hours=0 minutes=0 seconds=0 milliseconds=0 nanoseconds=0 =
new_builtin : Integer -> Integer -> Integer -> Integer -> Integer -> Integer -> Duration
new_builtin hours minutes seconds milliseconds nanoseconds = @Builtin_Method "Duration.new_builtin"
## Create a Duration from hours.
hours : Integer -> Duration
hours h = new hours=h
## Create a Duration from minutes.
minutes : Integer -> Duration
minutes m = new minutes=m
## Create a Duration from seconds.
seconds : Integer -> Duration
seconds s = new seconds=s
## Create a Duration from milliseconds.
milliseconds : Integer -> Duration
milliseconds ms = new milliseconds=ms
## Create a Duration from nanoseconds.
nanoseconds : Integer -> Duration
nanoseconds ns = new nanoseconds=ns
## Create a zero (empty) duration.
> Example
Folding a vector of durations.
import Standard.Base.Data.Time.Duration
durations = [(Duration.seconds 1), (Duration.seconds 2), (Duration.seconds 3)]
example_sum = durations.fold Duration.zero (+)
zero : Duration
zero = new
## ADVANCED
Time the evaluation of a function, return a Pair of Duration and Result
Arguments:
- function: Function to execute.
time_execution : Any -> Pair Duration Any
time_execution ~function =
start = System.nano_time
result = Runtime.no_inline function
end = System.nano_time
duration = new nanoseconds=(end - start)
Pair_Data duration result
## PRIVATE
ensure_duration : Any -> Suspend (Any -> Any) -> Any ! (Time_Error | Illegal_Argument_Error)
ensure_duration : Any -> (Any -> Any) -> Any ! (Time_Error | Illegal_Argument_Error)
ensure_duration object ~action =
case object of
_ : Duration -> action
_ : Period.Period -> Error.throw (Time_Error_Data "Cannot use Period as a parameter")
_ : Period -> Error.throw (Time_Error_Data "Cannot use Period as a parameter")
x ->
Error.throw Illegal_Argument_Error_Data <|
"Expected Duration type, got: " + (Meta.get_qualified_type_name x)
@Builtin_Type
type Duration
## Create an interval representing the duration between two points in time.
Arguments:
- start_inclusive: The start datetime of the duration, included.
- end_exclusive: The end datetime of the duration, excluded.
- timezone_aware: Whether the duration between two given times should be
aware of the timezone, that can be set for start or end times.
> Example
An hour interval between two points in time.
from Standard.Base import Date_Time
import Standard.Base.Data.Time.Duration
example_between = Duration.between Date_Time.now (Date_Time.new 2010 10 20)
between : Date_Time -> Date_Time -> Boolean -> Duration
between start_inclusive end_exclusive timezone_aware=True =
between_builtin start_inclusive end_exclusive timezone_aware
## Create a duration from time units.
Arguments:
- hours: hours
- minutes: minutes
- seconds: seconds
- milliseconds: milliseconds
- nanoseconds: nanoseconds
> Example
Duration of 2 hours.
import Standard.Base.Data.Time.Duration
example_duration = Duration.new hours=2
new : Integer -> Integer -> Integer -> Integer -> Integer -> Integer -> Duration
new hours=0 minutes=0 seconds=0 milliseconds=0 nanoseconds=0 =
new_builtin hours minutes seconds milliseconds nanoseconds
## Create a zero (empty) duration.
> Example
Folding a vector of durations.
import Standard.Base.Data.Time.Duration
durations = [(Duration.new seconds=1), (Duration.new seconds=2), (Duration.new seconds=3)]
example_sum = durations.fold Duration.zero (+)
zero : Duration
zero = Duration.new
## ADVANCED
Time the evaluation of a function, return a Pair of Duration and Result
Arguments:
- function: Function to execute.
time_execution : Any -> Pair
time_execution ~function =
start = System.nano_time
result = Runtime.no_inline function
end = System.nano_time
duration = Duration.new nanoseconds=(end - start)
Pair.Value duration result
## Add the specified amount of time to this duration.
@ -140,14 +130,14 @@ type Duration
import Standard.Base.Data.Time.Duration
example_add = (Duration.minutes 3) + (Duration.seconds 6)
example_add = (Duration.new minutes=3) + (Duration.new seconds=6)
> Example
Add 12 hours to a duration of 30 minutes.
import Standard.Base.Data.Time.Duration
example_add = (Duration.minutes 30) + (Duration.hours 12)
example_add = (Duration.new minutes=30) + (Duration.new hours=12)
+ : Duration -> Duration ! Time_Error
+ self that =
ensure_duration that <|
@ -164,7 +154,7 @@ type Duration
import Standard.Base.Data.Time.Duration
example_subtract = (Duration.hours 6) - (Duration.minutes 30)
example_subtract = (Duration.new hours=6) - (Duration.new minutes=30)
- : Duration -> Duration ! Time_Error
- self that =
ensure_duration that <|
@ -181,7 +171,7 @@ type Duration
import Standard.Base.Data.Time.Duration
example_eq = (Duration.seconds 60).total_minutes == (Duration.minutes 1).total_minutes
example_eq = (Duration.new seconds=60).total_minutes == (Duration.new minutes=1).total_minutes
== : Duration -> Boolean
== self that =
case that of
@ -200,7 +190,7 @@ type Duration
example_compare_to =
duration_1 = (Duration.new hour=1)
duration_2 = (Duration.minutes 60) + (Duration.minutes 5)
duration_2 = (Duration.new minutes=60) + (Duration.new minutes=5)
duration_1.compare_to duration_2
compare_to : Duration -> Ordering
compare_to self that =
@ -301,7 +291,7 @@ type Duration
import Standard.Base.Data.Time.Duration
example_to_vec = (Duration.nanoseconds 800)).to_vector
example_to_vec = (Duration.new nanoseconds=800).to_vector
to_vector : Vector Integer
to_vector self = [self.hours, self.minutes, self.seconds, self.milliseconds, self.nanoseconds]
@ -312,7 +302,7 @@ type Duration
import Standard.Base.Data.Time.Duration
example_to_json = (Duration.seconds 10).to_json
example_to_json = (Duration.new seconds=10).to_json
to_json : Json
to_json self =
b = Vector.new_builder

View File

@ -1,63 +1,24 @@
from Standard.Base import all
import project.Data.Any.Any
import project.Data.Numbers.Integer
import project.Data.Time.Date.Date
import project.Data.Time.Duration.Duration
import project.Data.Text.Text
import project.Meta
import project.Nothing.Nothing
import Standard.Base.Data.Time.Duration
from project.Data.Boolean import Boolean, True, False
from project.Error.Common import Error, Panic, Time_Error, Time_Error_Data, Illegal_Argument_Error, Illegal_Argument_Error_Data, Incomparable_Values_Error
polyglot java import java.time.Period as Java_Period
polyglot java import java.time.DateTimeException
polyglot java import java.lang.ArithmeticException
## Create a Period representing the time interval between two dates.
Arguments:
- start_date_inclusive: The start date of the period, included.
- end_date_exclusive: The end date of the period, excluded.
> Example
Get a Period between 2022-10-21 and 2022-09-12
import Standard.Base.Data.Time.Period
example_period = Period.between (Date.new 2022 10 21) (Date.new 2022 9 12)
between : Date -> Date -> Period
between start_date_inclusive end_date_exclusive =
Period.Period_Data (Java_Period.between start_date_inclusive end_date_exclusive)
## Create a new Period from years, months and days.
Arguments:
- years: Amount of years.
- months: Amount of months.
- days: Amount of days.
> Example
Create a Period of 2 years and 5 days
import Standard.Base.Data.Time.Period
example_period = Period.new 2 0 5
new : Integer -> Integer -> Integer -> Period
new years=0 months=0 days=0 =
Period.Period_Data (Java_Period.of years months days)
## Create a new Period from days.
days : Integer -> Period
days d = new days=d
## Create a new Period from months.
months : Integer -> Period
months m = new months=m
## Create a new Period from years.
years : Integer -> Period
years y = new years=y
## PRIVATE
ensure_period : Any -> Suspend (Any -> Any) -> Text -> Any ! (Time_Error | Illegal_Argument_Error)
ensure_period : Any -> (Any -> Any) -> Text -> Any ! (Time_Error | Illegal_Argument_Error)
ensure_period object ~action error_msg="Cannot use Duration as a parameter" =
case object of
_ : Period -> action
_ : Duration.Duration ->
_ : Duration ->
Error.throw (Time_Error_Data error_msg)
x ->
Error.throw Illegal_Argument_Error_Data <|
@ -71,13 +32,45 @@ ensure_period object ~action error_msg="Cannot use Duration as a parameter" =
daylight saving time. This means that a Period of 1 day does not necessarily
have to be 24 hours of Duration.
type Period
## Create a Period representing the time interval between two dates.
Arguments:
- start_date_inclusive: The start date of the period, included.
- end_date_exclusive: The end date of the period, excluded.
> Example
Get a Period between 2022-10-21 and 2022-09-12
import Standard.Base.Data.Time.Period
example_period = Period.between (Date.new 2022 10 21) (Date.new 2022 9 12)
between : Date -> Date -> Period
between start_date_inclusive end_date_exclusive =
Period.Value (Java_Period.between start_date_inclusive end_date_exclusive)
## Create a new Period from years, months and days.
Arguments:
- years: Amount of years.
- months: Amount of months.
- days: Amount of days.
> Example
Create a Period of 2 years and 5 days
import Standard.Base.Data.Time.Period
example_period = Period.new 2 0 5
new : Integer -> Integer -> Integer -> Period
new years=0 months=0 days=0 =
Period.Value (Java_Period.of years months days)
## PRIVATE
Arguments:
- internal_period: An internal representation of period of type
java.time.Period.
Period_Data internal_period
Value internal_period
## Get the portion of the period expressed in years.
years : Integer
@ -102,11 +95,11 @@ type Period
import Standard.Base.Data.Time.Period
example_add = (Period.months 1) + (Period.days 1)
example_add = (Period.new months=1) + (Period.new days=1)
+ : Period -> Period ! (Time_Error | Illegal_Argument_Error)
+ self other_period =
ensure_period other_period <|
Panic.catch Any (Period.Period_Data (self.internal_period.plus other_period.internal_period)) err->
Panic.catch Any (Period.Value (self.internal_period.plus other_period.internal_period)) err->
case err of
_ : DateTimeException -> Error.throw Time_Error_Data "Period addition failed:"+err.getMessage
_ : ArithmeticException -> Error.throw Illegal_Argument_Error_Data "Arithmetic error:"+err.getMessage cause=err
@ -122,12 +115,12 @@ type Period
import Standard.Base.Data.Time.Period
example_subtract = (Period.years 3) - (Period.months 11)
example_subtract = (Period.new years=3) - (Period.new months=11)
- : Period -> Period ! (Time_Error | Illegal_Argument_Error)
- self other_period =
ensure_period other_period <|
Panic.catch Any (Period.Period_Data (self.internal_period.minus other_period.internal_period)) err->
Panic.catch Any (Period.Value (self.internal_period.minus other_period.internal_period)) err->
case err of
DateTimeException -> Error.throw Time_Error_Data "Period subtraction failed"
ArithmeticException -> Error.throw Illegal_Argument_Error_Data "Arithmetic error"
@ -135,9 +128,9 @@ type Period
## Check two periods for equality.
Note that two periods are equal if they have the exact same amount of
years, months, and days. So `(Period.days 30)` and
`(Period.months 1)` are not equal. Even `(Period.years 1)` and
`(Period.months 12)` are not equal.
years, months, and days. So `(Period.new days=30)` and
`(Period.new months=1)` are not equal. Even `(Period.new years=1)` and
`(Period.new months=12)` are not equal.
Arguments:
- other_period: The period to compare against `self`.

View File

@ -1,58 +1,25 @@
from Standard.Base import all
import project.Data.Any.Any
import project.Data.Json.Json
import project.Data.Locale.Locale
import project.Data.Numbers.Integer
import project.Data.Ordering.Ordering
import project.Data.Text.Text
import project.Data.Time.Date.Date
import project.Data.Time.Duration.Duration
import project.Data.Time.Period.Period
import project.Data.Time.Time_Period.Time_Period
import project.Data.Time.Time_Zone.Time_Zone
import project.Meta
import project.Nothing.Nothing
import Standard.Base.Data.Time.Duration
import Standard.Base.Data.Time.Period
import Standard.Base.Data.Time.Time_Period
from Standard.Base.Error.Common import Time_Error
from project.Data.Boolean import Boolean, True, False
from project.Error.Common import Error, Panic, Time_Error, Time_Error_Data, Type_Error_Data
polyglot java import java.time.format.DateTimeFormatter
polyglot java import java.time.Instant
polyglot java import java.time.LocalTime
polyglot java import org.enso.base.Time_Utils
## Obtains the current time from the system clock in the default time-zone.
> Example
Get the current time in the default time zone.
from Standard.Base import Time_Of_Day
example_now = Time_Of_Day.now
now : Time_Of_Day
now = @Builtin_Method "Time_Of_Day.now"
## Obtains an instance of `Time_Of_Day` from an hour, minute, second
and nanosecond.
Arguments:
- hour: The hour-of-day to represent, from 0 to 23.
- minute: The minute-of-hour to represent, from 0 to 59.
- second: The second-of-minute to represent, from 0 to 59.
- millisecond: the millisecond-of-second to represent, from 0 to 999.
- microsecond: the microsecond-of-second to represent, from 0 to 999,999.
- nanosecond: The nano-of-second to represent, from 0 to 999,999,999.
Returns a `Time_Error` if the provided time is not a valid time.
> Example
Create a new local time at Unix epoch.
from Standard.Base import Time_Of_Day
example_epoch = Time_Of_Day.new
> Example
Get the local time at 9:30.
from Standard.Base import Time_Of_Day
example_epoch = Time_Of_Day.new hour=9 minute=30
new : Integer -> Integer -> Integer -> Integer -> Integer -> Integer -> Time_Of_Day ! Time_Error
new (hour = 0) (minute = 0) (second = 0) (millisecond = 0) (microsecond = 0) (nanosecond = 0) =
total_nanoseconds = nanosecond + microsecond * 1000 + millisecond * 1000000
Panic.catch_java Any (new_builtin hour minute second total_nanoseconds) java_exception->
Error.throw (Time_Error_Data java_exception.getMessage)
## PRIVATE
Obtains an instance of `Time_Of_Day` from an hour, minute, second
@ -68,74 +35,6 @@ new (hour = 0) (minute = 0) (second = 0) (millisecond = 0) (microsecond = 0) (na
new_builtin : Integer -> Integer -> Integer -> Integer -> Time_Of_Day
new_builtin hour minute second nanosecond = @Builtin_Method "Time_Of_Day.new_builtin"
## Obtains an instance of `Time_Of_Day` from a text such as "10:15".
Arguments:
- text: The text to parse as a time of day.
- pattern: The pattern to use for parsing the input text.
- locale: The locale in which the pattern should be interpreted.
Returns a `Time_Error` if the provided text cannot be parsed using the
default format.
? Pattern Syntax
For the list of accepted symbols in pattern refer to `Time.format` doc.
? Default Time Format
The text must represent a valid time and is parsed using the ISO-8601
extended local time format. The format consists of:
- Two digits for the hour-of-day. This is pre-padded by zero to ensure two
digits.
- A colon
- Two digits for the minute-of-hour. This is pre-padded by zero to ensure
two digits.
- If the second-of-minute is not available then the format is complete.
- A colon
- Two digits for the second-of-minute. This is pre-padded by zero to ensure
two digits.
- If the nano-of-second is zero or not available then the format is
complete.
- A decimal point
- One to nine digits for the nano-of-second. As many digits will be output
as required.
> Example
Get the time 15:05:30.
from Standard.Base import Time_Of_Day
example_parse = Time_Of_Day.parse "15:05:30"
> Example
Recover from the parse error.
from Standard.Base import Time_Of_Day
from Standard.Base.Error.Common import Time_Error
example_parse = Time_Of_Day.parse "half twelve" . catch Time_Error _->
Time_Of_Day.new
> Example
Parse "04:30:20" as Time_Of_Day.
from Standard.Base import Time_Of_Day
example_parse = Time_Of_Day.parse "04:30:20" "HH:mm:ss"
> Example
Parse "4:30AM" as Time_Of_Day
from Standard.Base import Time_Of_Day
example_parse = Time_Of_Day.parse "4:30AM" "h:mma"
parse : Text -> Text | Nothing -> Locale -> Time_Of_Day ! Time_Error
parse text pattern=Nothing locale=Locale.default =
Panic.catch_java Any handler=(java_exception -> Error.throw (Time_Error_Data java_exception.getMessage)) <|
case pattern of
Nothing -> parse_builtin text
_ : Text -> Time_Utils.parse_time text pattern locale.java_locale
## PRIVATE
Obtains an instance of `Time_Of_Day` from a text such as "10:15".
@ -157,6 +56,116 @@ parse_builtin text = @Builtin_Method "Time_Of_Day.parse_builtin"
"13:45.30.123456789" can be stored in a `Time_Of_Day`.
@Builtin_Type
type Time_Of_Day
## Obtains the current time from the system clock in the default time-zone.
> Example
Get the current time in the default time zone.
from Standard.Base import Time_Of_Day
example_now = Time_Of_Day.now
now : Time_Of_Day
now = @Builtin_Method "Time_Of_Day.now"
## Obtains an instance of `Time_Of_Day` from an hour, minute, second
and nanosecond.
Arguments:
- hour: The hour-of-day to represent, from 0 to 23.
- minute: The minute-of-hour to represent, from 0 to 59.
- second: The second-of-minute to represent, from 0 to 59.
- millisecond: the millisecond-of-second to represent, from 0 to 999.
- microsecond: the microsecond-of-second to represent, from 0 to 999,999.
- nanosecond: The nano-of-second to represent, from 0 to 999,999,999.
Returns a `Time_Error` if the provided time is not a valid time.
> Example
Create a new local time at Unix epoch.
from Standard.Base import Time_Of_Day
example_epoch = Time_Of_Day.new
> Example
Get the local time at 9:30.
from Standard.Base import Time_Of_Day
example_epoch = Time_Of_Day.new hour=9 minute=30
new : Integer -> Integer -> Integer -> Integer -> Integer -> Integer -> Time_Of_Day ! Time_Error
new (hour = 0) (minute = 0) (second = 0) (millisecond = 0) (microsecond = 0) (nanosecond = 0) =
total_nanoseconds = nanosecond + microsecond * 1000 + millisecond * 1000000
Panic.catch_java Any (new_builtin hour minute second total_nanoseconds) java_exception->
Error.throw (Time_Error_Data java_exception.getMessage)
## Obtains an instance of `Time_Of_Day` from a text such as "10:15".
Arguments:
- text: The text to parse as a time of day.
- pattern: The pattern to use for parsing the input text.
- locale: The locale in which the pattern should be interpreted.
Returns a `Time_Error` if the provided text cannot be parsed using the
default format.
? Pattern Syntax
For the list of accepted symbols in pattern refer to `Time.format` doc.
? Default Time Format
The text must represent a valid time and is parsed using the ISO-8601
extended local time format. The format consists of:
- Two digits for the hour-of-day. This is pre-padded by zero to ensure two
digits.
- A colon
- Two digits for the minute-of-hour. This is pre-padded by zero to ensure
two digits.
- If the second-of-minute is not available then the format is complete.
- A colon
- Two digits for the second-of-minute. This is pre-padded by zero to ensure
two digits.
- If the nano-of-second is zero or not available then the format is
complete.
- A decimal point
- One to nine digits for the nano-of-second. As many digits will be output
as required.
> Example
Get the time 15:05:30.
from Standard.Base import Time_Of_Day
example_parse = Time_Of_Day.parse "15:05:30"
> Example
Recover from the parse error.
from Standard.Base import Time_Of_Day
from Standard.Base.Error.Common import Time_Error
example_parse = Time_Of_Day.parse "half twelve" . catch Time_Error _->
Time_Of_Day.new
> Example
Parse "04:30:20" as Time_Of_Day.
from Standard.Base import Time_Of_Day
example_parse = Time_Of_Day.parse "04:30:20" "HH:mm:ss"
> Example
Parse "4:30AM" as Time_Of_Day
from Standard.Base import Time_Of_Day
example_parse = Time_Of_Day.parse "4:30AM" "h:mma"
parse : Text -> Text | Nothing -> Locale -> Time_Of_Day ! Time_Error
parse text pattern=Nothing locale=Locale.default =
Panic.catch_java Any handler=(java_exception -> Error.throw (Time_Error_Data java_exception.getMessage)) <|
case pattern of
Nothing -> parse_builtin text
_ : Text -> Time_Utils.parse_time text pattern locale.java_locale
## Get the hour portion of the time of day.
@ -244,15 +253,14 @@ type Time_Of_Day
> Example
from Standard.Base import Time_Of_Day
import Standard.Base.Data.Time.Duration
from Standard.Base import Time_Of_Day, Duration
example_plus = Time_Of_Day.new + (Duration.seconds 3)
example_plus = Time_Of_Day.new + (Duration.new seconds=3)
+ : Duration -> Time_Of_Day ! Time_Error
+ self amount =
case amount of
duration : Duration.Duration -> self.plus_builtin duration
_ : Period.Period -> Error.throw (Time_Error_Data "Time_Of_Day does not support date intervals (periods)")
duration : Duration -> self.plus_builtin duration
_ : Period -> Error.throw (Time_Error_Data "Time_Of_Day does not support date intervals (periods)")
## Subtract the specified amount of time from this instant to get a new
instant.
@ -263,15 +271,14 @@ type Time_Of_Day
> Example
Subtract 12 hours from a local time.
from Standard.Base import Time_Of_Day
import Standard.Base.Data.Time.Duration
from Standard.Base import Time_Of_Day, Duration
example_minus = Time_Of_Day.now - (Duration.hours 12)
example_minus = Time_Of_Day.now - (Duration.new hours=12)
- : Duration -> Time_Of_Day ! Time_Error
- self amount =
case amount of
duration : Duration.Duration -> self.minus_builtin duration
_ : Period.Period -> Error.throw (Time_Error_Data "Time_Of_Day does not support date intervals (periods)")
duration : Duration -> self.minus_builtin duration
_ : Period -> Error.throw (Time_Error_Data "Time_Of_Day does not support date intervals (periods)")
## Format this time of day as text using the default formatter.
@ -368,6 +375,7 @@ type Time_Of_Day
## Compares two Time_Of_Day for equality.
== : Date -> Boolean
== self that = case that of
Time_Of_Day -> Meta.is_same_object self Time_Of_Day
_ : Time_Of_Day ->
sign = Time_Utils.compare_to_localtime self that
0 == sign

View File

@ -1,17 +1,19 @@
from Standard.Base import all
import project.Data.Time.Time_Of_Day.Time_Of_Day
import project.Data.Time.Date_Time.Date_Time
from project.Data.Boolean import Boolean, True, False
polyglot java import org.enso.base.Time_Utils
polyglot java import java.time.temporal.ChronoUnit
polyglot java import java.time.temporal.TemporalUnit
# TODO Dubious constructor export
from project.Data.Time.Time_Period.Time_Period import all
from project.Data.Time.Time_Period.Time_Period export all
## Represents a period of time of a day or shorter.
## Represents a unit of time of a day or shorter.
type Time_Period
Day
Hour
Minute
Second
## PRIVATE
@ -23,10 +25,10 @@ type Time_Period
## PRIVATE
to_java_unit : TemporalUnit
to_java_unit self = case self of
Day -> ChronoUnit.DAYS
Hour -> ChronoUnit.HOURS
Minute -> ChronoUnit.MINUTES
Second -> ChronoUnit.SECONDS
Time_Period.Day -> ChronoUnit.DAYS
Time_Period.Hour -> ChronoUnit.HOURS
Time_Period.Minute -> ChronoUnit.MINUTES
Time_Period.Second -> ChronoUnit.SECONDS
## PRIVATE
adjust_start : (Time_Of_Day | Date_Time) -> (Time_Of_Day | Date_Time)

View File

@ -1,67 +1,15 @@
from Standard.Base import all
from Standard.Base.Error.Common import Time_Error
import project.Data.Any.Any
import project.Data.Json.Json
import project.Data.Numbers.Integer
import project.Data.Text.Text
from project.Data.Boolean import Boolean, True, False
from project.Error.Common import Panic, Error, Time_Error, Time_Error_Data
polyglot java import java.time.ZoneId
polyglot java import java.time.ZoneOffset
polyglot java import org.enso.base.Time_Utils
## The system default timezone.
> Example
Get the system default timezone.
from Standard.Base import Time_Zone
example_system = Time_Zone.system
system : Time_Zone
system = @Builtin_Method "Time_Zone.system"
## ALIAS Current Time Zone
The system's local timezone.
> Example
Get the system's local timezone.
from Standard.Base import Time_Zone
example_local = Time_Zone.local
local : Time_Zone
local = system
## ALIAS UTC Time Zone
The UTC timezone.
> Example
Get the UTC timezone.
from Standard.Base import Time_Zone
example_utc = Time_Zone.utc
utc : Time_Zone
utc = parse "UTC"
## Obtains an instance of `Time_Zone` using an offset in hours, minutes and seconds
from the UTC zone.
Arguments:
- hours: The timezone offset in hours from UTC, from -18 to +18.
- minutes: The timezone offset in minutes from the nearest hour, from 0 to
±59. The sign must match that of the hours argument.
- seconds: The timezone offset in seconds from the nearest minute, from 0 to
±59. The sign must match that of the minutes argument.
> Example
Get time zone 1 hour 1 minute and 50 seconds from UTC.
from Standard.Base import Time_Zone
example_new = Time_Zone.new 1 1 50
new : Integer -> Integer -> Integer -> Time_Zone
new (hours = 0) (minutes = 0) (seconds = 0) =
new_builtin hours minutes seconds
## PRIVATE
Obtains an instance of `Time_Zone` using an offset in hours, minutes and seconds
from the UTC zone.
@ -77,45 +25,6 @@ new (hours = 0) (minutes = 0) (seconds = 0) =
new_builtin : Integer -> Integer -> Integer -> Time_Zone
new_builtin hours minutes seconds = @Builtin_Method "Time_Zone.new_builtin"
## ALIAS Time Zone from Text
This method parses the ID producing a `Time_Zone`.
Arguments:
- text: The text representing a zone identifier.
> Example
Get Central European Time.
from Standard.Base import Time_Zone
example_parse = Time_Zone.parse "CET"
> Example
Get Moscow time.
from Standard.Base import Time_Zone
example_parse = Time_Zone.parse "Europe/Moscow"
> Example
Get time zone -06:00.
from Standard.Base import Time_Zone
example_parse = Time_Zone.parse "-06:00"
> Example
Get custom offset +03:02:01 of 3 hours 2 minutes and 1 second.
from Standard.Base import Time_Zone
example_parse = Time_Zone.parse "+03:02:01"
parse : Text -> Time_Zone ! Time_Error
parse text =
Panic.catch_java Any handler=(java_exception -> Error.throw (Time_Error_Data java_exception.getMessage)) <|
parse_builtin text
## PRIVATE
This method parses the ID producing a `Time_Zone`.
@ -127,9 +36,7 @@ parse text =
parse_builtin : Text -> Time_Zone
parse_builtin text = @Builtin_Method "Time_Zone.parse_builtin"
## PRIVATE
A type representing a time zone.
## A type representing a time zone.
Arguments:
- internal_zone_id: The identifier for the internal zone of the
@ -139,8 +46,101 @@ parse_builtin text = @Builtin_Method "Time_Zone.parse_builtin"
"Europe/Paris".
@Builtin_Type
type Time_Zone
## The system default timezone.
> Example
Get the system default timezone.
from Standard.Base import Time_Zone
example_system = Time_Zone.system
system : Time_Zone
system = @Builtin_Method "Time_Zone.system"
## ALIAS Current Time Zone
The system's local timezone.
> Example
Get the system's local timezone.
from Standard.Base import Time_Zone
example_local = Time_Zone.local
local : Time_Zone
local = Time_Zone.system
## ALIAS UTC Time Zone
The UTC timezone.
> Example
Get the UTC timezone.
from Standard.Base import Time_Zone
example_utc = Time_Zone.utc
utc : Time_Zone
utc = Time_Zone.parse "UTC"
## Obtains an instance of `Time_Zone` using an offset in hours, minutes and seconds
from the UTC zone.
Arguments:
- hours: The timezone offset in hours from UTC, from -18 to +18.
- minutes: The timezone offset in minutes from the nearest hour, from 0 to
±59. The sign must match that of the hours argument.
- seconds: The timezone offset in seconds from the nearest minute, from 0 to
±59. The sign must match that of the minutes argument.
> Example
Get time zone 1 hour 1 minute and 50 seconds from UTC.
from Standard.Base import Time_Zone
example_new = Time_Zone.new 1 1 50
new : Integer -> Integer -> Integer -> Time_Zone
new (hours = 0) (minutes = 0) (seconds = 0) =
new_builtin hours minutes seconds
## ALIAS Time Zone from Text
This method parses the ID producing a `Time_Zone`.
Arguments:
- text: The text representing a zone identifier.
> Example
Get Central European Time.
from Standard.Base import Time_Zone
example_parse = Time_Zone.parse "CET"
> Example
Get Moscow time.
from Standard.Base import Time_Zone
example_parse = Time_Zone.parse "Europe/Moscow"
> Example
Get time zone -06:00.
from Standard.Base import Time_Zone
example_parse = Time_Zone.parse "-06:00"
> Example
Get custom offset +03:02:01 of 3 hours 2 minutes and 1 second.
from Standard.Base import Time_Zone
example_parse = Time_Zone.parse "+03:02:01"
parse : Text -> Time_Zone ! Time_Error
parse text =
Panic.catch_java Any handler=(java_exception -> Error.throw (Time_Error_Data java_exception.getMessage)) <|
parse_builtin text
## Get the unique timezone ID.
@ -161,7 +161,7 @@ type Time_Zone
from Standard.Base import Time_Zone
example_to_json = Time_Zone.system.to_json
to_json : Json.Object
to_json : Json
to_json self = Json.from_pairs [["type", "Time_Zone"], ["id", self.zone_id]]
## Compares two Zones for equality.

View File

@ -375,12 +375,12 @@ type Vector a
[1, 2, 3, 4, 5].partition (x -> x % 2 == 0) == (Pair [2, 4] [1, 3, 5])
partition : (Any -> Boolean) -> Pair (Vector Any) (Vector Any)
partition self predicate =
pair = self.fold (Pair_Data Vector.new_builder Vector.new_builder) acc-> elem->
pair = self.fold (Pair.Value Vector.new_builder Vector.new_builder) acc-> elem->
case predicate elem of
True ->
Pair_Data (acc.first.append elem) acc.second
Pair.Value (acc.first.append elem) acc.second
False ->
Pair_Data acc.first (acc.second.append elem)
Pair.Value acc.first (acc.second.append elem)
pair.map .to_vector
## Partitions the vector into vectors of elements which satisfy a given
@ -403,10 +403,10 @@ type Vector a
["a", "b", "c", "d"].partition_with_index (ix -> _ -> ix % 2 == 0) == (Pair ["a", "c"] ["b", "d"])
partition_with_index : (Integer -> Any -> Boolean) -> Pair (Vector Any) (Vector Any)
partition_with_index self predicate =
pair = self.fold_with_index (Pair_Data Vector.new_builder Vector.new_builder) acc-> ix-> elem->
pair = self.fold_with_index (Pair.Value Vector.new_builder Vector.new_builder) acc-> ix-> elem->
case predicate ix elem of
True -> Pair_Data (acc.first.append elem) acc.second
False -> Pair_Data acc.first (acc.second.append elem)
True -> Pair.Value (acc.first.append elem) acc.second
False -> Pair.Value acc.first (acc.second.append elem)
pair.map .to_vector
## Applies a function to each element of the vector, returning the vector of

View File

@ -5,6 +5,7 @@ import project.Data.List.List
import project.Data.Numbers
import project.Data.Map.Map
import project.Data.Vector.Vector
import project.Math
import project.Nothing.Nothing
export project.Data.Any.Any
@ -12,6 +13,7 @@ export project.Data.Array.Array
export project.Data.List.List
export project.Data.Map.Map
export project.Data.Vector.Vector
export project.Math
export project.Nothing.Nothing
from project.Data.Boolean export Boolean, True, False
@ -25,9 +27,25 @@ import project.Data.Json
import project.Data.Locale.Locale
import project.Data.Maybe.Maybe
import project.Data.Noise
import project.Data.Ordering.Natural_Order
import project.Data.Ordering.Ordering
import project.Data.Ordering.Sort_Direction.Sort_Direction
import project.Data.Pair.Pair
import project.Data.Regression
import project.Data.Statistics
import project.Data.Text.Case_Sensitivity.Case_Sensitivity
import project.Data.Text.Line_Ending_Style.Line_Ending_Style
import project.Data.Text.Text_Sub_Range.Text_Sub_Range
import project.Data.Time.Date.Date
import project.Data.Time.Date_Period.Date_Period
import project.Data.Time.Date_Time.Date_Time
import project.Data.Time.Day_Of_Week.Day_Of_Week
import project.Data.Time.Day_Of_Week_From
import project.Data.Time.Duration.Duration
import project.Data.Time.Period.Period
import project.Data.Time.Time_Of_Day.Time_Of_Day
import project.Data.Time.Time_Period.Time_Period
import project.Data.Time.Time_Zone.Time_Zone
export project.Data.Filter_Condition.Filter_Condition
export project.Data.Index_Sub_Range.Index_Sub_Range
@ -36,12 +54,28 @@ export project.Data.Interval.Interval
export project.Data.Json.Json
export project.Data.Locale.Locale
export project.Data.Maybe.Maybe
export project.Data.Ordering.Natural_Order
export project.Data.Ordering.Ordering
export project.Data.Ordering.Sort_Direction.Sort_Direction
export project.Data.Pair.Pair
export project.Data.Regression
export project.Data.Text.Case_Sensitivity.Case_Sensitivity
export project.Data.Text.Line_Ending_Style.Line_Ending_Style
export project.Data.Text.Text_Sub_Range.Text_Sub_Range
export project.Data.Time.Date.Date
export project.Data.Time.Date_Period.Date_Period
export project.Data.Time.Date_Time.Date_Time
export project.Data.Time.Day_Of_Week.Day_Of_Week
export project.Data.Time.Day_Of_Week_From
export project.Data.Time.Duration.Duration
export project.Data.Time.Period.Period
export project.Data.Time.Time_Of_Day.Time_Of_Day
export project.Data.Time.Time_Period.Time_Period
export project.Data.Time.Time_Zone.Time_Zone
from project.Data.Json export all hiding Json_Parse_Error, No_Such_Field, Marshalling_Error
from project.Data.Noise export all hiding Noise, Generator, Deterministic_Random
from project.Data.Noise export all hiding Noise, Generator, Deterministic_Random, Long, Random
from project.Data.Statistics export all hiding to_moment_statistic, wrap_java_call, calculate_correlation_statistics, calculate_spearman_rank, calculate_correlation_statistics_matrix, Moments, CountMinMax, CorrelationStatistics, Rank, ClassCastException, NullPointerException
from project.Data.Index_Sub_Range import First, Last
from project.Data.Index_Sub_Range export First, Last
@ -50,16 +84,11 @@ from project.Data.Index_Sub_Range export First, Last
# Not refactored modules below:
import project.Data.Ordering.Natural_Order
import project.Data.Ordering.Sort_Direction
import project.Data.Pair
import project.Data.Range
import project.Data.Regression
import project.Data.Statistics
import project.Data.Statistics.Rank_Method
from project.Data.Range export all
import project.Data.Text
import project.Data.Text.Case
import project.Data.Text.Case_Sensitivity
import project.Data.Text.Encoding
import project.Data.Text.Extensions
import project.Data.Text.Matching
@ -67,20 +96,10 @@ import project.Data.Text.Text_Matcher
import project.Data.Text.Regex_Matcher
import project.Data.Text.Text_Ordering
import project.Data.Text.Span
import project.Data.Time.Date
import project.Data.Time.Date_Time
import project.Data.Time.Duration
import project.Data.Time.Time_Of_Day
import project.Data.Time.Time_Zone
import project.Data.Time.Day_Of_Week.Day_Of_Week
import project.Data.Time.Day_Of_Week_From
import project.Error.Common
import project.Error.Problem_Behavior
import project.Function
import project.IO
import project.Math
import project.Meta
import project.Polyglot
import project.Polyglot.Java
@ -95,27 +114,15 @@ import project.Data.Text.Regex
import project.Data.Text.Regex.Regex_Mode
import project.Warning
export project.Data.Ordering.Natural_Order
export project.Data.Ordering.Sort_Direction
export project.Data.Regression
export project.Data.Statistics
export project.Data.Statistics.Rank_Method
export project.Data.Text.Case_Sensitivity
export project.Data.Text.Regex
export project.Data.Text.Regex.Regex_Mode
export project.Data.Text.Text_Ordering
export project.Data.Text.Text_Matcher
export project.Data.Text.Regex_Matcher
export project.Data.Time.Date
export project.Data.Time.Date_Time
export project.Data.Time.Duration
export project.Data.Time.Time_Of_Day
export project.Data.Time.Time_Zone
export project.Data.Time.Day_Of_Week.Day_Of_Week
export project.Data.Time.Day_Of_Week_From
export project.Error.Problem_Behavior
export project.IO
export project.Math
export project.Meta
export project.Polyglot
export project.Polyglot.Java
@ -126,12 +133,9 @@ export project.System.File
export project.System.File.Existing_File_Behavior
export project.Warning
from project.Data.Pair export Pair, Pair_Data
from project.Data.Range export all
from project.Data.Text.Extensions export Text, Case, Location, Matching_Mode
from project.Data.Text.Matching export No_Matches_Found_Data
from project.Data.Text export all hiding Encoding, Span, Line_Ending_Style
from project.Data.Text export all hiding Encoding, Span, Line_Ending_Style, Case_Sensitivity
from project.Data.Text.Encoding export Encoding, Encoding_Error, Encoding_Error_Data
from project.Data.Text.Span export all
from project.Error.Common export all

View File

@ -1,4 +1,5 @@
from Standard.Base import all
import project.Data.Numbers.Decimal
import project.Data.Numbers.Number
## Alias Pi (Constant)

View File

@ -325,11 +325,11 @@ is_a value typ = if is_same_object value typ then True else
_ : Number -> if typ == Number then True else case value of
_ : Integer -> typ == Integer
_ : Decimal -> typ == Decimal
_ : Date_Time.Date_Time -> typ.is_same_object_as Date_Time.Date_Time
_ : Date.Date -> typ.is_same_object_as Date.Date
_ : Duration.Duration -> typ.is_same_object_as Duration.Duration
_ : Time_Of_Day.Time_Of_Day -> typ.is_same_object_as Time_Of_Day.Time_Of_Day
_ : Time_Zone.Time_Zone -> typ.is_same_object_as Time_Zone.Time_Zone
_ : Date_Time -> typ.is_same_object_as Date_Time
_ : Date -> typ.is_same_object_as Date
_ : Duration -> typ.is_same_object_as Duration
_ : Time_Of_Day -> typ.is_same_object_as Time_Of_Day
_ : Time_Zone -> typ.is_same_object_as Time_Zone
Base.Polyglot.Polyglot ->
typ==Base.Polyglot.Polyglot || java_instance_check value typ
_ ->

View File

@ -1,14 +1,8 @@
import Standard.Base.System.File
## TODO Dubious constructor export
from project.Meta.Enso_Project.Project_Description import all
from project.Meta.Enso_Project.Project_Description export all
import project.System.File
## Functionality for inspecting the current project.
@Builtin_Type
type Project_Description
## A representation of an Enso project.
Arguments:

View File

@ -1,6 +1,5 @@
from Standard.Base import all
import Standard.Base.Data.Time.Duration
from Standard.Base.Error.Common import Time_Error
import Standard.Base.Network.Http.Form
@ -36,19 +35,19 @@ polyglot java import org.enso.base.Http_Utils
> Example
Create an HTTP client with extended timeout.
Http.new timeout=(Duration.seconds 30)
Http.new timeout=(Duration.new seconds=30)
> Example
Create an HTTP client with extended timeout and proxy settings.
import Standard.Base.Data.Time.Duration
import Standard.Base.Data.Time.Duration.Duration
import Standard.Base.Network.Http
import Standard.Base.Network.Proxy
example_new =
Http.new (timeout = (Duration.seconds 30)) (proxy = Proxy.new "example.com" 8080)
Http.new (timeout = (Duration.new seconds=30)) (proxy = Proxy.new "example.com" 8080)
new : Duration -> Boolean -> Proxy -> Http
new (timeout = (Duration.seconds 10)) (follow_redirects = True) (proxy = Proxy.System) (version = Version.Http_1_1) =
new (timeout = (Duration.new seconds=10)) (follow_redirects = True) (proxy = Proxy.System) (version = Version.Http_1_1) =
Http_Data timeout follow_redirects proxy version
## Send an Options request.
@ -586,7 +585,7 @@ type Http
Configure HTTP client and send a Post request with form. NOTE: This
example will send a network request.
import Standard.Base.Data.Time.Duration
import Standard.Base.Data.Time.Duration.Duration
import Standard.Base.Network.Http
import Standard.Base.Network.Http.Form
import Standard.Base.Network.Http.Method
@ -595,7 +594,7 @@ type Http
example_request =
form = [Form.text_field "name" "John Doe"]
req = Request.new Method.Post "http://httpbin.org/post" . with_form form
http = Http.new (timeout = (Duration.seconds 30))
http = Http.new (timeout = (Duration.new seconds=30))
http.request req
request : Request -> Response ! Request_Error
request self req =
@ -609,14 +608,14 @@ type Http
# prepare headers and body
req_with_body = case req.body of
Request_Body.Empty ->
Pair_Data req body_publishers.noBody
Pair.Value req body_publishers.noBody
Request_Body.Text text ->
builder.header Header.text_plain.name Header.text_plain.value
Pair_Data req (body_publishers.ofString text)
Pair.Value req (body_publishers.ofString text)
Request_Body.Json json ->
builder.header Header.application_json.name Header.application_json.value
json_body = if json.is_a Text then json else json.to_text
Pair_Data req (body_publishers.ofString json_body)
Pair.Value req (body_publishers.ofString json_body)
Request_Body.Form form ->
add_multipart form =
body_builder = Http_Utils.multipart_body_builder
@ -624,18 +623,18 @@ type Http
Form.Part_Text text -> body_builder.add_part_text part.key text
Form.Part_File file -> body_builder.add_part_file part.key file.path
boundary = body_builder.get_boundary
Pair_Data (req.with_headers [Header.multipart_form_data boundary]) body_builder.build
Pair.Value (req.with_headers [Header.multipart_form_data boundary]) body_builder.build
add_urlencoded form =
body_builder = Http_Utils.urlencoded_body_builder
form.parts.map part-> case part.value of
Form.Part_Text text -> body_builder.add_part_text part.key text
Form.Part_File file -> body_builder.add_part_file part.key file.path
Pair_Data req body_builder.build
Pair.Value req body_builder.build
if req.headers.contains Header.multipart_form_data then add_multipart form else
add_urlencoded form
Request_Body.Bytes bytes ->
builder.header Header.application_octet_stream.name Header.application_octet_stream.value
Pair_Data req (body_publishers.ofByteArray bytes.to_array)
Pair.Value req (body_publishers.ofByteArray bytes.to_array)
# method
req_http_method = case req.method of
Method.Options -> "OPTIONS"
@ -647,7 +646,7 @@ type Http
Method.Trace -> "TRACE"
Method.Connect -> "CONNECT"
case req_with_body of
Pair_Data req body ->
Pair.Value req body ->
# set method and body
builder.method req_http_method body
# set headers

View File

@ -157,12 +157,12 @@ type Request
with_header self key val =
new_header = Header.new key val
update_header p h = case p of
Pair_Data acc True -> Pair_Data (acc + [h]) True
Pair_Data acc False ->
if h.name . equals_ignore_case key then Pair_Data (acc + [new_header]) True else Pair_Data (acc + [h]) False
new_headers = case self.headers.fold (Pair_Data [] False) update_header of
Pair_Data acc True -> acc
Pair_Data acc False -> acc + [new_header]
Pair.Value acc True -> Pair.Value (acc + [h]) True
Pair.Value acc False ->
if h.name . equals_ignore_case key then Pair.Value (acc + [new_header]) True else Pair.Value (acc + [h]) False
new_headers = case self.headers.fold (Pair.Value [] False) update_header of
Pair.Value acc True -> acc
Pair.Value acc False -> acc + [new_header]
Request_Data self.method self.uri new_headers self.body
## Sets the headers in the request.

View File

@ -200,7 +200,7 @@ detach_selected_warnings value predicate =
result = warnings.partition w-> predicate w.value
matched = result.first
remaining = result.second
Pair_Data (set remaining value) matched
Pair.Value (set remaining value) matched
## UNSTABLE
A helper function which gathers warnings matching some predicate and passes

View File

@ -18,5 +18,5 @@ type Client_Certificate
- sslpass: password for the client key file.
properties : Vector
properties self =
base = [Pair_Data 'sslcert' (File.new self.cert_file).absolute.path, Pair_Data 'sslkey' (File.new self.key_file).absolute.path]
if self.key_password == "" then base else base + [Pair_Data 'sslpassword' self.key_password]
base = [Pair.Value 'sslcert' (File.new self.cert_file).absolute.path, Pair.Value 'sslkey' (File.new self.key_file).absolute.path]
if self.key_password == "" then base else base + [Pair.Value 'sslpassword' self.key_password]

View File

@ -54,24 +54,24 @@ type Postgres_Options
Nothing ->
env_user = Environment.get "PGUSER"
env_password = Environment.get "PGPASSWORD"
case Pair_Data env_user env_password of
Pair_Data Nothing Nothing ->
case Pair.Value env_user env_password of
Pair.Value Nothing Nothing ->
Pgpass.read self.host self.port self.database
Pair_Data Nothing _ ->
Pair.Value Nothing _ ->
Error.throw (Illegal_State_Error_Data "PGPASSWORD is set, but PGUSER is not.")
Pair_Data username Nothing ->
Pair.Value username Nothing ->
Pgpass.read self.host self.port self.database username
Pair_Data username password ->
[Pair_Data 'user' username, Pair_Data 'password' password]
Pair.Value username password ->
[Pair.Value 'user' username, Pair.Value 'password' password]
Credentials.Username_And_Password username password ->
[Pair_Data 'user' username, Pair_Data 'password' password]
[Pair.Value 'user' username, Pair.Value 'password' password]
ssl_properties = ssl_mode_to_jdbc_properties self.use_ssl
cert_properties = if self.client_cert.is_nothing then [] else
self.client_cert.properties
(if self.schema == '' then [] else [Pair_Data 'currentSchema' self.schema]) + credentials + ssl_properties + cert_properties
(if self.schema == '' then [] else [Pair.Value 'currentSchema' self.schema]) + credentials + ssl_properties + cert_properties
## PRIVATE
Given an `SSL_Mode`, create the JDBC properties to secure a Postgres-based
@ -79,14 +79,14 @@ type Postgres_Options
ssl_mode_to_jdbc_properties : SSL_Mode -> [Pair Text Text]
ssl_mode_to_jdbc_properties use_ssl = case use_ssl of
SSL_Mode.Disable -> []
SSL_Mode.Prefer -> [Pair_Data 'sslmode' 'prefer']
SSL_Mode.Require -> [Pair_Data 'sslmode' 'require']
SSL_Mode.Prefer -> [Pair.Value 'sslmode' 'prefer']
SSL_Mode.Require -> [Pair.Value 'sslmode' 'require']
SSL_Mode.Verify_CA cert_file ->
if cert_file.is_nothing then [Pair_Data 'sslmode' 'verify-ca'] else
[Pair_Data 'sslmode' 'verify-ca', Pair_Data 'sslrootcert' (File.new cert_file).absolute.path]
if cert_file.is_nothing then [Pair.Value 'sslmode' 'verify-ca'] else
[Pair.Value 'sslmode' 'verify-ca', Pair.Value 'sslrootcert' (File.new cert_file).absolute.path]
SSL_Mode.Full_Verification cert_file ->
if cert_file.is_nothing then [Pair_Data 'sslmode' 'verify-full'] else
[Pair_Data 'sslmode' 'verify-full', Pair_Data 'sslrootcert' (File.new cert_file).absolute.path]
if cert_file.is_nothing then [Pair.Value 'sslmode' 'verify-full'] else
[Pair.Value 'sslmode' 'verify-full', Pair.Value 'sslrootcert' (File.new cert_file).absolute.path]
## PRIVATE
default_postgres_host = Environment.get_or_else "PGHOST" "localhost"

View File

@ -58,11 +58,11 @@ type Redshift_Options
credentials = case self.credentials of
Nothing -> Pgpass.read self.host self.port self.schema
AWS_Credential.Profile db_user profile ->
[Pair_Data 'user' db_user] + (if profile == '' then [] else [Pair_Data 'profile' profile])
[Pair.Value 'user' db_user] + (if profile == '' then [] else [Pair.Value 'profile' profile])
AWS_Credential.Key db_user access_key secret_access_key ->
[Pair_Data 'user' db_user, Pair_Data 'AccessKeyID' access_key, Pair_Data 'SecretAccessKey' secret_access_key]
[Pair.Value 'user' db_user, Pair.Value 'AccessKeyID' access_key, Pair.Value 'SecretAccessKey' secret_access_key]
Credentials.Username_And_Password username password ->
[Pair_Data 'user' username, Pair_Data 'password' password]
[Pair.Value 'user' username, Pair.Value 'password' password]
## Disabled as Redshift SSL settings are different to PostgreSQL.
ssl_properties = []

View File

@ -766,9 +766,9 @@ type Column
column has any nulls and if so, we will do `IS NULL` checks for
our columns too. That is because, we want the containment check
for `NULL` to work the same way as for any other value.
in_subquery = Query.Select [Pair_Data column.name column.expression] column.context
in_subquery = Query.Select [Pair.Value column.name column.expression] column.context
has_nulls_expression = SQL_Expression.Operation "BOOL_OR" [column.is_missing.expression]
has_nulls_subquery = Query.Select [Pair_Data "has_nulls" has_nulls_expression] column.context
has_nulls_subquery = Query.Select [Pair.Value "has_nulls" has_nulls_expression] column.context
new_expr = SQL_Expression.Operation "IS_IN_COLUMN" [self.expression, in_subquery, has_nulls_subquery]
Column.Value self.name self.connection SQL_Type.boolean new_expr self.context

View File

@ -46,9 +46,9 @@ type SQL_Statement
# TODO #183734954: date and time formatting is limited and will lose sub-second precision and timezone offset.
SQL_Fragment.Interpolation _ obj -> case obj of
Number -> obj.to_text
Date_Time.Date_Time -> "'" + (obj.format "yyyy-MM-dd HH:mm:ss") + "'"
Date.Date -> "'" + (obj.format "yyyy-MM-dd") + "'"
Time_Of_Day.Time_Of_Day -> "'" + (obj.format "HH:mm:ss") + "'"
Date_Time -> "'" + (obj.format "yyyy-MM-dd HH:mm:ss") + "'"
Date -> "'" + (obj.format "yyyy-MM-dd") + "'"
Time_Of_Day -> "'" + (obj.format "HH:mm:ss") + "'"
_ -> "'" + obj.to_text.replace "'" "''" + "'"
strings.join ""

View File

@ -84,9 +84,9 @@ type SQL_Type
_ : Integer -> SQL_Type.integer
_ : Decimal -> SQL_Type.double
_ : Text -> SQL_Type.varchar
_ : Date.Date -> SQL_Type.date
_ : Time_Of_Day.Time_Of_Day -> SQL_Type.time_of_day
_ : Date_Time.Date_Time -> SQL_Type.date_time
_ : Date -> SQL_Type.date
_ : Time_Of_Day -> SQL_Type.time_of_day
_ : Date_Time -> SQL_Type.date_time
_ -> Error.throw (Illegal_Argument_Error_Data "Unsupported type.")
## PRIVATE

View File

@ -795,8 +795,8 @@ type Table
> Example
Group by the Key column, count the rows
table.aggregate [Group_By "Key", Count Nothing]
aggregate : [Aggregate_Column] -> Problem_Behavior -> Table
table.aggregate [Aggregate_Column.Group_By "Key", Aggregate_Column.Count Nothing]
aggregate : Vector Aggregate_Column -> Problem_Behavior -> Table
aggregate self columns (on_problems=Report_Warning) =
validated = Aggregate_Column_Helper.prepare_aggregate_columns columns self
on_problems.attach_problems_before validated.problems <|
@ -820,7 +820,7 @@ type Table
## Parsing values is not supported in database tables, the table has to be
loaded into memory first with `read`.
parse_values : Data_Formatter -> (Nothing | [Column_Type_Selection]) -> Problem_Behavior -> Table
parse_values : Data_Formatter -> (Nothing | Vector Column_Type_Selection) -> Problem_Behavior -> Table
parse_values self value_formatter=Data_Formatter column_types=Nothing on_problems=Report_Warning =
## Avoid unused arguments warning. We cannot rename arguments to `_`,
because we need to keep the API consistent with the in-memory table.

View File

@ -33,7 +33,7 @@ read host port database username=Nothing =
entry.matches host port database username
case found.catch Nothing of
Nothing -> []
entry -> [Pair_Data 'user' entry.username, Pair_Data 'password' entry.password]
entry -> [Pair.Value 'user' entry.username, Pair.Value 'password' entry.password]
type Pgpass_Entry
## PRIVATE

View File

@ -1,6 +1,5 @@
from Standard.Base import all
import Standard.Base.Data.Time.Duration
import Standard.Base.Network.Http
import Standard.Base.System.Platform
import Standard.Base.Network.URI

View File

@ -14,11 +14,12 @@ from Standard.Base import all
without parsing.
type Auto
## Specifies the desired datatype for parsing a particular column.
type Column_Type_Selection
## Specifies the desired datatype for parsing a particular column.
Arguments:
- column: the column selector which can either be the column name or the
index.
- datatype: The desired datatype for the column or `Auto` to infer the type
from the data.
type Column_Type_Selection (column:Text|Integer) datatype:(Auto|Integer|Decimal|Date|Date_Time|Time_Of_Day|Boolean)=Auto
Arguments:
- column: the column selector which can either be the column name or the
index.
- datatype: The desired datatype for the column or `Auto` to infer the type
from the data.
Value (column:Text|Integer) datatype:(Auto|Integer|Decimal|Date|Date_Time|Time_Of_Day|Boolean)=Auto

View File

@ -680,7 +680,7 @@ type Table
reversible (e.g., 0123 would not be converted to an integer as there is
a leading 0). However, settings in the `Data_Formatter` can
control this.
parse_values : Data_Formatter -> (Nothing | [Column_Type_Selection]) -> Problem_Behavior -> Table
parse_values : Data_Formatter -> (Nothing | Vector Column_Type_Selection) -> Problem_Behavior -> Table
parse_values self value_formatter=Data_Formatter.Value column_types=Nothing on_problems=Report_Warning =
columns = self.columns
problem_builder = Vector.new_builder
@ -696,7 +696,7 @@ type Table
if matching_input.length == 1 then matching_input.first.datatype else
first_type = matching_input.first.datatype
ambiguous = matching_input.exists s-> s.datatype != first_type
problem_builder.append (Duplicate_Type_Selector column.name ambiguous)
problem_builder.append (Duplicate_Type_Selector.Duplicate_Type_Selector_Data column.name ambiguous)
if ambiguous then Nothing else first_type
expected_types = case column_types of
@ -716,9 +716,9 @@ type Table
if found.not then
missing_columns.append selector
if missing_columns.is_empty.not then
problem_builder.append (Missing_Input_Columns missing_columns.to_vector)
problem_builder.append (Missing_Input_Columns.Missing_Input_Columns_Data missing_columns.to_vector)
if invalid_indices.is_empty.not then
problem_builder.append (Column_Indexes_Out_Of_Range invalid_indices.to_vector)
problem_builder.append (Column_Indexes_Out_Of_Range.Column_Indexes_Out_Of_Range_Data invalid_indices.to_vector)
columns.map_with_index find_datatype
new_columns = columns.zip expected_types column-> expected_type-> case expected_type of

View File

@ -108,8 +108,8 @@ prepare_reader java_reader format max_columns on_problems newline_override=Nothi
Error.throw (Illegal_Argument_Error_Data "`row_limit` should be Integer or Nothing.")
warnings_as_errors = on_problems == Report_Error
quote_characters = case format.quote_style of
Quote_Style.No_Quotes -> Pair_Data Nothing Nothing
Quote_Style.With_Quotes _ quote quote_escape -> Pair_Data quote quote_escape
Quote_Style.No_Quotes -> Pair.Value Nothing Nothing
Quote_Style.With_Quotes _ quote quote_escape -> Pair.Value quote quote_escape
base_parser = case format.quote_style of
Quote_Style.No_Quotes -> IdentityParser.new
Quote_Style.With_Quotes _ quote _ ->
@ -207,7 +207,7 @@ newline_at_eof : File -> Encoding -> Text|Nothing
newline_at_eof file encoding =
newlines = ['\r\n', '\n', '\r']
newline_bytes = newlines.map (x-> x.bytes encoding Report_Error)
most_bytes = newline_bytes.map .length . compute Statistics.Maximum
most_bytes = newline_bytes.map .length . compute Statistic.Maximum
file_last_bytes = file.read_last_bytes most_bytes
result = newlines.zip newline_bytes . find pair->
bytes = pair.second

View File

@ -144,8 +144,8 @@ write_to_writer table format java_writer separator_override=Nothing needs_leadin
Quote_Style.With_Quotes always _ _ ->
if always then WriteQuoteBehavior.ALWAYS else WriteQuoteBehavior.NECESSARY
quote_characters = case format.quote_style of
Quote_Style.No_Quotes -> Pair_Data Nothing Nothing
Quote_Style.With_Quotes _ quote quote_escape -> Pair_Data quote quote_escape
Quote_Style.No_Quotes -> Pair.Value Nothing Nothing
Quote_Style.With_Quotes _ quote quote_escape -> Pair.Value quote quote_escape
write_headers = should_write_headers format.headers
newline = separator_override.if_nothing <|
case format.line_endings of

View File

@ -69,7 +69,7 @@ prepare_aggregate_columns aggregates table =
renamed_columns = pass_1.map_with_index i->name->
agg = valid_resolved_aggregate_columns.at i
new_name = name.if_nothing (unique.make_unique (default_aggregate_column_name agg))
Pair_Data new_name agg
Pair.Value new_name agg
# Build Problems Output
if renamed_columns.is_empty then

View File

@ -546,7 +546,7 @@ select_columns_by_name internal_columns name_selectors matcher problem_builder n
associated_selector_index = matching_selector_indices.first
associated_selector = name_selectors.at associated_selector_index
element = Column_Transform_Element.Value column associated_selector
results.append (Pair_Data element [associated_selector_index, i])
results.append (Pair.Value element [associated_selector_index, i])
# We sort the results by the associated selector index, breaking ties by the column index.
sorted = results.to_vector.sort on=(_.second) by=Vector_Lexicographic_Order.compare
sorted.map .first

View File

@ -1,6 +1,5 @@
from Standard.Base import all
import Standard.Base.Runtime.State
import Standard.Base.Data.Time.Duration
import project.Test_Reporter
import project.Test_Result.Test_Result
@ -69,7 +68,7 @@ type Test
specify label ~behavior pending=Nothing =
pair = case pending of
Nothing -> Duration.time_execution (run_spec behavior)
reason -> Pair_Data Duration.zero (Test_Result.Pending reason)
reason -> Pair.Value Duration.zero (Test_Result.Pending reason)
result = pair.second
time_taken = pair.first
spec = State.get Spec

View File

@ -1,5 +1,4 @@
from Standard.Base import all
import Standard.Base.Data.Time.Duration
import project.Suite_Config.Suite_Config
import project.Test_Result.Test_Result

View File

@ -1,5 +1,4 @@
from Standard.Base import all
import Standard.Base.Data.Time.Duration
import Standard.Base.Runtime.State
import Standard.Base.System

View File

@ -1,5 +1,4 @@
from Standard.Base import all
from Standard.Base.Data.Time import Duration, Period
from Standard.Test import Bench

View File

@ -1,4 +1,4 @@
from Standard.Base import IO, Integer, Vector, Statistics
from Standard.Base import IO, Integer, Vector
from Standard.Base.Data.Statistics import all
from Standard.Test import Bench, Faker
@ -20,4 +20,4 @@ main =
vector = create_vector vector_size
IO.println <| "Testing..."
Bench.measure (vector.compute_bulk [Count, Minimum, Maximum]) "Count Min Max" iter_size num_iterations
Bench.measure (vector.compute_bulk [Statistic.Count, Statistic.Minimum, Statistic.Maximum]) "Count Min Max" iter_size num_iterations

View File

@ -1,5 +1,4 @@
from Standard.Base import all
import Standard.Base.Data.Time.Duration
from Standard.Table import Table, Sort_Column_Selector
from Standard.Table.Data.Aggregate_Column import all

View File

@ -1,5 +1,4 @@
from Standard.Base import all
import Standard.Base.Data.Time.Period
from Standard.Test import Bench

View File

@ -235,7 +235,7 @@ pgpass_file = enso_project.data / "pgpass.conf"
pgpass_spec = Test.group "[PostgreSQL] .pgpass" <|
make_pair username password =
[Pair_Data "user" username, Pair_Data "password" password]
[Pair.Value "user" username, Pair.Value "password" password]
Test.specify "should correctly parse the file, including escapes, blank lines and comments" <|
result = Pgpass.parse_file pgpass_file
result.length . should_equal 12
@ -310,11 +310,11 @@ connection_setup_spec = Test.group "[PostgreSQL] Connection setup" <|
c4.database . should_equal ""
c4.jdbc_url . should_equal "jdbc:postgresql://localhost:5432"
add_ssl props = props+[Pair_Data 'sslmode' 'prefer']
add_ssl props = props+[Pair.Value 'sslmode' 'prefer']
Test.specify "should use the given credentials" <|
c = Postgres credentials=(Credentials.Username_And_Password "myuser" "mypass")
c.jdbc_url . should_equal "jdbc:postgresql://localhost:5432"
c.jdbc_properties . should_equal <| add_ssl [Pair_Data "user" "myuser", Pair_Data "password" "mypass"]
c.jdbc_properties . should_equal <| add_ssl [Pair.Value "user" "myuser", Pair.Value "password" "mypass"]
Test.specify "should fallback to environment variables and fill-out missing information based on the PGPASS file (if available)" <|
c1 = Postgres
@ -326,7 +326,7 @@ connection_setup_spec = Test.group "[PostgreSQL] Connection setup" <|
c1.jdbc_properties.catch.message . should_equal "PGPASSWORD is set, but PGUSER is not."
Test_Environment.unsafe_with_environment_override "PGUSER" "someuser" <|
c1.jdbc_properties . should_equal <| add_ssl [Pair_Data "user" "someuser", Pair_Data "password" "somepassword"]
c1.jdbc_properties . should_equal <| add_ssl [Pair.Value "user" "someuser", Pair.Value "password" "somepassword"]
c2 = Postgres "192.168.4.0" 1234 "foo"
c3 = Postgres "::1" 55999 "database_name"
@ -336,24 +336,24 @@ connection_setup_spec = Test.group "[PostgreSQL] Connection setup" <|
c4.jdbc_properties . should_equal <| add_ssl []
Test_Environment.unsafe_with_environment_override "PGPASSFILE" pgpass_file.absolute.path <|
c2.jdbc_properties . should_equal <| add_ssl [Pair_Data "user" "bar", Pair_Data "password" "baz"]
c3.jdbc_properties . should_equal <| add_ssl [Pair_Data "user" "user_that_has_no_password", Pair_Data "password" ""]
c4.jdbc_properties . should_equal <| add_ssl [Pair_Data "user" "*", Pair_Data "password" "fallback_password"]
c2.jdbc_properties . should_equal <| add_ssl [Pair.Value "user" "bar", Pair.Value "password" "baz"]
c3.jdbc_properties . should_equal <| add_ssl [Pair.Value "user" "user_that_has_no_password", Pair.Value "password" ""]
c4.jdbc_properties . should_equal <| add_ssl [Pair.Value "user" "*", Pair.Value "password" "fallback_password"]
Test_Environment.unsafe_with_environment_override "PGUSER" "bar" <|
c2.jdbc_properties . should_equal <| add_ssl [Pair_Data "user" "bar", Pair_Data "password" "baz"]
c2.jdbc_properties . should_equal <| add_ssl [Pair.Value "user" "bar", Pair.Value "password" "baz"]
[c3, c4].each c->
c.jdbc_properties . should_equal <|
add_ssl [Pair_Data "user" "*", Pair_Data "password" "fallback_password"]
add_ssl [Pair.Value "user" "*", Pair.Value "password" "fallback_password"]
Test_Environment.unsafe_with_environment_override "PGUSER" "other user" <|
[c2, c3, c4].each c->
c.jdbc_properties . should_equal <|
add_ssl [Pair_Data "user" "*", Pair_Data "password" "fallback_password"]
add_ssl [Pair.Value "user" "*", Pair.Value "password" "fallback_password"]
Test_Environment.unsafe_with_environment_override "PGPASSWORD" "other password" <|
[c2, c3, c4].each c->
c.jdbc_properties . should_equal <| add_ssl [Pair_Data "user" "other user", Pair_Data "password" "other password"]
c.jdbc_properties . should_equal <| add_ssl [Pair.Value "user" "other user", Pair.Value "password" "other password"]
spec =
table_spec

View File

@ -6,9 +6,11 @@ import project.In_Memory_Tests
import project.Database.Main as Database_Tests
import project.Data_Formatter_Spec
import project.Expression_Spec
import project.Parse_Values_Spec
main = Test_Suite.run_main <|
In_Memory_Tests.in_memory_spec
Data_Formatter_Spec.spec
Expression_Spec.spec
Parse_Values_Spec.spec
Database_Tests.databases_spec

View File

@ -12,21 +12,21 @@ from Standard.Test import Test, Test_Suite, Problems
spec = Test.group "Table.parse_values" <|
Test.specify "should correctly parse integers" <|
t1 = Table.new [["ints", ["0", "+0", "-0", "+1", "-1", "1", "000", "0010", "12345", Nothing]]]
t2 = t1.parse_values column_types=[Column_Type_Selection 0 Integer]
t2 = t1.parse_values column_types=[Column_Type_Selection.Value 0 Integer]
t2.at "ints" . to_vector . should_equal [0, 0, 0, 1, -1, 1, Nothing, Nothing, 12345, Nothing]
Test.specify "should correctly parse decimals" <|
t1 = Table.new [["ints", ["0", "+0", "-0", "+1", "-1", "1", "12345", Nothing]]]
t2 = t1.parse_values column_types=[Column_Type_Selection 0 Decimal]
t2 = t1.parse_values column_types=[Column_Type_Selection.Value 0 Decimal]
t2.at "ints" . to_vector . should_equal [0, 0, 0, 1, -1, 1, 12345, Nothing]
t2.at "ints" . to_vector . map .to_text . should_equal ["0.0", "0.0", "-0.0", "1.0", "-1.0", "1.0", "12345.0", "Nothing"]
t3 = Table.new [["floats", ["0.0", "+0.0", "-0.0", "+1.0", "-1.0", "1.0", "0.0000", "10.", "12345."]]]
t4 = t3.parse_values column_types=[Column_Type_Selection 0 Decimal]
t4 = t3.parse_values column_types=[Column_Type_Selection.Value 0 Decimal]
t4.at "floats" . to_vector . should_equal [0, 0, 0, 1, -1, 1, 0, 10, 12345]
t5 = Table.new [["floats", [".0", "0.", "1.", ".1", ".123", "-.1", "+.1", "+0.0", "0.1234", Nothing, "11111111.111"]]]
t6 = t5.parse_values column_types=[Column_Type_Selection 0 Decimal]
t6 = t5.parse_values column_types=[Column_Type_Selection.Value 0 Decimal]
t6.at "floats" . to_vector . should_equal [0.0, 0.0, 1.0, 0.1, 0.123, -0.1, 0.1, 0.0, 0.1234, Nothing, 11111111.111]
Test.specify "should warn on leading zeros in numbers, if asked" <|
@ -35,66 +35,63 @@ spec = Test.group "Table.parse_values" <|
t1_parsed = [0, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, 12345, Nothing]
t1_zeros = ["+00", "-00", "+01", "-01", "01", "000", "0010"]
t3 = t1.parse_values column_types=[Column_Type_Selection 0 Integer]
t3 = t1.parse_values column_types=[Column_Type_Selection.Value 0 Integer]
t3.at "ints" . to_vector . should_equal t1_parsed
Warning.get_all t3 . map .value . should_equal [Leading_Zeros "ints" Integer t1_zeros]
Warning.get_all t3 . map .value . should_equal [Leading_Zeros.Leading_Zeros_Data "ints" Integer t1_zeros]
t4 = t1.parse_values column_types=[Column_Type_Selection 0 Decimal]
t4 = t1.parse_values column_types=[Column_Type_Selection.Value 0 Decimal]
t4.at "ints" . to_vector . should_equal t1_parsed
Warning.get_all t4 . map .value . should_equal [Leading_Zeros "ints" Decimal t1_zeros]
Warning.get_all t4 . map .value . should_equal [Leading_Zeros.Leading_Zeros_Data "ints" Decimal t1_zeros]
t5 = t2.parse_values column_types=[Column_Type_Selection 0 Decimal]
t5 = t2.parse_values column_types=[Column_Type_Selection.Value 0 Decimal]
t5.at "floats" . to_vector . should_equal [0.0, 0.0, Nothing, Nothing, Nothing, 1.0]
Warning.get_all t5 . map .value . should_equal [Leading_Zeros "floats" Decimal ["00.", "01.0", '-0010.0000']]
Warning.get_all t5 . map .value . should_equal [Leading_Zeros.Leading_Zeros_Data "floats" Decimal ["00.", "01.0", '-0010.0000']]
opts = Data_Formatter allow_leading_zeros=True
opts = Data_Formatter.Value allow_leading_zeros=True
t1_parsed_zeros = [0, 0, 0, 1, -1, 1, 0, 10, 12345, Nothing]
t6 = t1.parse_values value_formatter=opts column_types=[Column_Type_Selection 0 Integer]
t6 = t1.parse_values value_formatter=opts column_types=[Column_Type_Selection.Value 0 Integer]
t6.at "ints" . to_vector . should_equal t1_parsed_zeros
Warning.get_all t6 . should_equal []
t7 = t1.parse_values value_formatter=opts column_types=[Column_Type_Selection 0 Decimal]
t7 = t1.parse_values value_formatter=opts column_types=[Column_Type_Selection.Value 0 Decimal]
t7.at "ints" . to_vector . should_equal t1_parsed_zeros
Warning.get_all t7 . should_equal []
t8 = t2.parse_values value_formatter=opts column_types=[Column_Type_Selection 0 Decimal]
t8 = t2.parse_values value_formatter=opts column_types=[Column_Type_Selection.Value 0 Decimal]
t8.at "floats" . to_vector . should_equal [0.0, 0.0, 0.0, 1.0, -10.0, 1.0]
Warning.get_all t8 . should_equal []
Test.specify "should correctly parse booleans" <|
t1 = Table.new [["bools", ["true", "false", "True", "TRUE", "FALSE", Nothing, "False"]]]
t2 = t1.parse_values column_types=[Column_Type_Selection 0 Boolean]
t2 = t1.parse_values column_types=[Column_Type_Selection.Value 0 Boolean]
t2.at "bools" . to_vector . should_equal [True, False, True, True, False, Nothing, False]
date_as_vector d = if d.is_nothing then Nothing else [d.getYear, d.getMonthValue, d.getDayOfMonth]
time_as_vector d = if d.is_nothing then Nothing else [d.getHour, d.getMinute, d.getSecond, d.getNano]
datetime_as_vector d = if d.is_nothing then Nothing else [d.getYear, d.getMonthValue, d.getDayOfMonth, d.getHour, d.getMinute, d.getSecond, d.getNano]
Test.specify "should correctly parse date and time" <|
t1 = Table.new [["dates", ["2022-05-07", "2000-01-01", "2010-12-31"]]]
t2 = t1.parse_values column_types=[Column_Type_Selection 0 Date.Date]
t2.at "dates" . to_vector . map date_as_vector . should_equal [[2022, 5, 7], [2000, 1, 1], [2010, 12, 31]]
t2 = t1.parse_values column_types=[Column_Type_Selection.Value 0 Date]
t2.at "dates" . to_vector . should_equal [Date.new 2022 5 7, Date.new 2000 1 1, Date.new 2010 12 31]
t3 = Table.new [["datetimes", ["2022-05-07 23:59:59", "2000-01-01 00:00:00", "2010-12-31 12:34:56"]]]
t4 = t3.parse_values column_types=[Column_Type_Selection 0 Time.Time]
t4.at "datetimes" . to_vector . map datetime_as_vector . should_equal [[2022, 5, 7, 23, 59, 59, 0], [2000, 1, 1, 0, 0, 0, 0], [2010, 12, 31, 12, 34, 56, 0]]
t4 = t3.parse_values column_types=[Column_Type_Selection.Value 0 Date_Time]
t4.at "datetimes" . to_vector . should_equal [Date_Time.new 2022 5 7 23 59 59, Date_Time.new 2000 1 1, Date_Time.new 2010 12 31 12 34 56]
t5 = Table.new [["times", ["23:59:59", "00:00:00", "12:34:56"]]]
t6 = t5.parse_values column_types=[Column_Type_Selection 0 Time_Of_Day.Time_Of_Day]
t6.at "times" . to_vector . map time_as_vector . should_equal [[23, 59, 59, 0], [0, 0, 0, 0], [12, 34, 56, 0]]
t6 = t5.parse_values column_types=[Column_Type_Selection.Value 0 Time_Of_Day]
t6.at "times" . to_vector . should_equal [Time_Of_Day.new 23 59 59, Time_Of_Day.new, Time_Of_Day.new 12 34 56]
Test.specify "should parse date and time in various formats" <|
opts = Data_Formatter date_formats=["d.M.y", "d MMM y[ G]", "E, d MMM y"] datetime_formats=["yyyy-MM-dd'T'HH:mm:ss", "dd/MM/yyyy HH:mm"] time_formats=["H:mm:ss.n", "h:mma"]
opts = Data_Formatter.Value date_formats=["d.M.y", "d MMM y[ G]", "E, d MMM y"] datetime_formats=["yyyy-MM-dd'T'HH:mm:ss", "dd/MM/yyyy HH:mm"] time_formats=["H:mm:ss.n", "h:mma"]
t1 = Table.new [["dates", ["1.2.476", "10 Jan 1900 AD", "Tue, 3 Jun 2008"]]]
t2 = t1.parse_values opts column_types=[Column_Type_Selection 0 Date.Date]
t2.at "dates" . to_vector . map date_as_vector . should_equal [[476, 2, 1], [1900, 1, 10], [2008, 6, 3]]
t2 = t1.parse_values opts column_types=[Column_Type_Selection.Value 0 Date]
t2.at "dates" . to_vector . should_equal [Date.new 476 2 1, Date.new 1900 1 10, Date.new 2008 6 3]
t3 = Table.new [["datetimes", ["2011-12-03T10:15:30", "31/12/2012 22:33"]]]
t4 = t3.parse_values opts column_types=[Column_Type_Selection 0 Time.Time]
t4.at "datetimes" . to_vector . map datetime_as_vector . should_equal [[2011, 12, 3, 10, 15, 30, 0], [2012, 12, 31, 22, 33, 0, 0]]
t4 = t3.parse_values opts column_types=[Column_Type_Selection.Value 0 Date_Time]
t4.at "datetimes" . to_vector . should_equal [Date_Time.new 2011 12 3 10 15 30, Date_Time.new 2012 12 31 22 33]
t5 = Table.new [["times", ["1:02:03.987654321", "1:30PM"]]]
t6 = t5.parse_values opts column_types=[Column_Type_Selection 0 Time_Of_Day.Time_Of_Day]
t6.at "times" . to_vector . map time_as_vector . should_equal [[1, 2, 3, 987654321], [13, 30, 0, 0]]
t6 = t5.parse_values opts column_types=[Column_Type_Selection.Value 0 Time_Of_Day]
t6.at "times" . to_vector . should_equal [Time_Of_Day.new 1 2 3 nanosecond=987654321, Time_Of_Day.new 13 30 0 0]
Test.specify "should warn when cells do not fit the expected format" <|
ints = ["ints", ["0", "1", "1.0", "foobar", "", "--1", "+-1", "10", "-+1"]]
@ -105,51 +102,51 @@ spec = Test.group "Table.parse_values" <|
t = Table.new [ints, floats, bools, ["dates", time_content], ["datetimes", time_content], ["times", time_content]]
action pb =
t.parse_values column_types=[Column_Type_Selection 0 Integer, Column_Type_Selection "floats" Decimal, Column_Type_Selection "bools" Boolean, Column_Type_Selection "dates" Date.Date, Column_Type_Selection "datetimes" Time.Time, Column_Type_Selection "times" Time_Of_Day.Time_Of_Day] on_problems=pb
t.parse_values column_types=[Column_Type_Selection.Value 0 Integer, Column_Type_Selection.Value "floats" Decimal, Column_Type_Selection.Value "bools" Boolean, Column_Type_Selection.Value "dates" Date, Column_Type_Selection.Value "datetimes" Date_Time, Column_Type_Selection.Value "times" Time_Of_Day] on_problems=pb
tester t =
t.at "ints" . to_vector . should_equal [0, 1, Nothing, Nothing, Nothing, Nothing, Nothing, 10, Nothing]
t.at "floats" . to_vector . should_equal [0, 2, Nothing, Nothing, Nothing, Nothing, Nothing, 100, Nothing]
t.at "bools" . to_vector . should_equal [True, False, Nothing, Nothing, Nothing, Nothing, Nothing, True, Nothing]
t.at "dates" . to_vector . map date_as_vector . should_equal [[2001, 1, 1], Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing]
t.at "datetimes" . to_vector . map datetime_as_vector . should_equal [Nothing, [2001, 1, 1, 12, 34, 56, 0], Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing]
t.at "times" . to_vector . map time_as_vector . should_equal [Nothing, Nothing, [10, 0, 10, 0], Nothing, Nothing, Nothing, Nothing, Nothing, Nothing]
t.at "dates" . to_vector . should_equal [Date.new 2001 1 1, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing]
t.at "datetimes" . to_vector . should_equal [Nothing, Date_Time.new 2001 1 1 12 34 56, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing]
t.at "times" . to_vector . should_equal [Nothing, Nothing, Time_Of_Day.new 10 0 10, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing]
problems =
ints = Invalid_Format "ints" Integer ["1.0", "foobar", "", "--1", "+-1", "-+1"]
floats = Invalid_Format "floats" Decimal ["1e6", "foobar", "", "--1", "+-1", "-+1"]
bools = Invalid_Format "bools" Boolean ["fAlSE", "foobar", "", "0", "1", "truefalse"]
dates = Invalid_Format "dates" Date.Date ["2001-01-01 12:34:56", "10:00:10", "Tuesday", "foobar", "", "10:99:99", "1/2/2003", "2001-30-10"]
datetimes = Invalid_Format "datetimes" Time.Time ["2001-01-01", "10:00:10", "Tuesday", "foobar", "", "10:99:99", "1/2/2003", "2001-30-10"]
times = Invalid_Format "times" Time_Of_Day.Time_Of_Day ["2001-01-01", "2001-01-01 12:34:56", "Tuesday", "foobar", "", "10:99:99", "1/2/2003", "2001-30-10"]
ints = Invalid_Format.Invalid_Format_Data "ints" Integer ["1.0", "foobar", "", "--1", "+-1", "-+1"]
floats = Invalid_Format.Invalid_Format_Data "floats" Decimal ["1e6", "foobar", "", "--1", "+-1", "-+1"]
bools = Invalid_Format.Invalid_Format_Data "bools" Boolean ["fAlSE", "foobar", "", "0", "1", "truefalse"]
dates = Invalid_Format.Invalid_Format_Data "dates" Date ["2001-01-01 12:34:56", "10:00:10", "Tuesday", "foobar", "", "10:99:99", "1/2/2003", "2001-30-10"]
datetimes = Invalid_Format.Invalid_Format_Data "datetimes" Date_Time ["2001-01-01", "10:00:10", "Tuesday", "foobar", "", "10:99:99", "1/2/2003", "2001-30-10"]
times = Invalid_Format.Invalid_Format_Data "times" Time_Of_Day ["2001-01-01", "2001-01-01 12:34:56", "Tuesday", "foobar", "", "10:99:99", "1/2/2003", "2001-30-10"]
[ints, floats, bools, dates, datetimes, times]
Problems.test_problem_handling action problems tester
Test.specify "should leave not selected columns unaffected" <|
t1 = Table.new [["A", ["1", "2"]], ["B", ["3", "4"]]]
t2 = t1.parse_values column_types=[Column_Type_Selection "B" Integer]
t2 = t1.parse_values column_types=[Column_Type_Selection.Value "B" Integer]
t2.at "A" . to_vector . should_equal ["1", "2"]
t2.at "B" . to_vector . should_equal [3, 4]
Test.specify "should report missing columns in Column_Type_Selection" <|
t = Table.new [["floats", ["1.0"]], ["ints", ["1"]], ["bools", ["True"]]]
action pb =
t.parse_values column_types=[Column_Type_Selection -1 Boolean, Column_Type_Selection "floats" Decimal, Column_Type_Selection "bars" Boolean, Column_Type_Selection "foos" Boolean, Column_Type_Selection 100 Boolean, Column_Type_Selection -100 Boolean] on_problems=pb
t.parse_values column_types=[Column_Type_Selection.Value -1 Boolean, Column_Type_Selection.Value "floats" Decimal, Column_Type_Selection.Value "bars" Boolean, Column_Type_Selection.Value "foos" Boolean, Column_Type_Selection.Value 100 Boolean, Column_Type_Selection.Value -100 Boolean] on_problems=pb
tester t =
t.at "floats" . to_vector . should_equal [1.0]
t.at "ints" . to_vector . should_equal ["1"]
t.at "bools" . to_vector . should_equal [True]
problems = [Missing_Input_Columns ["bars", "foos"], Column_Indexes_Out_Of_Range [100, -100]]
problems = [Missing_Input_Columns.Missing_Input_Columns_Data ["bars", "foos"], Column_Indexes_Out_Of_Range.Column_Indexes_Out_Of_Range_Data [100, -100]]
Problems.test_problem_handling action problems tester
Test.specify "should report duplicate columns in Column_Type_Selection" <|
t = Table.new [["floats", ["1.0"]], ["ints", ["1"]], ["bools", ["True"]], ["foo", ["42"]]]
action pb =
t.parse_values column_types=[Column_Type_Selection -2 Boolean, Column_Type_Selection "bools" Boolean, Column_Type_Selection "floats" Decimal, Column_Type_Selection "floats" Integer, Column_Type_Selection "ints" Integer] on_problems=pb
t.parse_values column_types=[Column_Type_Selection.Value -2 Boolean, Column_Type_Selection.Value "bools" Boolean, Column_Type_Selection.Value "floats" Decimal, Column_Type_Selection.Value "floats" Integer, Column_Type_Selection.Value "ints" Integer] on_problems=pb
tester t =
t.at "floats" . to_vector . should_equal ["1.0"]
t.at "ints" . to_vector . should_equal [1]
t.at "bools" . to_vector . should_equal [True]
t.at "foo" . to_vector . should_equal ["42"]
problems = [(Duplicate_Type_Selector "floats" ambiguous=True), (Duplicate_Type_Selector "bools" ambiguous=False)]
problems = [(Duplicate_Type_Selector.Duplicate_Type_Selector_Data "floats" ambiguous=True), (Duplicate_Type_Selector.Duplicate_Type_Selector_Data "bools" ambiguous=False)]
Problems.test_problem_handling action problems tester
Test.specify "should guess the datatype for columns" <|
@ -175,20 +172,20 @@ spec = Test.group "Table.parse_values" <|
t2.at "bools" . to_vector . should_equal [True, False, Nothing, True]
t2.at "floats+ints" . to_vector . should_equal [1.0, 2.2, -1.0, Nothing]
t2.at "text" . to_vector . should_equal ["foobar", "foo", "", Nothing]
t2.at "dates" . to_vector . map date_as_vector . should_equal [[2022, 10, 1], [2000, 1, 1], [1999, 1, 2], Nothing]
t2.at "datetimes" . to_vector . map datetime_as_vector . should_equal [[2022, 10, 1, 1, 2, 3, 0], [2000, 1, 1, 1, 2, 3, 0], [1999, 1, 2, 1, 2, 3, 0], Nothing]
t2.at "times" . to_vector . map time_as_vector . should_equal [[1, 2, 3, 0], [0, 0, 0, 0], [1, 2, 3, 0], Nothing]
t2.at "dates" . to_vector . should_equal [Date.new 2022 10 1, Date.new 2000 1 1, Date.new 1999 1 2, Nothing]
t2.at "datetimes" . to_vector . should_equal [Date_Time.new 2022 10 1 1 2 3, Date_Time.new 2000 1 1 1 2 3, Date_Time.new 1999 1 2 1 2 3, Nothing]
t2.at "times" . to_vector . should_equal [Time_Of_Day.new 1 2 3, Time_Of_Day.new, Time_Of_Day.new 1 2 3, Nothing]
t2.at "mixeddates" . to_vector . should_equal ["2022-10-01", "2000-01-01 01:02:03", "01:02:03", Nothing]
t2.at "text+ints" . to_vector . should_equal ["1", "2", "foobar", Nothing]
t3 = Table.new [["bools", ["1", "0", "True"]], ["ints", ["1", "0", "0"]]] . parse_values (Data_Formatter true_values=["1", "True"] false_values=["0", "False"])
t3 = Table.new [["bools", ["1", "0", "True"]], ["ints", ["1", "0", "0"]]] . parse_values (Data_Formatter.Value true_values=["1", "True"] false_values=["0", "False"])
t3.at "bools" . to_vector . should_equal [True, False, True]
t3.at "ints" . to_vector . should_equal [1, 0, 0]
t4 = Table.new [c2] . parse_values (Data_Formatter allow_leading_zeros=True)
t4 = Table.new [c2] . parse_values (Data_Formatter.Value allow_leading_zeros=True)
t4 . at "ints0" . to_vector . should_equal [1, 2, Nothing, -1]
t5 = t.parse_values column_types=[Column_Type_Selection "ints" Decimal, Column_Type_Selection "floats" Auto, Column_Type_Selection "text+ints" Auto]
t5 = t.parse_values column_types=[Column_Type_Selection.Value "ints" Decimal, Column_Type_Selection.Value "floats" Auto, Column_Type_Selection.Value "text+ints" Auto]
t5.at "ints" . to_vector . should_equal [1.0, 2.0, -123.0, Nothing]
# `ints` are requested to be parsed as decimals.
t5.at "ints" . to_vector . first . should_be_a Decimal
@ -200,29 +197,29 @@ spec = Test.group "Table.parse_values" <|
t5.at "bools" . to_vector . should_equal ["true", " False", Nothing, "True"]
Test.specify "should allow to specify a thousands separator and a custom decimal point" <|
opts = Data_Formatter decimal_point=',' thousand_separator='_'
opts = Data_Formatter.Value decimal_point=',' thousand_separator='_'
t1 = Table.new [["floats", ["0,0", "+0,0", "-0,0", "+1,5", "-1,2", "1,0", "0,0000", "10_000,", ",0"]]]
t2 = t1.parse_values opts column_types=[Column_Type_Selection 0 Decimal]
t2 = t1.parse_values opts column_types=[Column_Type_Selection.Value 0 Decimal]
t2.at "floats" . to_vector . should_equal [0.0, 0.0, 0.0, 1.5, -1.2, 1.0, 0.0, 10000.0, 0.0]
t3 = Table.new [["xs", ["1,2", "1.2", "_0", "0_", "1_0_0"]]]
t4 = t3.parse_values opts column_types=[Column_Type_Selection 0 Decimal]
t4 = t3.parse_values opts column_types=[Column_Type_Selection.Value 0 Decimal]
t4.at "xs" . to_vector . should_equal [1.2, Nothing, Nothing, Nothing, 100.0]
Warning.get_all t4 . map .value . should_equal [Invalid_Format "xs" Decimal ["1.2", "_0", "0_"]]
t5 = t3.parse_values opts column_types=[Column_Type_Selection 0 Integer]
Warning.get_all t4 . map .value . should_equal [Invalid_Format.Invalid_Format_Data "xs" Decimal ["1.2", "_0", "0_"]]
t5 = t3.parse_values opts column_types=[Column_Type_Selection.Value 0 Integer]
t5.at "xs" . to_vector . should_equal [Nothing, Nothing, Nothing, Nothing, 100.0]
Warning.get_all t5 . map .value . should_equal [Invalid_Format "xs" Integer ["1,2", "1.2", "_0", "0_"]]
Warning.get_all t5 . map .value . should_equal [Invalid_Format.Invalid_Format_Data "xs" Integer ["1,2", "1.2", "_0", "0_"]]
Test.specify "should allow to specify custom values for booleans" <|
opts_1 = Data_Formatter true_values=["1", "YES"] false_values=["0"]
opts_1 = Data_Formatter.Value true_values=["1", "YES"] false_values=["0"]
t1 = Table.new [["bools", ["1", "0", "YES", "1", "0"]]]
t2 = t1.parse_values opts_1 column_types=[Column_Type_Selection 0 Boolean]
t2 = t1.parse_values opts_1 column_types=[Column_Type_Selection.Value 0 Boolean]
t2.at "bools" . to_vector . should_equal [True, False, True, True, False]
t3 = Table.new [["bools", ["1", "NO", "False", "True", "YES", "no", "oui", "0"]]]
t4 = t3.parse_values opts_1 column_types=[Column_Type_Selection 0 Boolean]
t4 = t3.parse_values opts_1 column_types=[Column_Type_Selection.Value 0 Boolean]
t4.at "bools" . to_vector . should_equal [True, Nothing, Nothing, Nothing, True, Nothing, Nothing, False]
Warning.get_all t4 . map .value . should_equal [Invalid_Format "bools" Boolean ["NO", "False", "True", "no", "oui"]]
Warning.get_all t4 . map .value . should_equal [Invalid_Format.Invalid_Format_Data "bools" Boolean ["NO", "False", "True", "no", "oui"]]
whitespace_table =
ints = ["ints", ["0", "1 ", "0 1", " 2"]]
@ -234,26 +231,26 @@ spec = Test.group "Table.parse_values" <|
Table.new [ints, floats, bools, dates, datetimes, times]
Test.specify "should trim input values by default" <|
t1 = whitespace_table.parse_values column_types=[Column_Type_Selection 0 Integer, Column_Type_Selection "floats" Decimal, Column_Type_Selection "bools" Boolean, Column_Type_Selection "dates" Date.Date, Column_Type_Selection "datetimes" Time.Time, Column_Type_Selection "times" Time_Of_Day.Time_Of_Day]
t1 = whitespace_table.parse_values column_types=[Column_Type_Selection.Value 0 Integer, Column_Type_Selection.Value "floats" Decimal, Column_Type_Selection.Value "bools" Boolean, Column_Type_Selection.Value "dates" Date, Column_Type_Selection.Value "datetimes" Date_Time, Column_Type_Selection.Value "times" Time_Of_Day]
t1.at "ints" . to_vector . should_equal [0, 1, Nothing, 2]
t1.at "floats" . to_vector . should_equal [0.0, 2.0, Nothing, 10.0]
t1.at "bools" . to_vector . should_equal [True, False, Nothing, False]
t1.at "dates" . to_vector . map date_as_vector . should_equal [[2022, 1, 1], [2022, 7, 17], Nothing, Nothing]
t1.at "datetimes" . to_vector . map datetime_as_vector . should_equal [[2022, 1, 1, 11, 59, 0, 0], Nothing, Nothing, Nothing]
t1.at "times" . to_vector . map time_as_vector . should_equal [[11, 0, 0, 0], [0, 0, 0, 0], Nothing, Nothing]
t1.at "dates" . to_vector . should_equal [Date.new 2022 1 1, Date.new 2022 7 17, Nothing, Nothing]
t1.at "datetimes" . to_vector . should_equal [Date_Time.new 2022 1 1 11 59, Nothing, Nothing, Nothing]
t1.at "times" . to_vector . should_equal [Time_Of_Day.new 11 0 0, Time_Of_Day.new, Nothing, Nothing]
warnings = Warning.get_all t1 . map .value
expected_warnings = Vector.new_builder
expected_warnings.append (Invalid_Format "ints" Integer ["0 1"])
expected_warnings.append (Invalid_Format "floats" Decimal ["- 1"])
expected_warnings.append (Invalid_Format "bools" Boolean ["t rue"])
expected_warnings.append (Invalid_Format "dates" Date.Date ["2022 - 07 - 17", ""])
expected_warnings.append (Invalid_Format "datetimes" Time.Time ["2022 - 07 - 17 1:2:3", "2022-01-01 11:59:00"])
expected_warnings.append (Invalid_Format "times" Time_Of_Day.Time_Of_Day ["00 : 00 : 00"])
expected_warnings.append (Invalid_Format.Invalid_Format_Data "ints" Integer ["0 1"])
expected_warnings.append (Invalid_Format.Invalid_Format_Data "floats" Decimal ["- 1"])
expected_warnings.append (Invalid_Format.Invalid_Format_Data "bools" Boolean ["t rue"])
expected_warnings.append (Invalid_Format.Invalid_Format_Data "dates" Date ["2022 - 07 - 17", ""])
expected_warnings.append (Invalid_Format.Invalid_Format_Data "datetimes" Date_Time ["2022 - 07 - 17 1:2:3", "2022-01-01 11:59:00"])
expected_warnings.append (Invalid_Format.Invalid_Format_Data "times" Time_Of_Day ["00 : 00 : 00"])
warnings.should_contain_the_same_elements_as expected_warnings.to_vector
Test.specify "should fail to parse if whitespace is present and trimming is turned off" <|
opts = Data_Formatter trim_values=False
t1 = whitespace_table.parse_values opts column_types=[Column_Type_Selection 0 Integer, Column_Type_Selection "floats" Decimal, Column_Type_Selection "bools" Boolean, Column_Type_Selection "dates" Date.Date, Column_Type_Selection "datetimes" Time.Time, Column_Type_Selection "times" Time_Of_Day.Time_Of_Day]
opts = Data_Formatter.Value trim_values=False
t1 = whitespace_table.parse_values opts column_types=[Column_Type_Selection.Value 0 Integer, Column_Type_Selection.Value "floats" Decimal, Column_Type_Selection.Value "bools" Boolean, Column_Type_Selection.Value "dates" Date, Column_Type_Selection.Value "datetimes" Date_Time, Column_Type_Selection.Value "times" Time_Of_Day]
t1.at "ints" . to_vector . should_equal [0, Nothing, Nothing, Nothing]
t1.at "floats" . to_vector . should_equal [Nothing, Nothing, Nothing, 10.0]
t1.at "bools" . to_vector . should_equal [Nothing, Nothing, Nothing, False]
@ -263,12 +260,12 @@ spec = Test.group "Table.parse_values" <|
t1.at "times" . to_vector . should_equal nulls
warnings = Warning.get_all t1 . map .value
expected_warnings = Vector.new_builder
expected_warnings.append (Invalid_Format "ints" Integer ["1 ", "0 1", " 2"])
expected_warnings.append (Invalid_Format "floats" Decimal ["0 ", " 2.0", "- 1"])
expected_warnings.append (Invalid_Format "bools" Boolean ["True ", " false", "t rue"])
expected_warnings.append (Invalid_Format "dates" Date.Date [" 2022-01-01", "2022-07-17 ", "2022 - 07 - 17", ""])
expected_warnings.append (Invalid_Format "datetimes" Time.Time [" 2022-01-01 11:59:00 ", "2022 - 07 - 17 1:2:3 ", "2022-01-01 11:59:00"])
expected_warnings.append (Invalid_Format "times" Time_Of_Day.Time_Of_Day ["11:00:00 ", " 00:00:00", "00 : 00 : 00"])
expected_warnings.append (Invalid_Format.Invalid_Format_Data "ints" Integer ["1 ", "0 1", " 2"])
expected_warnings.append (Invalid_Format.Invalid_Format_Data "floats" Decimal ["0 ", " 2.0", "- 1"])
expected_warnings.append (Invalid_Format.Invalid_Format_Data "bools" Boolean ["True ", " false", "t rue"])
expected_warnings.append (Invalid_Format.Invalid_Format_Data "dates" Date [" 2022-01-01", "2022-07-17 ", "2022 - 07 - 17", ""])
expected_warnings.append (Invalid_Format.Invalid_Format_Data "datetimes" Date_Time [" 2022-01-01 11:59:00 ", "2022 - 07 - 17 1:2:3 ", "2022-01-01 11:59:00"])
expected_warnings.append (Invalid_Format.Invalid_Format_Data "times" Time_Of_Day ["11:00:00 ", " 00:00:00", "00 : 00 : 00"])
warnings.should_contain_the_same_elements_as expected_warnings.to_vector
Test.specify "should fallback to text if whitespace is present and trimming is turned off" <|
@ -276,7 +273,7 @@ spec = Test.group "Table.parse_values" <|
c2 = [" 1.0 ", "2.2", Nothing, "-1.0"]
c3 = ["true", " False", Nothing, "True"]
t = Table.new [["ints", c1], ["floats", c2], ["bools", c3]]
t2 = t.parse_values (Data_Formatter trim_values=False)
t2 = t.parse_values (Data_Formatter.Value trim_values=False)
Warning.get_all t2 . should_equal []
t2.at "ints" . to_vector . should_equal c1

View File

@ -1,6 +1,5 @@
from Standard.Base import all
from Standard.Base.Error.Problem_Behavior import Report_Error
import Standard.Base.Data.Time.Duration
from Standard.Table import Table, Column, Sort_Column, Column_Selector, Sort_Column_Selector, Aggregate_Column
from Standard.Table.Data.Aggregate_Column.Aggregate_Column import all hiding First, Last

View File

@ -109,7 +109,7 @@ spec = Test.group "Maps" <|
m.last . should_equal Nothing
Test.specify "should be able to add a Nothing key to the map" <|
m = Map.empty . insert Nothing 1
m.last . should_equal (Pair_Data Nothing 1)
m.last . should_equal (Pair.Value Nothing 1)
Test.specify "should be able to add a Nothing key to the map of Text" <|
m = Map.empty . insert "A" 2 . insert Nothing 1 . insert "B" 3
m.get "A" . should_equal 2

View File

@ -104,13 +104,13 @@ spec = Test.group "Range" <|
Test.specify "should allow iteration with index" <|
vec_mut = Vector.new_builder
5.up_to 8 . each_with_index ix-> elem->
vec_mut.append (Pair_Data ix elem)
vec_mut.to_vector . should_equal [Pair_Data 0 5, Pair_Data 1 6, Pair_Data 2 7]
vec_mut.append (Pair.Value ix elem)
vec_mut.to_vector . should_equal [Pair.Value 0 5, Pair.Value 1 6, Pair.Value 2 7]
vec_mut_2 = Vector.new_builder
5.up_to 10 . with_step 2 . each_with_index ix-> elem->
vec_mut_2.append (Pair_Data ix elem)
vec_mut_2.to_vector . should_equal [Pair_Data 0 5, Pair_Data 1 7, Pair_Data 2 9]
vec_mut_2.append (Pair.Value ix elem)
vec_mut_2.to_vector . should_equal [Pair.Value 0 5, Pair.Value 1 7, Pair.Value 2 9]
Test.specify "should be able to be folded" <|
1.up_to 6 . fold 0 (+) . should_equal 15
Test.specify "should check all" <|

View File

@ -23,7 +23,7 @@ spec =
Test.specify "return an error if the X values are all the same" <|
known_xs = [2, 2, 2, 2]
known_ys = [4, 5, 7, 10]
Regression.fit_least_squares known_xs known_ys . should_fail_with Regression.Fit_Error_Data
Regression.fit_least_squares known_xs known_ys . should_fail_with Regression.Fit_Error.Error
Test.specify "compute the linear trend line" <|
known_xs = [2, 3, 5, 7, 9]
@ -44,7 +44,7 @@ spec =
Test.specify "compute the linear trend line with an intercept" <|
known_xs = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
known_ys = [8.02128, 11.02421, 13.99566, 17.02678, 20.00486, 22.95283, 26.0143, 29.03238, 31.96427, 35.03896]
fitted = Regression.fit_least_squares known_xs known_ys (Regression.Linear_Model 100)
fitted = Regression.fit_least_squares known_xs known_ys (Regression.Model.Linear 100)
fitted.slope . should_equal -10.57056558 epsilon=double_error
fitted.intercept . should_equal 100.0 epsilon=double_error
fitted.r_squared . should_equal 0.9999900045 epsilon=double_error
@ -52,7 +52,7 @@ spec =
Test.specify "compute the exponential trend line" <|
known_xs = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
known_ys = [0.28652, 0.31735, 0.31963, 0.38482, 0.40056, 0.39013, 0.4976, 0.5665, 0.55457, 0.69135]
fitted = Regression.fit_least_squares known_xs known_ys Regression.Exponential_Model
fitted = Regression.fit_least_squares known_xs known_ys Regression.Model.Exponential
fitted.a . should_equal 0.25356436 epsilon=double_error
fitted.b . should_equal 0.09358242 epsilon=double_error
fitted.r_squared . should_equal 0.9506293649 epsilon=double_error
@ -60,7 +60,7 @@ spec =
Test.specify "predict values on a exponential trend line" <|
known_xs = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
known_ys = [0.28652, 0.31735, 0.31963, 0.38482, 0.40056, 0.39013, 0.4976, 0.5665, 0.55457, 0.69135]
fitted = Regression.fit_least_squares known_xs known_ys Regression.Exponential_Model
fitted = Regression.fit_least_squares known_xs known_ys Regression.Model.Exponential
test_xs = [0, 11, 12, 15]
expected_ys = [0.253564, 0.709829, 0.779464, 1.032103]
vector_compare (test_xs.map fitted.predict) expected_ys
@ -68,7 +68,7 @@ spec =
Test.specify "compute the exponential trend line with an intercept" <|
known_xs = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
known_ys = [0.28652, 0.31735, 0.31963, 0.38482, 0.40056, 0.39013, 0.4976, 0.5665, 0.55457, 0.69135]
fitted = Regression.fit_least_squares known_xs known_ys (Regression.Exponential_Model 0.2)
fitted = Regression.fit_least_squares known_xs known_ys (Regression.Model.Exponential 0.2)
fitted.a . should_equal 0.2 epsilon=double_error
fitted.b . should_equal 0.127482464 epsilon=double_error
fitted.r_squared . should_equal 0.9566066546 epsilon=double_error
@ -76,7 +76,7 @@ spec =
Test.specify "compute the logarithmic trend line" <|
known_xs = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
known_ys = [0.12128, 0.29057, 0.35933, 0.45949, 0.49113, 0.48285, 0.58132, 0.63144, 0.5916, 0.69158]
fitted = Regression.fit_least_squares known_xs known_ys Regression.Logarithmic_Model
fitted = Regression.fit_least_squares known_xs known_ys Regression.Model.Logarithmic
fitted.a . should_equal 0.232702284 epsilon=double_error
fitted.b . should_equal 0.11857587 epsilon=double_error
fitted.r_squared . should_equal 0.9730840179 epsilon=double_error
@ -84,7 +84,7 @@ spec =
Test.specify "predict values on a logarithmic trend line" <|
known_xs = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
known_ys = [0.12128, 0.29057, 0.35933, 0.45949, 0.49113, 0.48285, 0.58132, 0.63144, 0.5916, 0.69158]
fitted = Regression.fit_least_squares known_xs known_ys Regression.Logarithmic_Model
fitted = Regression.fit_least_squares known_xs known_ys Regression.Model.Logarithmic
test_xs = [0.1, 11, 12, 15]
expected_ys = [-0.417241, 0.676572, 0.696819, 0.748745]
vector_compare (test_xs.map fitted.predict) expected_ys
@ -92,7 +92,7 @@ spec =
Test.specify "compute the power trend line" <|
known_xs = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
known_ys = [0.26128, 0.28144, 0.26353, 0.30247, 0.28677, 0.23992, 0.30586, 0.32785, 0.26324, 0.3411]
fitted = Regression.fit_least_squares known_xs known_ys Regression.Power_Model
fitted = Regression.fit_least_squares known_xs known_ys Regression.Model.Power
fitted.a . should_equal 0.258838019 epsilon=double_error
fitted.b . should_equal 0.065513849 epsilon=double_error
fitted.r_squared . should_equal 0.2099579581 epsilon=double_error
@ -100,7 +100,7 @@ spec =
Test.specify "predict values on a power trend line" <|
known_xs = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
known_ys = [0.26128, 0.28144, 0.26353, 0.30247, 0.28677, 0.23992, 0.30586, 0.32785, 0.26324, 0.3411]
fitted = Regression.fit_least_squares known_xs known_ys Regression.Power_Model
fitted = Regression.fit_least_squares known_xs known_ys Regression.Model.Power
test_xs = [0.1, 11, 12, 15]
expected_ys = [0.222594, 0.302868, 0.3046, 0.309085]
vector_compare (test_xs.map fitted.predict) expected_ys

View File

@ -1,5 +1,4 @@
from Standard.Base import all
from Standard.Base.Data.Statistics import all
from Standard.Base.Data.Vector import Empty_Error
from Standard.Test import Test, Test_Suite
@ -41,78 +40,78 @@ spec =
text_set.compute . should_equal 3
Test.specify "should be able to get minimum of maximum values" <|
simple_set.compute Minimum . should_equal 1
number_set.compute Minimum . should_equal -45.84 epsilon=double_error
missing_set.compute Minimum . should_equal -41.02 epsilon=double_error
with_nans_set.compute Minimum . should_equal -41.02 epsilon=double_error
text_set.compute Minimum . should_equal "A"
simple_set.compute Maximum . should_equal 5
number_set.compute Maximum . should_equal 42.01 epsilon=double_error
missing_set.compute Maximum . should_equal 33.85 epsilon=double_error
with_nans_set.compute Maximum . should_equal 33.85 epsilon=double_error
text_set.compute Maximum . should_equal "D"
simple_set.compute Statistic.Minimum . should_equal 1
number_set.compute Statistic.Minimum . should_equal -45.84 epsilon=double_error
missing_set.compute Statistic.Minimum . should_equal -41.02 epsilon=double_error
with_nans_set.compute Statistic.Minimum . should_equal -41.02 epsilon=double_error
text_set.compute Statistic.Minimum . should_equal "A"
simple_set.compute Statistic.Maximum . should_equal 5
number_set.compute Statistic.Maximum . should_equal 42.01 epsilon=double_error
missing_set.compute Statistic.Maximum . should_equal 33.85 epsilon=double_error
with_nans_set.compute Statistic.Maximum . should_equal 33.85 epsilon=double_error
text_set.compute Statistic.Maximum . should_equal "D"
Test.specify "should be able to get sum of values" <|
simple_set.compute Sum . should_equal 15 epsilon=double_error
number_set.compute Sum . should_equal -101.28 epsilon=double_error
missing_set.compute Sum . should_equal -81.8 epsilon=double_error
with_nans_set.compute Sum . should_equal -81.8 epsilon=double_error
simple_set.compute Statistic.Sum . should_equal 15 epsilon=double_error
number_set.compute Statistic.Sum . should_equal -101.28 epsilon=double_error
missing_set.compute Statistic.Sum . should_equal -81.8 epsilon=double_error
with_nans_set.compute Statistic.Sum . should_equal -81.8 epsilon=double_error
Test.specify "should be able to get mean of values" <|
simple_set.compute Mean . should_equal 3 epsilon=double_error
number_set.compute Mean . should_equal -5.064 epsilon=double_error
missing_set.compute Mean . should_equal -5.1125 epsilon=double_error
with_nans_set.compute Mean . should_equal -5.1125 epsilon=double_error
simple_set.compute Statistic.Mean . should_equal 3 epsilon=double_error
number_set.compute Statistic.Mean . should_equal -5.064 epsilon=double_error
missing_set.compute Statistic.Mean . should_equal -5.1125 epsilon=double_error
with_nans_set.compute Statistic.Mean . should_equal -5.1125 epsilon=double_error
Test.specify "should be able to get sample variance of values" <|
simple_set.compute Variance . should_equal 2.5 epsilon=double_error
number_set.compute Variance . should_equal 582.0137832 epsilon=double_error
missing_set.compute Variance . should_equal 431.0218867 epsilon=double_error
with_nans_set.compute Variance . should_equal 431.0218867 epsilon=double_error
[1].compute Variance . is_nan . should_equal True
simple_set.compute Statistic.Variance . should_equal 2.5 epsilon=double_error
number_set.compute Statistic.Variance . should_equal 582.0137832 epsilon=double_error
missing_set.compute Statistic.Variance . should_equal 431.0218867 epsilon=double_error
with_nans_set.compute Statistic.Variance . should_equal 431.0218867 epsilon=double_error
[1].compute Statistic.Variance . is_nan . should_equal True
Test.specify "should be able to get population variance of values" <|
simple_set.compute (Variance True) . should_equal 2 epsilon=double_error
number_set.compute (Variance True) . should_equal 552.913094 epsilon=double_error
missing_set.compute (Variance True) . should_equal 404.0830188 epsilon=double_error
with_nans_set.compute (Variance True) . should_equal 404.0830188 epsilon=double_error
simple_set.compute (Statistic.Variance True) . should_equal 2 epsilon=double_error
number_set.compute (Statistic.Variance True) . should_equal 552.913094 epsilon=double_error
missing_set.compute (Statistic.Variance True) . should_equal 404.0830188 epsilon=double_error
with_nans_set.compute (Statistic.Variance True) . should_equal 404.0830188 epsilon=double_error
Test.specify "should be able to get population standard deviation of values" <|
simple_set.compute Standard_Deviation . should_equal 1.58113883 epsilon=double_error
number_set.compute Standard_Deviation . should_equal 24.12496183 epsilon=double_error
missing_set.compute Standard_Deviation . should_equal 20.76106661 epsilon=double_error
with_nans_set.compute Standard_Deviation . should_equal 20.76106661 epsilon=double_error
[1].compute Standard_Deviation . is_nan . should_equal True
simple_set.compute Statistic.Standard_Deviation . should_equal 1.58113883 epsilon=double_error
number_set.compute Statistic.Standard_Deviation . should_equal 24.12496183 epsilon=double_error
missing_set.compute Statistic.Standard_Deviation . should_equal 20.76106661 epsilon=double_error
with_nans_set.compute Statistic.Standard_Deviation . should_equal 20.76106661 epsilon=double_error
[1].compute Statistic.Standard_Deviation . is_nan . should_equal True
Test.specify "should be able to get sample standard deviation of values" <|
simple_set.compute (Standard_Deviation True) . should_equal 1.414213562 epsilon=double_error
number_set.compute (Standard_Deviation True) . should_equal 23.51410415 epsilon=double_error
missing_set.compute (Standard_Deviation True) . should_equal 20.1018163 epsilon=double_error
with_nans_set.compute (Standard_Deviation True) . should_equal 20.1018163 epsilon=double_error
simple_set.compute (Statistic.Standard_Deviation True) . should_equal 1.414213562 epsilon=double_error
number_set.compute (Statistic.Standard_Deviation True) . should_equal 23.51410415 epsilon=double_error
missing_set.compute (Statistic.Standard_Deviation True) . should_equal 20.1018163 epsilon=double_error
with_nans_set.compute (Statistic.Standard_Deviation True) . should_equal 20.1018163 epsilon=double_error
Test.specify "should be able to get sample skewness of values" <|
simple_set.compute Skew . should_equal 0 epsilon=double_error
number_set.compute Skew . should_equal 0.165086552 epsilon=double_error
missing_set.compute Skew . should_equal 0.084238123 epsilon=double_error
with_nans_set.compute Skew . should_equal 0.084238123 epsilon=double_error
[1, 2].compute Skew . is_nan . should_equal True
simple_set.compute Statistic.Skew . should_equal 0 epsilon=double_error
number_set.compute Statistic.Skew . should_equal 0.165086552 epsilon=double_error
missing_set.compute Statistic.Skew . should_equal 0.084238123 epsilon=double_error
with_nans_set.compute Statistic.Skew . should_equal 0.084238123 epsilon=double_error
[1, 2].compute Statistic.Skew . is_nan . should_equal True
Test.specify "should be able to get population skewness of values" <|
simple_set.compute (Skew True) . should_equal 0 epsilon=double_error
number_set.compute (Skew True) . should_equal 0.152437706 epsilon=double_error
missing_set.compute (Skew True) . should_equal 0.076125664 epsilon=double_error
with_nans_set.compute (Skew True) . should_equal 0.076125664 epsilon=double_error
[1, 2].compute (Skew True) . is_nan . should_equal True
simple_set.compute (Statistic.Skew True) . should_equal 0 epsilon=double_error
number_set.compute (Statistic.Skew True) . should_equal 0.152437706 epsilon=double_error
missing_set.compute (Statistic.Skew True) . should_equal 0.076125664 epsilon=double_error
with_nans_set.compute (Statistic.Skew True) . should_equal 0.076125664 epsilon=double_error
[1, 2].compute (Statistic.Skew True) . is_nan . should_equal True
Test.specify "should be able to get sample kurtosis of values" <|
simple_set.compute Kurtosis . should_equal -1.2 epsilon=double_error
number_set.compute Kurtosis . should_equal -0.449422438 epsilon=double_error
missing_set.compute Kurtosis . should_equal -0.201991074 epsilon=double_error
with_nans_set.compute Kurtosis . should_equal -0.201991074 epsilon=double_error
[1, 2, 3].compute Kurtosis . is_nan . should_equal True
simple_set.compute Statistic.Kurtosis . should_equal -1.2 epsilon=double_error
number_set.compute Statistic.Kurtosis . should_equal -0.449422438 epsilon=double_error
missing_set.compute Statistic.Kurtosis . should_equal -0.201991074 epsilon=double_error
with_nans_set.compute Statistic.Kurtosis . should_equal -0.201991074 epsilon=double_error
[1, 2, 3].compute Statistic.Kurtosis . is_nan . should_equal True
Test.specify "should allow bulk computation" <|
stats = [Count, Minimum, Mean, Variance, Skew]
stats = [Statistic.Count, Statistic.Minimum, Statistic.Mean, Statistic.Variance, Statistic.Skew]
expected = [20, -45.84, -5.064, 582.0137832, 0.165086552]
values = number_set.compute_bulk stats
vector_compare values expected
@ -120,17 +119,17 @@ spec =
Test.group "Statistics - empty Vector " <|
Test.specify "should be able to count and sum on empty Vector" <|
[].compute . should_equal 0
[].compute Sum . should_equal 0
[].compute Statistic.Sum . should_equal 0
Test.specify "should fail with Empty_Error for Minimum and Maximum on empty Vector" <|
[].compute Minimum . should_fail_with Empty_Error
[].compute Maximum . should_fail_with Empty_Error
[].compute Statistic.Minimum . should_fail_with Empty_Error
[].compute Statistic.Maximum . should_fail_with Empty_Error
Test.specify "should be NaN for other statistics sum on empty Vector" <|
[].compute Mean . is_nan . should_equal True
[].compute Variance . is_nan . should_equal True
[].compute Skew . is_nan . should_equal True
[].compute Kurtosis . is_nan . should_equal True
[].compute Statistic.Mean . is_nan . should_equal True
[].compute Statistic.Variance . is_nan . should_equal True
[].compute Statistic.Skew . is_nan . should_equal True
[].compute Statistic.Kurtosis . is_nan . should_equal True
Test.group "Statistics - invalid input" <|
text_set = ["A", "B", Nothing, "D"]
@ -138,55 +137,55 @@ spec =
no_ord_set = [No_Ord.Value 10, No_Ord.Value 2, Nothing, No_Ord.Value 9]
Test.specify "should fail with Illegal_Argument_Error on number based statistics for text Vector" <|
text_set.compute Sum . should_fail_with Illegal_Argument_Error_Data
text_set.compute Mean . should_fail_with Illegal_Argument_Error_Data
text_set.compute Variance . should_fail_with Illegal_Argument_Error_Data
text_set.compute Skew . should_fail_with Illegal_Argument_Error_Data
text_set.compute Kurtosis . should_fail_with Illegal_Argument_Error_Data
text_set.compute Statistic.Sum . should_fail_with Illegal_Argument_Error_Data
text_set.compute Statistic.Mean . should_fail_with Illegal_Argument_Error_Data
text_set.compute Statistic.Variance . should_fail_with Illegal_Argument_Error_Data
text_set.compute Statistic.Skew . should_fail_with Illegal_Argument_Error_Data
text_set.compute Statistic.Kurtosis . should_fail_with Illegal_Argument_Error_Data
Test.specify "should be able to do Count, Minimum and Maximum on custom type with compare_to" <|
ord_set.compute . should_equal 3
ord_set.compute Minimum . should_equal (Ord.Value 10)
ord_set.compute Maximum . should_equal (Ord.Value 2)
ord_set.compute Statistic.Minimum . should_equal (Ord.Value 10)
ord_set.compute Statistic.Maximum . should_equal (Ord.Value 2)
Test.specify "should fail with Incomparable_Values_Error on custom type without compare_to" <|
no_ord_set.compute . should_equal 3
no_ord_set.compute Minimum . should_fail_with Incomparable_Values_Error
no_ord_set.compute Maximum . should_fail_with Incomparable_Values_Error
no_ord_set.compute Statistic.Minimum . should_fail_with Incomparable_Values_Error
no_ord_set.compute Statistic.Maximum . should_fail_with Incomparable_Values_Error
Test.specify "should fail with Incomparable_Values_Error on mixed Vectors" <|
[1, False].compute Minimum . should_fail_with Incomparable_Values_Error
[1, False].compute Statistic.Minimum . should_fail_with Incomparable_Values_Error
Test.group "Rank Data" <|
Test.specify "can rank a Decimal data series" <|
values = [409.892906, 0.839952, 796.468572, 126.931298, -405.265005, -476.675817, 441.651325, 796.468572, 78.50094, 340.163324, 234.861926, 409.892906, 226.467105, 234.861926, 126.931298, 637.870512, -71.008044, -386.399663, -126.534337, -476.675817, 78.50094, -386.399663, 409.892906, 868.54485, 669.113037, 669.113037, 0.839952, 407.162613, -476.675817, 126.931298]
rank_data values . should_equal [9, 21.5, 2.5, 17, 27, 29, 7, 2.5, 19.5, 12, 13.5, 9, 15, 13.5, 17, 6, 23, 25.5, 24, 29, 19.5, 25.5, 9, 1, 4.5, 4.5, 21.5, 11, 29, 17]
rank_data values Rank_Method.Minimum . should_equal [8, 21, 2, 16, 27, 28, 7, 2, 19, 12, 13, 8, 15, 13, 16, 6, 23, 25, 24, 28, 19, 25, 8, 1, 4, 4, 21, 11, 28, 16]
rank_data values Rank_Method.Maximum . should_equal [10, 22, 3, 18, 27, 30, 7, 3, 20, 12, 14, 10, 15, 14, 18, 6, 23, 26, 24, 30, 20, 26, 10, 1, 5, 5, 22, 11, 30, 18]
rank_data values Rank_Method.Ordinal . should_equal [8, 21, 2, 16, 27, 28, 7, 3, 19, 12, 13, 9, 15, 14, 17, 6, 23, 25, 24, 29, 20, 26, 10, 1, 4, 5, 22, 11, 30, 18]
rank_data values Rank_Method.Dense . should_equal [6, 13, 2, 11, 17, 18, 5, 2, 12, 8, 9, 6, 10, 9, 11, 4, 14, 16, 15, 18, 12, 16, 6, 1, 3, 3, 13, 7, 18, 11]
Statistic.rank_data values . should_equal [9, 21.5, 2.5, 17, 27, 29, 7, 2.5, 19.5, 12, 13.5, 9, 15, 13.5, 17, 6, 23, 25.5, 24, 29, 19.5, 25.5, 9, 1, 4.5, 4.5, 21.5, 11, 29, 17]
Statistic.rank_data values Rank_Method.Minimum . should_equal [8, 21, 2, 16, 27, 28, 7, 2, 19, 12, 13, 8, 15, 13, 16, 6, 23, 25, 24, 28, 19, 25, 8, 1, 4, 4, 21, 11, 28, 16]
Statistic.rank_data values Rank_Method.Maximum . should_equal [10, 22, 3, 18, 27, 30, 7, 3, 20, 12, 14, 10, 15, 14, 18, 6, 23, 26, 24, 30, 20, 26, 10, 1, 5, 5, 22, 11, 30, 18]
Statistic.rank_data values Rank_Method.Ordinal . should_equal [8, 21, 2, 16, 27, 28, 7, 3, 19, 12, 13, 9, 15, 14, 17, 6, 23, 25, 24, 29, 20, 26, 10, 1, 4, 5, 22, 11, 30, 18]
Statistic.rank_data values Rank_Method.Dense . should_equal [6, 13, 2, 11, 17, 18, 5, 2, 12, 8, 9, 6, 10, 9, 11, 4, 14, 16, 15, 18, 12, 16, 6, 1, 3, 3, 13, 7, 18, 11]
Test.specify "can rank an Integer data series" <|
values = [10, 1, 124, 10]
rank_data values . should_equal [2.5, 4, 1, 2.5]
Statistic.rank_data values . should_equal [2.5, 4, 1, 2.5]
Test.specify "can rank a Number data series" <|
values = [10.0, 1, 12.4, 10]
rank_data values . should_equal [2.5, 4, 1, 2.5]
Statistic.rank_data values . should_equal [2.5, 4, 1, 2.5]
Test.specify "can rank a Text data series" <|
values = ["G", "AA", "B", "G", "D"]
rank_data values . should_equal [1.5, 5, 4, 1.5, 3]
Statistic.rank_data values . should_equal [1.5, 5, 4, 1.5, 3]
Test.specify "should fail with Incomparable_Values_Error on custom type without compare_to" <|
values = [No_Ord.Value 10, No_Ord.Value 2, No_Ord.Value 9]
rank_data values . should_fail_with Incomparable_Values_Error
Statistic.rank_data values . should_fail_with Incomparable_Values_Error
Test.specify "should fail with Incomparable_Values_Error on mixed Vectors" <|
rank_data [1, "A"] . should_fail_with Incomparable_Values_Error
Statistic.rank_data [1, "A"] . should_fail_with Incomparable_Values_Error
Test.specify "should fail with Illegal_Argument_Error on Vectors with Nothing" <|
rank_data [1, Nothing, 4] . should_fail_with Illegal_Argument_Error_Data
Statistic.rank_data [1, Nothing, 4] . should_fail_with Illegal_Argument_Error_Data
Test.group "Correlation Statistics" <|
series_a = [0.22345,0.258315,0.74663,Nothing,0.686843,0.692246,Nothing,0.401859,0.725442,Nothing,0.963527,0.520363,0.633053,0.397123,Nothing,0.458942,0.036499,0.368194,0.598939,0.296476,0.093746,0.609329]
@ -195,27 +194,27 @@ spec =
series = [series_a, series_b, series_c]
Test.specify "can compute Covariance, Correlation and R Squared between a pair of series" <|
series_a.compute (Covariance series_b) . should_equal -0.0053554 epsilon=double_error
series_a.compute (Pearson series_b) . should_equal -0.08263943 epsilon=double_error
series_a.compute (Spearman series_b) . should_equal -0.09313725 epsilon=double_error
series_a.compute (R_Squared series_b) . should_equal 0.006829275 epsilon=double_error
series_a.compute (Statistic.Covariance series_b) . should_equal -0.0053554 epsilon=double_error
series_a.compute (Statistic.Pearson series_b) . should_equal -0.08263943 epsilon=double_error
series_a.compute (Statistic.Spearman series_b) . should_equal -0.09313725 epsilon=double_error
series_a.compute (Statistic.R_Squared series_b) . should_equal 0.006829275 epsilon=double_error
Test.specify "can calculate a covariance matrix" <|
matrix = covariance_matrix series
matrix = Statistic.covariance_matrix series
matrix.length . should_equal 3
vector_compare (matrix.at 0) [0.0571699, -0.0053554, -0.02378204]
vector_compare (matrix.at 1) [-0.0053554, 0.07707381, -0.00098274]
vector_compare (matrix.at 2) [-0.02378204, -0.00098274, 0.05837098]
Test.specify "can calculate a pearson correlation matrix" <|
matrix = pearson_correlation series
matrix = Statistic.pearson_correlation series
matrix.length . should_equal 3
vector_compare (matrix.at 0) [1, -0.08263943, -0.40469045]
vector_compare (matrix.at 1) [-0.08263943, 1, -0.01537537]
vector_compare (matrix.at 2) [-0.40469045, -0.01537537, 1]
Test.specify "can calculate a spearman rank correlation matrix" <|
matrix = spearman_correlation series
matrix = Statistic.spearman_correlation series
matrix.length . should_equal 3
vector_compare (matrix.at 0) [1, -0.09313725, -0.43382353]
vector_compare (matrix.at 1) [-0.09313725, 1, 0]
@ -223,28 +222,28 @@ spec =
Test.specify "should fail with Illegal_Argument_Error if different lengths" <|
data = [[1,2,3,4],[10,20,30]]
data.first.compute (Covariance data.second) . should_fail_with Illegal_Argument_Error_Data
data.first.compute (Pearson data.second) . should_fail_with Illegal_Argument_Error_Data
data.first.compute (Spearman data.second) . should_fail_with Illegal_Argument_Error_Data
data.first.compute (R_Squared data.second) . should_fail_with Illegal_Argument_Error_Data
covariance_matrix data . should_fail_with Illegal_Argument_Error_Data
pearson_correlation data . should_fail_with Illegal_Argument_Error_Data
spearman_correlation data . should_fail_with Illegal_Argument_Error_Data
data.first.compute (Statistic.Covariance data.second) . should_fail_with Illegal_Argument_Error_Data
data.first.compute (Statistic.Pearson data.second) . should_fail_with Illegal_Argument_Error_Data
data.first.compute (Statistic.Spearman data.second) . should_fail_with Illegal_Argument_Error_Data
data.first.compute (Statistic.R_Squared data.second) . should_fail_with Illegal_Argument_Error_Data
Statistic.covariance_matrix data . should_fail_with Illegal_Argument_Error_Data
Statistic.pearson_correlation data . should_fail_with Illegal_Argument_Error_Data
Statistic.spearman_correlation data . should_fail_with Illegal_Argument_Error_Data
Test.specify "should fail with Illegal_Argument_Error if not number based" <|
text = [["A","BC","CD"], ["0", "1", "2"], ["H", "I", "J"]]
text.first.compute (Covariance text.second) . should_fail_with Illegal_Argument_Error_Data
text.first.compute (Pearson text.second) . should_fail_with Illegal_Argument_Error_Data
text.first.compute (Spearman text.second) . should_fail_with Illegal_Argument_Error_Data
text.first.compute (R_Squared text.second) . should_fail_with Illegal_Argument_Error_Data
covariance_matrix text . should_fail_with Illegal_Argument_Error_Data
pearson_correlation text . should_fail_with Illegal_Argument_Error_Data
spearman_correlation text . should_fail_with Illegal_Argument_Error_Data
text.first.compute (Statistic.Covariance text.second) . should_fail_with Illegal_Argument_Error_Data
text.first.compute (Statistic.Pearson text.second) . should_fail_with Illegal_Argument_Error_Data
text.first.compute (Statistic.Spearman text.second) . should_fail_with Illegal_Argument_Error_Data
text.first.compute (Statistic.R_Squared text.second) . should_fail_with Illegal_Argument_Error_Data
Statistic.covariance_matrix text . should_fail_with Illegal_Argument_Error_Data
Statistic.pearson_correlation text . should_fail_with Illegal_Argument_Error_Data
Statistic.spearman_correlation text . should_fail_with Illegal_Argument_Error_Data
Test.group "Statistics - invalid input" <|
Test.specify "should fail with Illegal_Argument_Error on number based statistics for text Vector" <|
series = [["A", "B", Nothing, "D"], ["A", "B", Nothing, "D"]]
covariance_matrix series . should_fail_with Illegal_Argument_Error_Data
pearson_correlation series . should_fail_with Illegal_Argument_Error_Data
Statistic.covariance_matrix series . should_fail_with Illegal_Argument_Error_Data
Statistic.pearson_correlation series . should_fail_with Illegal_Argument_Error_Data
main = Test_Suite.run_main spec

View File

@ -57,8 +57,8 @@ spec = Test.group 'Matching Helper' <|
Problems.test_problem_handling action_2 problems_2 tester
Test.specify 'should correctly work with complex object using a function extracting their names' <|
pairs = [Pair_Data "foo" 42, Pair_Data "bar" 33, Pair_Data "baz" 10, Pair_Data "foo" 0, Pair_Data 10 10]
selected = [Pair_Data "bar" 33, Pair_Data "foo" 42, Pair_Data "foo" 0]
pairs = [Pair.Value "foo" 42, Pair.Value "bar" 33, Pair.Value "baz" 10, Pair.Value "foo" 0, Pair.Value 10 10]
selected = [Pair.Value "bar" 33, Pair.Value "foo" 42, Pair.Value "foo" 0]
Text_Matcher.Case_Sensitive.match_criteria pairs ["bar", "foo"] reorder=True name_mapper=_.first . should_equal selected
Text_Matcher.Case_Sensitive.match_criteria [1, 2, 3] ["2"] name_mapper=_.to_text . should_equal [2]

View File

@ -1,9 +1,5 @@
from Standard.Base import all
import Standard.Base.Data.Text.Text_Sub_Range
import Standard.Base.Data.Time.Date_Period
import Standard.Base.Data.Time.Duration
import Standard.Base.Data.Time.Period
from Standard.Base.Error.Common import Time_Error
from Standard.Test import Test, Test_Suite
@ -94,32 +90,32 @@ spec_with name create_new_date parse_date =
Json.from_pairs [["type", "Date"], ["day", date.day], ["month", date.month], ["year", date.year]]
Test.specify "should add date-based interval" <|
date = create_new_date 1970 + (Period.days 1)
date = create_new_date 1970 + (Period.new days=1)
date . year . should_equal 1970
date . month . should_equal 1
date . day . should_equal 2
Test.specify "should subtract date-based interval" <|
date = create_new_date 1970 - (Period.years 1)
date = create_new_date 1970 - (Period.new years=1)
date . year . should_equal 1969
date . month . should_equal 1
date . day . should_equal 1
Test.specify "should support mixed interval operators" <|
date = create_new_date 1970 + (Period.months 1) - (Period.years 1)
date = create_new_date 1970 + (Period.new months=1) - (Period.new years=1)
date . year . should_equal 1969
date . month . should_equal 2
date . day . should_equal 1
Test.specify "should throw error when adding time-based Duration" <|
case (create_new_date 1970 + (Duration.hours 1)) . catch of
case (create_new_date 1970 + (Duration.new hours=1)) . catch of
Time_Error_Data message ->
message . should_equal "Date does not support adding/subtracting Duration. Use Period instead."
result ->
Test.fail ("Unexpected result: " + result.to_text)
Test.specify "should throw error when subtracting time-based Duration" <|
case (create_new_date 1970 - (Duration.minutes 1)) . catch of
case (create_new_date 1970 - (Duration.new minutes=1)) . catch of
Time_Error_Data message ->
message . should_equal "Date does not support adding/subtracting Duration. Use Period instead."
result ->
@ -173,8 +169,8 @@ spec_with name create_new_date parse_date =
new_date = create_new_date 2020 6 1
parsed_date = parse_date "2021-01-02"
Meta.type_of new_date . should_equal_type Date.Date
Meta.type_of parsed_date . should_equal_type Date.Date
Meta.type_of new_date . should_equal_type Date
Meta.type_of parsed_date . should_equal_type Date
Test.specify "should allow to find start and end of a Date_Period containing the current date" <|
d1 = create_new_date 2022 9 12
@ -376,7 +372,7 @@ spec_with name create_new_date parse_date =
Test.specify "should allow to shift the date by N working days, skipping custom holidays" <|
all_year_holiday year =
first_day = create_new_date year 1 1
Vector.new first_day.length_of_year (n -> first_day + (Period.days n))
Vector.new first_day.length_of_year (n -> first_day + (Period.new days=n))
two_years_vacation = all_year_holiday 1999 + all_year_holiday 2000

View File

@ -1,5 +1,4 @@
from Standard.Base import all
from Standard.Base.Data.Time import Duration, Period, Date_Period, Time_Period
from Standard.Test import Test, Test_Suite
@ -229,7 +228,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
time . day . should_equal 1
Test.specify "should add time interval" <|
time = create_new_datetime 1970 (zone = Time_Zone.utc) + (Duration.nanoseconds 1)
time = create_new_datetime 1970 (zone = Time_Zone.utc) + (Duration.new nanoseconds=1)
time . year . should_equal 1970
time . month . should_equal 1
time . day . should_equal 1
@ -240,7 +239,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
time . zone . should_equal Time_Zone.utc
Test.specify "should add date interval" <|
time = (create_new_datetime 1970 (zone = Time_Zone.utc)) + (Period.months 1)
time = (create_new_datetime 1970 (zone = Time_Zone.utc)) + (Period.new months=1)
time . year . should_equal 1970
time . month . should_equal 2
time . day . should_equal 1
@ -251,7 +250,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
time . zone . zone_id . should_equal Time_Zone.utc.zone_id
Test.specify "should add mixed date time interval" <|
time = create_new_datetime 1970 (zone = Time_Zone.utc) + (Period.months 1) + (Duration.hours 3)
time = create_new_datetime 1970 (zone = Time_Zone.utc) + (Period.new months=1) + (Duration.new hours=3)
time . year . should_equal 1970
time . month . should_equal 2
time . day . should_equal 1
@ -262,7 +261,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
time . zone . zone_id . should_equal Time_Zone.utc.zone_id
Test.specify "should subtract time interval" <|
time = create_new_datetime 1970 (zone = Time_Zone.utc) - (Duration.hours 1)
time = create_new_datetime 1970 (zone = Time_Zone.utc) - (Duration.new hours=1)
time . year . should_equal 1969
time . month . should_equal 12
time . day . should_equal 31
@ -273,7 +272,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
time . zone . zone_id . should_equal Time_Zone.utc.zone_id
Test.specify "should subtract date interval" <|
time = create_new_datetime 1970 (zone = Time_Zone.utc) - (Period.months 1)
time = create_new_datetime 1970 (zone = Time_Zone.utc) - (Period.new months=1)
time . year . should_equal 1969
time . month . should_equal 12
time . day . should_equal 1
@ -284,7 +283,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
time . zone . zone_id . should_equal Time_Zone.utc.zone_id
Test.specify "should subtract mixed date time interval" <|
time = create_new_datetime 1970 (zone = Time_Zone.utc) - (Period.months 1) - (Duration.hours 3)
time = create_new_datetime 1970 (zone = Time_Zone.utc) - (Period.new months=1) - (Duration.new hours=3)
time . year . should_equal 1969
time . month . should_equal 11
time . day . should_equal 30
@ -295,7 +294,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
time . zone . zone_id . should_equal Time_Zone.utc.zone_id
Test.specify "should support mixed interval operators" <|
time = create_new_datetime 1970 (zone = Time_Zone.utc) - (Period.months 1) + (Duration.hours 12)
time = create_new_datetime 1970 (zone = Time_Zone.utc) - (Period.new months=1) + (Duration.new hours=12)
time . year . should_equal 1969
time . month . should_equal 12
time . day . should_equal 1
@ -348,8 +347,8 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
new_datetime = create_new_datetime 2020 6 1 10 0 0
parsed_datetime = parse_datetime "2021-02-01T00:30:12.7102[UTC]"
Meta.type_of new_datetime . should_equal_type Date_Time.Date_Time
Meta.type_of parsed_datetime . should_equal_type Date_Time.Date_Time
Meta.type_of new_datetime . should_equal_type Date_Time
Meta.type_of parsed_datetime . should_equal_type Date_Time
max_nanos = 999999999
Test.specify "should allow to find start/end of a Date_Period containing the current datetime" <|
@ -448,7 +447,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
Test.specify "should find start/end of a Date_Period or Time_Period containing the current datetime correctly near the spring DST switch" pending=js_dst_pending <|
d1 = create_new_datetime 2022 3 27 1 34 15 0 tz
d2 = create_new_datetime 2022 3 27 3 34 15 0 tz
d1_plus = d1 + (Duration.hours 1)
d1_plus = d1 + (Duration.new hours=1)
d1_plus . should_equal d2
check_dates_spring date =
@ -480,7 +479,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
dst_overlap_message = "We cannot correctly migrate the datetime inside of the timeline overlap through the polyglot boundar - as due to polyglot conversion limitation, always the earlier one is chosen. See the bug report: https://github.com/oracle/graal/issues/4918"
Test.specify "should find start/end of a Date_Period or Time_Period containing the current datetime correctly near the autumn DST switch" pending=dst_overlap_message <|
d3 = create_new_datetime 2022 10 30 2 30 15 0 tz
d4 = d3 + (Duration.hours 1)
d4 = d3 + (Duration.new hours=1)
d3.hour . should_equal 2
d4.hour . should_equal 2
@ -554,7 +553,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
Test.specify "should handle shifting dates around autumn DST edge cases" pending=dst_overlap_message <|
d3 = create_new_datetime 2022 10 30 2 30 15 0 tz
d4 = d3 + (Duration.hours 1)
d4 = d3 + (Duration.new hours=1)
# TODO we need to check and document the actual behaviour once it is expressible, it may be equally acceptable to shift to 3:30 instead of 2:30.
d4 . add_work_days 0 . should_equal (Date_Time.new 2022 10 31 2 30 15 0 tz)

View File

@ -1,7 +1,5 @@
from Standard.Base import all
import Standard.Base.Data.Time.Duration
import Standard.Base.Data.Time.Period
from Standard.Test import Test, Test_Suite
polyglot java import java.time.Duration as Java_Duration
@ -25,7 +23,7 @@ spec =
Test.group "Duration" <|
Test.specify "should create interval seconds" <|
duration = (Duration.seconds 5)
duration = (Duration.new seconds=5)
duration.seconds . should_equal 5
duration.milliseconds . should_equal 0
@ -39,26 +37,26 @@ spec =
interval.is_empty . should_be_true
Test.specify "should normalize periods" <|
(Duration.seconds 60).total_minutes . should_equal 1
(Duration.milliseconds 1000).total_seconds . should_equal 1
(Duration.new seconds=60).total_minutes . should_equal 1
(Duration.new milliseconds=1000).total_seconds . should_equal 1
Test.specify "should normalize addition" <|
duration = (Duration.hours 11) + (Duration.hours 1)
duration = (Duration.new hours=11) + (Duration.new hours=1)
duration.hours . should_equal 12
Test.specify "should normalize subtraction" <|
duration = (Duration.hours 13) - (Duration.hours 1)
duration = (Duration.new hours=13) - (Duration.new hours=1)
duration.hours . should_equal 12
Test.specify "should convert to Json" <|
interval = (Duration.nanoseconds 120) + (Duration.seconds 30) + (Duration.hours 14)
interval = (Duration.new nanoseconds=120) + (Duration.new seconds=30) + (Duration.new hours=14)
interval.to_json.should_equal <|
duration_pairs = [["nanoseconds", interval.nanoseconds], ["seconds", interval.seconds], ["hours", interval.hours]]
Json.from_pairs ([["type", "Duration"]] + duration_pairs)
Test.specify "should be comparable" <|
duration_1 = (Duration.hours 5)
duration_2 = (Duration.minutes 1)
duration_1 = (Duration.new hours=5)
duration_2 = (Duration.new minutes=1)
duration_1.compare_to duration_1 . should_equal Ordering.Equal
duration_1==duration_1 . should_be_true
duration_1!=duration_2 . should_be_true
@ -66,8 +64,8 @@ spec =
duration_1<duration_2 . should_be_false
Test.specify "should not mix Duration and Period" <|
durations = [(Duration.hours 1), (Duration.zero), (Duration.new hours=1 seconds=30)]
periods = [(Period.days 1), (Period.new 0), (Period.years 30), (Period.new years=3 months=2)]
durations = [(Duration.new hours=1), (Duration.zero), (Duration.new hours=1 seconds=30)]
periods = [(Period.new days=1), (Period.new 0), (Period.new years=30), (Period.new years=3 months=2)]
durations.each duration->
periods.each period->
(duration + period).should_fail_with Time_Error_Data
@ -78,15 +76,15 @@ spec =
(duration < period).should_fail_with Time_Error_Data
Test.specify "Date_Time supports adding and subtracting Duration" <|
((Date_Time.new 2022 10 1 hour=10) + (Duration.hours 2)) . should_equal (Date_Time.new 2022 10 1 hour=12)
((Date_Time.new 2022 10 1 hour=10) - (Duration.hours 2)) . should_equal (Date_Time.new 2022 10 1 hour=8)
((Date_Time.new 2022 10 2) - (Duration.hours 24)) . should_equal (Date_Time.new 2022 10 1)
((Date_Time.new 2022 10 1 hour=2) - (Duration.minutes 3)) . should_equal (Date_Time.new 2022 10 1 hour=1 minute=57)
((Date_Time.new 2022 10 1 hour=10) + (Duration.new hours=2)) . should_equal (Date_Time.new 2022 10 1 hour=12)
((Date_Time.new 2022 10 1 hour=10) - (Duration.new hours=2)) . should_equal (Date_Time.new 2022 10 1 hour=8)
((Date_Time.new 2022 10 2) - (Duration.new hours=24)) . should_equal (Date_Time.new 2022 10 1)
((Date_Time.new 2022 10 1 hour=2) - (Duration.new minutes=3)) . should_equal (Date_Time.new 2022 10 1 hour=1 minute=57)
Test.specify "Java Duration is equal to Enso Duration" <|
(Duration.hours 1) . should_equal (Java_Duration.ofHours 1)
(Duration.minutes 80) . should_equal (Java_Duration.ofMinutes 80)
(Java_Duration.ofSeconds 30) . should_equal (Duration.seconds 30)
(Duration.new hours=1) . should_equal (Java_Duration.ofHours 1)
(Duration.new minutes=80) . should_equal (Java_Duration.ofMinutes 80)
(Java_Duration.ofSeconds 30) . should_equal (Duration.new seconds=30)
Test.specify "Difference of Java Date and Enso date should be an Enso Duration" <|
(Duration.between (java_datetime 2022 01 01) (Date_Time.new 2022 01 02) timezone_aware=False).total_hours . should_equal 24

View File

@ -1,43 +1,42 @@
from Standard.Base import all
from Standard.Test import Test, Test_Suite
from Standard.Base.Data.Time import Period
spec =
Test.group "Period" <|
Test.specify "should create period years" <|
period = (Period.years 5)
period = (Period.new years=5)
period.years . should_equal 5
period.days . should_equal 0
Test.specify "should add two Periods" <|
((Period.years 1) + (Period.years 2)).years . should_equal 3
((Period.days 1) + (Period.months 2)).days . should_equal 1
((Period.days 1) + (Period.months 2)).months . should_equal 2
((Period.months 2) + (Period.days 1)).days . should_equal 1
((Period.months 2) + (Period.days 1)).months . should_equal 2
((Period.new years=1) + (Period.new years=2)).years . should_equal 3
((Period.new days=1) + (Period.new months=2)).days . should_equal 1
((Period.new days=1) + (Period.new months=2)).months . should_equal 2
((Period.new months=2) + (Period.new days=1)).days . should_equal 1
((Period.new months=2) + (Period.new days=1)).months . should_equal 2
Test.specify "should subtract two Periods" <|
((Period.years 2) - (Period.years 1)).years . should_equal 1
((Period.years 1) - (Period.months 2)).months . should_equal (-2)
((Period.years 1) - (Period.months 2)).years . should_equal 1
((Period.new years=2) - (Period.new years=1)).years . should_equal 1
((Period.new years=1) - (Period.new months=2)).months . should_equal (-2)
((Period.new years=1) - (Period.new months=2)).years . should_equal 1
Test.specify "should get Period between two dates" <|
(Period.between (Date.new year=100) (Date.new year=150)) . should_equal (Period.years 50)
(Period.between (Date.new year=100) (Date.new year=150)) . should_equal (Period.new years=50)
(Period.between (Date.new year=150) (Date.new year=100)) . should_equal (Period.new years=(-50))
(Period.between (Date.new 2022 10 19) (Date.new 2022 11 01)) . should_equal (Period.days 13)
(Period.between (Date.new 2022 10 19) (Date.new 2022 11 01)) . should_equal (Period.new days=13)
Test.specify "should not compare between two periods" <|
((Period.days 10) > (Period.days 1)) . should_fail_with Incomparable_Values_Error
((Period.years 10) > (Period.days 1)) . should_fail_with Incomparable_Values_Error
((Period.new years=10 months=3) > (Period.months 5)) . should_fail_with Incomparable_Values_Error
((Period.new days=10) > (Period.new days=1)) . should_fail_with Incomparable_Values_Error
((Period.new years=10) > (Period.new days=1)) . should_fail_with Incomparable_Values_Error
((Period.new years=10 months=3) > (Period.new months=5)) . should_fail_with Incomparable_Values_Error
Test.specify "two Periods are equal iff their fields are equal" <|
((Period.days 1) == (Period.days 1)) . should_be_true
((Period.months 12) == (Period.years 1)) . should_be_false
((Period.months 3) == (Period.months 3)) . should_be_true
((Period.days (-5)) == (Period.days (-5))) . should_be_true
((Period.new days=1) == (Period.new days=1)) . should_be_true
((Period.new months=12) == (Period.new years=1)) . should_be_false
((Period.new months=3) == (Period.new months=3)) . should_be_true
((Period.new days=(-5)) == (Period.new days=(-5))) . should_be_true
((Period.new years=1 days=10) == (Period.new years=1 days=10)) . should_be_true
((Period.days 1) != (Period.months 1)) . should_be_true
((Period.new days=1) != (Period.new months=1)) . should_be_true
main = Test_Suite.run_main spec

View File

@ -1,5 +1,4 @@
from Standard.Base import all
from Standard.Base.Data.Time import Duration, Period, Time_Period
from Standard.Base.Error.Common import Time_Error_Data
from Standard.Test import Test, Test_Suite
@ -82,26 +81,26 @@ specWith name create_new_time parse_time =
datetime . zone . zone_id . should_equal Time_Zone.utc.zone_id
Test.specify "should add time-based interval" <|
time = create_new_time 0 + (Duration.minutes 1)
time = create_new_time 0 + (Duration.new minutes=1)
time . to_seconds . should_equal 60
Test.specify "should subtract time-based interval" <|
time = create_new_time 0 - (Duration.minutes 1)
time = create_new_time 0 - (Duration.new minutes=1)
time . to_seconds . should_equal 86340
Test.specify "should support mixed interval operators" <|
time = create_new_time 0 + (Duration.hours 1) - (Duration.seconds 1)
time = create_new_time 0 + (Duration.new hours=1) - (Duration.new seconds=1)
time . to_seconds . should_equal 3599
Test.specify "should throw error when adding date-based interval" <|
case (create_new_time 0 + (Period.days 1)) . catch of
case (create_new_time 0 + (Period.new days=1)) . catch of
Time_Error_Data message ->
message . should_equal "Time_Of_Day does not support date intervals (periods)"
result ->
Test.fail ("Unexpected result: " + result.to_text)
Test.specify "should throw error when subtracting date-based interval" <|
case (create_new_time 0 - (Period.days 1)) . catch of
case (create_new_time 0 - (Period.new days=1)) . catch of
Time_Error_Data message ->
message . should_equal "Time_Of_Day does not support date intervals (periods)"
result ->
@ -122,8 +121,8 @@ specWith name create_new_time parse_time =
new_timeofday = create_new_time 15 37 58
parsed_timeofday = parse_time "10:00:00"
Meta.type_of new_timeofday . should_equal_type Time_Of_Day.Time_Of_Day
Meta.type_of parsed_timeofday . should_equal_type Time_Of_Day.Time_Of_Day
Meta.type_of new_timeofday . should_equal_type Time_Of_Day
Meta.type_of parsed_timeofday . should_equal_type Time_Of_Day
max_nanos = 999999999
Test.specify "should allow to find start/end of a Time_Period containing the current time of day" <|

View File

@ -39,7 +39,7 @@ spec =
Test.fail ("Unexpected result: " + result.to_text)
Test.specify "should correctly determine the type of zone" <|
zone = Time_Zone.parse "Europe/Warsaw"
Meta.type_of zone . should_equal_type Time_Zone.Time_Zone
Meta.type_of zone . should_equal_type Time_Zone
Test.group "JavaZoneId" <|
Test.specify "should get system zone id" <|
defaultZone = ZoneId.systemDefault
@ -67,6 +67,6 @@ spec =
Json.from_pairs [["type", "Time_Zone"], ["id", "UTC"]]
Test.specify "should correctly determine the type of zone" <|
zone = ZoneId.systemDefault
Meta.type_of zone . should_equal_type Time_Zone.Time_Zone
Meta.type_of zone . should_equal_type Time_Zone
main = Test_Suite.run_main spec

View File

@ -219,11 +219,11 @@ spec = Test.group "Vectors" <|
([1, 2, 3, 4].filter_with_index ix-> _-> if ix == 1 then Error.throw <| My_Error.Value "foo" else True) . should_fail_with My_Error.Value
Test.specify "should partition elements" <|
[1, 2, 3, 4, 5].partition (x -> x % 2 == 0) . should_equal <| Pair_Data [2, 4] [1, 3, 5]
[1, 2, 3, 4, 5].partition (x -> x % 2 == 0) . should_equal <| Pair.Value [2, 4] [1, 3, 5]
([1, 2, 3, 4].partition x-> if x == 1 then Error.throw <| My_Error.Value "foo" else True) . should_fail_with My_Error.Value
Test.specify "should partition elements with indices" <|
["a", "b", "c", "d"].partition_with_index (ix -> _ -> ix % 2 == 0) == (Pair_Data ["a", "c"] ["b", "d"])
["a", "b", "c", "d"].partition_with_index (ix -> _ -> ix % 2 == 0) == (Pair.Value ["a", "c"] ["b", "d"])
["a", "b", "c", "d"].partition_with_index (ix -> _ -> if ix % 2 == 0 then Error.throw <| My_Error.Value "foo" else True) . should_fail_with My_Error.Value
Test.specify "should allow to join a vector of text elements to form a single text" <|
@ -575,13 +575,13 @@ spec = Test.group "Vectors" <|
Test.specify "should throw a clean error for incomparable types" <|
["a", 2].distinct . should_fail_with Incomparable_Values_Error
[2, "a", Integer, "a", 2].distinct . should_fail_with Incomparable_Values_Error
[Pair_Data 1 2, Pair_Data 3 4].distinct . should_fail_with Incomparable_Values_Error
[Pair.Value 1 2, Pair.Value 3 4].distinct . should_fail_with Incomparable_Values_Error
Test.specify "should correctly handle distinct with custom types like Atoms that implement compare_to" <|
[T.Value 1 2, T.Value 3 3, T.Value 1 2].distinct . should_equal [T.Value 1 2, T.Value 3 3]
Test.specify "should return a vector containing only unique elements up to some criteria" <|
[Pair_Data 1 "a", Pair_Data 2 "b", Pair_Data 1 "c"] . distinct (on = _.first) . should_equal [Pair_Data 1 "a", Pair_Data 2 "b"]
[Pair.Value 1 "a", Pair.Value 2 "b", Pair.Value 1 "c"] . distinct (on = _.first) . should_equal [Pair.Value 1 "a", Pair.Value 2 "b"]
Test.specify "should be able to sort a polyglot vector" <|
input = "beta".utf_8

View File

@ -1,7 +1,5 @@
from Standard.Base import all
import Standard.Base.Data.Time.Duration
import Standard.Base.Network.Http
import Standard.Base.Network.Http.Form
import Standard.Base.Network.Http.Header
@ -30,8 +28,8 @@ spec =
url_get = base_url_with_slash + "get"
url_post = base_url_with_slash + "post"
Test.specify "should create HTTP client with timeout setting" <|
http = Http.new (timeout = (Duration.seconds 30))
http.timeout.should_equal (Duration.seconds 30)
http = Http.new (timeout = (Duration.new seconds=30))
http.timeout.should_equal (Duration.new seconds=30)
Test.specify "should create HTTP client with follow_redirects setting" <|
http = Http.new (follow_redirects = False)
http.follow_redirects.should_equal False

View File

@ -132,48 +132,48 @@ spec = Test.group "Pattern Matches" <|
new_zone = Time_Zone.system
case new_date of
Date_Time.Date_Time -> Test.fail "Expected date value to match Date type."
Time_Of_Day.Time_Of_Day -> Test.fail "Expected date value to match Date type."
Time_Zone.Time_Zone -> Test.fail "Expected date value to match Date type."
Date.Date -> Test.fail "Expected date value to match Date type."
_ : Date_Time.Date_Time -> Test.fail "Expected date value to match Date type."
_ : Time_Of_Day.Time_Of_Day -> Test.fail "Expected date value to match Date type."
_ : Time_Zone.Time_Zone -> Test.fail "Expected date value to match Date type."
_ : Date.Date -> Nothing
Date_Time -> Test.fail "Expected date value to match Date type."
Time_Of_Day -> Test.fail "Expected date value to match Date type."
Time_Zone -> Test.fail "Expected date value to match Date type."
Date -> Test.fail "Expected date value to match Date type."
_ : Date_Time -> Test.fail "Expected date value to match Date type."
_ : Time_Of_Day -> Test.fail "Expected date value to match Date type."
_ : Time_Zone -> Test.fail "Expected date value to match Date type."
_ : Date -> Nothing
_ -> Test.fail "Expected date value to match Date type."
case new_date_time of
Date.Date -> Test.fail "Expected datetime value to match Date_Time type."
Time_Of_Day.Time_Of_Day -> Test.fail "Expected datetime value to match Date_Time type."
Time_Zone.Time_Zone -> Test.fail "Expected datetime value to match Date_Time type."
Date_Time.Date_Time -> Test.fail "Expected datetime value to match Date_Time type."
_ : Date.Date -> Test.fail "Expected datetime value to match Date_Time type."
_ : Time_Of_Day.Time_Of_Day -> Test.fail "Expected datetime value to match Date_Time type."
_ : Time_Zone.Time_Zone -> Test.fail "Expected datetime value to match Date_Time type."
_ : Date_Time.Date_Time -> Nothing
Date -> Test.fail "Expected datetime value to match Date_Time type."
Time_Of_Day -> Test.fail "Expected datetime value to match Date_Time type."
Time_Zone -> Test.fail "Expected datetime value to match Date_Time type."
Date_Time -> Test.fail "Expected datetime value to match Date_Time type."
_ : Date -> Test.fail "Expected datetime value to match Date_Time type."
_ : Time_Of_Day -> Test.fail "Expected datetime value to match Date_Time type."
_ : Time_Zone -> Test.fail "Expected datetime value to match Date_Time type."
_ : Date_Time -> Nothing
_ -> Test.fail "Expected datetime value to match Date_Time type."
case new_time of
Date.Date -> Test.fail "Expected time value to match Time_Of_Day type."
Date_Time.Date_Time -> Test.fail "Expected time value to match Time_Of_Day type."
Time_Zone.Time_Zone -> Test.fail "Expected time value to match Time_Of_Day type."
Time_Of_Day.Time_Of_Day -> Test.fail "Expected time value to match Time_Of_Day type."
_ : Date.Date -> Test.fail "Expected time value to match Time_Of_Day type."
_ : Date_Time.Date_Time -> Test.fail "Expected time value to match Time_Of_Day type."
_ : Time_Zone.Time_Zone -> Test.fail "Expected time value to match Time_Of_Day type."
_ : Time_Of_Day.Time_Of_Day -> Nothing
Date -> Test.fail "Expected time value to match Time_Of_Day type."
Date_Time -> Test.fail "Expected time value to match Time_Of_Day type."
Time_Zone -> Test.fail "Expected time value to match Time_Of_Day type."
Time_Of_Day -> Test.fail "Expected time value to match Time_Of_Day type."
_ : Date -> Test.fail "Expected time value to match Time_Of_Day type."
_ : Date_Time -> Test.fail "Expected time value to match Time_Of_Day type."
_ : Time_Zone -> Test.fail "Expected time value to match Time_Of_Day type."
_ : Time_Of_Day -> Nothing
_ -> Test.fail "Expected time value to match Time_Of_Day type."
case new_zone of
Date.Date -> Test.fail "Expected timezone value to match Time_Zone type."
Date_Time.Date_Time -> Test.fail "Expected timezone value to match Time_Zone type."
Time_Of_Day.Time_Of_Day -> Test.fail "Expected timezone value to match Time_Zone type."
Time_Zone.Time_Zone -> Test.fail "Expected timezone value to match Time_Zone type."
_ : Date.Date -> Test.fail "Expected timezone value to match Time_Zone type."
_ : Date_Time.Date_Time -> Test.fail "Expected timezone value to match Time_Zone type."
_ : Time_Of_Day.Time_Of_Day -> Test.fail "Expected timezone value to match Time_Zone type."
_ : Time_Zone.Time_Zone -> Nothing
Date -> Test.fail "Expected timezone value to match Time_Zone type."
Date_Time -> Test.fail "Expected timezone value to match Time_Zone type."
Time_Of_Day -> Test.fail "Expected timezone value to match Time_Zone type."
Time_Zone -> Test.fail "Expected timezone value to match Time_Zone type."
_ : Date -> Test.fail "Expected timezone value to match Time_Zone type."
_ : Date_Time -> Test.fail "Expected timezone value to match Time_Zone type."
_ : Time_Of_Day -> Test.fail "Expected timezone value to match Time_Zone type."
_ : Time_Zone -> Nothing
_ -> Test.fail "Expected timezone value to match Time_Zone type."
Test.specify "should be able to match on literal values" <|

View File

@ -93,13 +93,13 @@ spec = Test.group "Meta-Value Manipulation" <|
Meta.is_a err Error . should_be_true
Meta.is_a err Text . should_be_false
Meta.is_a Date.now Date.Date . should_be_true
Meta.is_a Date_Time.now Date_Time.Date_Time . should_be_true
Meta.is_a Date_Time.now Date.Date . should_be_false
Meta.is_a Time_Of_Day.now Time_Of_Day.Time_Of_Day . should_be_true
Meta.is_a Time_Of_Day.now Date.Date . should_be_false
Meta.is_a Date_Time.now.zone Time_Zone.Time_Zone . should_be_true
Meta.is_a Date_Time.now.zone Date.Date . should_be_false
Meta.is_a Date.now Date . should_be_true
Meta.is_a Date_Time.now Date_Time . should_be_true
Meta.is_a Date_Time.now Date . should_be_false
Meta.is_a Time_Of_Day.now Time_Of_Day . should_be_true
Meta.is_a Time_Of_Day.now Date . should_be_false
Meta.is_a Date_Time.now.zone Time_Zone . should_be_true
Meta.is_a Date_Time.now.zone Date . should_be_false
Test.specify "should allow for returning the type of a value" <|
n_1 = Meta.type_of 42
@ -125,16 +125,16 @@ spec = Test.group "Meta-Value Manipulation" <|
Meta.type_of True . should_equal_type Boolean
Meta.type_of False . should_not_equal_type Any
(Meta.type_of Date.now) . should_equal_type Date.Date
(Meta.type_of Date.now) . should_not_equal_type Date_Time.Date_Time
(Meta.type_of Date_Time.now) . should_equal_type Date_Time.Date_Time
(Meta.type_of Date_Time.now) . should_not_equal_type Date.Date
(Meta.type_of Time_Of_Day.now) . should_equal_type Time_Of_Day.Time_Of_Day
(Meta.type_of Time_Of_Day.now) . should_not_equal_type Date.Date
(Meta.type_of Date_Time.now.zone) . should_equal_type Time_Zone.Time_Zone
(Meta.type_of Date_Time.now.zone) . should_not_equal_type Date.Date
(Meta.type_of Time_Zone.local) . should_equal_type Time_Zone.Time_Zone
(Meta.type_of Time_Zone.system) . should_equal_type Time_Zone.Time_Zone
(Meta.type_of Date.now) . should_equal_type Date
(Meta.type_of Date.now) . should_not_equal_type Date_Time
(Meta.type_of Date_Time.now) . should_equal_type Date_Time
(Meta.type_of Date_Time.now) . should_not_equal_type Date
(Meta.type_of Time_Of_Day.now) . should_equal_type Time_Of_Day
(Meta.type_of Time_Of_Day.now) . should_not_equal_type Date
(Meta.type_of Date_Time.now.zone) . should_equal_type Time_Zone
(Meta.type_of Date_Time.now.zone) . should_not_equal_type Date
(Meta.type_of Time_Zone.local) . should_equal_type Time_Zone
(Meta.type_of Time_Zone.system) . should_equal_type Time_Zone
list = ArrayList.new
list.add 123