mirror of
https://github.com/enso-org/enso.git
synced 2024-12-23 10:42:05 +03:00
Refactor Base_Tests/src/Data to Test_New (#8890)
Refactor `Base_Tests` to `Test_New` testing framework. Mostly automatic text replacements.

# Important Notes
List of changes that were not done automatically (not via automatic text replacement):
- Fix indexes in Instrumentor_Spec - f590c4a398
- If group or spec is pending, its block is not evaluated - 8d797f1a4a
- Spec_Result is not private - 5767535af2

Tests marked as *pending*:
- #8913
- #8910
This commit is contained in:
parent 1ba28a3c5c
commit bb8ff8f89e
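Most of the diff below is one mechanical rewrite applied file by file. A condensed before/after sketch of that pattern, for orientation only (the group and spec names are made up; everything else mirrors the API changes visible in the hunks):

# Before: a module-level `spec` built with Test.group / Test.specify and run via Test_Suite.
from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions

spec =
    Test.group "Example group" <|
        Test.specify "should do something" <|
            (1 + 1) . should_equal 2

main = Test_Suite.run_main spec

# After: specs are registered on a suite builder from Test_New and the built
# suite is executed with `run_with_filter`.
from Standard.Test_New import all

add_specs suite_builder =
    suite_builder.group "Example group" group_builder->
        group_builder.specify "should do something" <|
            (1 + 1) . should_equal 2

main =
    suite = Test.build suite_builder->
        add_specs suite_builder
    suite.run_with_filter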
@@ -20,7 +20,12 @@ type Group_Builder
- pending: Contains a reason for why the test should be ignored. If Nothing, the test
is not ignored.
specify self (name : Text) (~code : Any) (pending : (Text | Nothing) = Nothing) =
self.builder.append <| Spec.Impl name (_ -> code) pending
case pending of
Nothing ->
self.builder.append <| Spec.Impl name (_ -> code) pending
pend_reason : Text ->
self.builder.append <| Spec.Impl name (_ -> Nothing) pend_reason

## Provide a teardown of a group - a method that will be called once all the tests from the
group are finished.
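For context, the new branch above stores `(_ -> Nothing)` as the body thunk of a pending spec, so a pending block is never evaluated and only its reason is reported. A hypothetical pending spec written against this API (the name and reason are made up; #8913 is one of the issues listed in the commit message):

group_builder.specify "re-enable once the upstream issue is fixed" pending="Blocked by #8913" <|
    # Not evaluated while the spec is pending; the reason is shown in the report instead.
    Panic.throw "unreachable"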
@@ -21,17 +21,21 @@ get_attached_warnings v =
- result_checker: A function which should verify that the result generated by
the action is correct. It does not return anything, instead it should use
the standard testing approach, like `x.should_equal y`.
test_problem_handling : (Problem_Behavior -> Any) -> Vector Any -> (Any -> Nothing) -> Nothing
test_problem_handling action expected_problems result_checker =
- unwrap_errors: If true, remove any wrapping errors from errors and warnings
before checking them.
test_problem_handling : (Problem_Behavior -> Any) -> Vector Any -> (Any -> Nothing) -> Boolean -> Nothing
test_problem_handling action expected_problems result_checker unwrap_errors=True =
unwrap_maybe error = if unwrap_errors then Error.unwrap error else error

error_checker error_result =
first_problem = expected_problems.first
first_problem_type = Meta.type_of first_problem
error_result . should_fail_with first_problem_type frames_to_skip=3
error_result.catch . should_equal first_problem frames_to_skip=3
error_result . should_fail_with first_problem_type unwrap_errors=unwrap_errors frames_to_skip=3
(unwrap_maybe error_result.catch) . should_equal first_problem frames_to_skip=3
warnings_checker warnings =
## TODO [RW] we are not checking if there are no duplicate warnings, because the warnings are in fact duplicated - we should figure out how to handle that and then possibly modify the test
Test.with_clue "The warnings were "+warnings.to_text+'.\n' <|
warnings . should_contain_the_same_elements_as expected_problems frames_to_skip=5
warnings . map unwrap_maybe . should_contain_the_same_elements_as expected_problems frames_to_skip=5
test_advanced_problem_handling action error_checker warnings_checker result_checker frames_to_skip=1

## UNSTABLE
@@ -84,12 +88,15 @@ assume_no_problems result =
- expected_warning: The expected warning. It can either by a warning type or
a concrete value.
- result: The value to check.
expect_warning : Any -> Any -> Nothing
expect_warning expected_warning result =
- unwrap_errors: If true, remove any wrapping errors from the result before
checking against the expected warning.
expect_warning : Any -> Any -> Boolean -> Nothing
expect_warning expected_warning result unwrap_errors=True =
unwrap_maybe error = if unwrap_errors then Error.unwrap error else error
loc = Meta.get_source_location 1
if result.is_error then
Test.fail "Expected a warning "+expected_warning.to_text+", but a dataflow error has been matched: "+result.catch.to_display_text+" (at "+loc+")."
warnings = get_attached_warnings result
warnings = get_attached_warnings result . map unwrap_maybe
found = warnings.find if_missing=Nothing x->
(x == expected_warning) || (x.is_a expected_warning)
found.if_nothing <|
@@ -105,12 +112,15 @@ expect_warning expected_warning result =
- expected_warning: The expected warning. It can either by a warning type or
a concrete value.
- result: The value to check.
expect_only_warning : Any -> Any -> Any
expect_only_warning expected_warning result =
- unwrap_errors: If true, remove any wrapping errors from the result before
checking against the expected warning.
expect_only_warning : Any -> Any -> Boolean -> Any
expect_only_warning expected_warning result unwrap_errors=True =
unwrap_maybe error = if unwrap_errors then Error.unwrap error else error
loc = Meta.get_source_location 1
if result.is_error then
Test.fail "Expected only warning "+expected_warning.to_text+", but a dataflow error has been matched: "+result.catch.to_display_text+" (at "+loc+")."
warnings = get_attached_warnings result
warnings = get_attached_warnings result . map unwrap_maybe
is_expected x =
(x == expected_warning) || (x.is_a expected_warning)
found = warnings.find if_missing=Nothing is_expected
@@ -121,7 +131,6 @@ expect_only_warning expected_warning result =
Test.fail "Expected the result to contain only the warning: "+found.to_text+", but it also contained: "+invalid.to_text+' (at '+loc+').'
found

## UNSTABLE
Checks if the provided value does _not_ have a warning of the specified type.
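A sketch of how the extended `Problems.test_problem_handling` signature might be called from a spec; `parse_all`, `my_input` and the expected problem are hypothetical stand-ins for an operation that attaches warnings or raises errors depending on the given `Problem_Behavior`:

group_builder.specify "should respect the problem behavior" <|
    action = problem_behavior->
        # Hypothetical operation under test, parameterised by the problem behavior.
        parse_all my_input on_problems=problem_behavior
    result_checker result =
        result.length . should_equal 3
    expected_problems = [Illegal_Argument.Error "malformed entry"]
    Problems.test_problem_handling action expected_problems result_checker unwrap_errors=True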
@@ -1,5 +1,3 @@
private

from Standard.Base import all

## Result of one test Spec
@@ -2,6 +2,7 @@
from Standard.Base import all
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Runtime.State
from Standard.Base.Runtime import assert

import project.Group.Group
import project.Group.Group_Builder
@@ -30,9 +31,15 @@ type Suite_Builder
group : Text -> (Group_Builder -> Any) -> Nothing
group self (name:Text) (fn : (Group_Builder -> Any)) (pending : (Text | Nothing) = Nothing) =
group_builder = Group_Builder.Impl
fn group_builder
group = Group.Impl name group_builder.teardown_ref.get group_builder.builder.to_vector pending
self.builder.append group
case pending of
Nothing ->
fn group_builder
group = Group.Impl name group_builder.teardown_ref.get group_builder.builder.to_vector pending
self.builder.append group
pend_reason : Text ->
group = Group.Impl name group_builder.teardown_ref.get [] pend_reason
self.builder.append group


## A testing suite that holds all the groups with test specs.
@@ -58,17 +65,23 @@ type Suite
run_with_filter : (Regex | Text | Nothing) -> (Regex | Text | Nothing) -> Nothing
run_with_filter self group_filter=Nothing spec_filter=Nothing should_exit=True =
config = Suite_Config.from_environment
filtered_groups = self.groups.filter group->
group_name_matches = case group_filter of

groups_with_matching_names = self.groups.filter group->
case group_filter of
regex : Regex -> (regex.match group.name) != Nothing
text : Text -> group.name.contains text
Nothing -> True
case group_name_matches of
False -> False
True -> group.pending == Nothing
pending_groups = groups_with_matching_names.filter group->
group.pending.is_nothing.not
groups_to_run = groups_with_matching_names.filter group->
pending_groups.contains group . not
assert (pending_groups.length <= groups_to_run.length)
assert (groups_to_run.length <= groups_with_matching_names.length)
assert (groups_with_matching_names.length <= self.groups.length)

junit_sb_builder = if config.should_output_junit then StringBuilder.new else Nothing
all_results = Test_Reporter.wrap_junit_testsuites config junit_sb_builder <|
filtered_groups.flat_map group->
groups_to_run.flat_map group->
results = Helpers.run_group_with_filter group spec_filter
Test_Reporter.print_report results config junit_sb_builder
results
@@ -80,6 +93,7 @@ type Suite
IO.println <| succ_tests.to_text + " tests succeeded."
IO.println <| failed_tests.to_text + " tests failed."
IO.println <| skipped_tests.to_text + " tests skipped."
IO.println <| pending_groups.length.to_text + " groups skipped."
exit_code = if failed_tests > 0 then 1 else 0
System.exit exit_code
False ->
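The filtering logic above lets a runner execute only matching, non-pending groups; an illustrative invocation (the filter text is arbitrary, the rest mirrors the `main` blocks added throughout this commit):

main =
    suite = Test.build suite_builder->
        add_specs suite_builder
    # Run only groups whose name contains "Interval"; pending groups are counted
    # as skipped and reported at the end instead of being run.
    suite.run_with_filter group_filter="Interval"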
@@ -1,37 +1,37 @@
from Standard.Base import all
import Standard.Base.Errors.Common.No_Such_Method

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

spec =
Test.group "JavaScript Objects, Arrays & Functions" <|
Test.specify "Missing method" <|

add_specs suite_builder =
suite_builder.group "JavaScript Objects, Arrays & Functions" group_builder->
group_builder.specify "Missing method" <|
Test.expect_panic_with data.missing No_Such_Method

Test.specify "Found method" <|
group_builder.specify "Found method" <|
data.classes

Test.specify "Array length" <|
group_builder.specify "Array length" <|
len = data.classes.length
len.should_equal 3

Test.specify "Array at" <|
group_builder.specify "Array at" <|
number = data.classes.at 1
number.name.should_equal "java.lang.Number"
number.superclass.name.should_equal "java.lang.Object"

Test.specify "Vector/Array copy & map" <|
group_builder.specify "Vector/Array copy & map" <|
classes = Vector.from_array data.classes
names = classes.map (x -> x.name)
names.should_equal ["java.lang.Object", "java.lang.Number", "java.lang.Integer"]

Test.specify "Vector/Array no copy & map" <|
group_builder.specify "Vector/Array no copy & map" <|
classes = Vector.from_polyglot_array data.classes
names = classes.map (x -> x.name)
names.should_equal ["java.lang.Object", "java.lang.Number", "java.lang.Integer"]

Test.specify "Vector.from_array creates a copy" <|
group_builder.specify "Vector.from_array creates a copy" <|
js_arr = create_array [1, 2, 3]
enso_vector = Vector.from_array js_arr

@@ -47,7 +47,7 @@ spec =
(enso_vector.at 2).should_equal 3
enso_vector.should_not_equal js_arr

Test.specify "Vector.from_polyglot_array doesn't create a copy" <|
group_builder.specify "Vector.from_polyglot_array doesn't create a copy" <|
js_arr = create_array [1, 2, 3]
enso_vector = Vector.from_polyglot_array js_arr

@@ -63,7 +63,11 @@ spec =
(enso_vector.at 2).should_equal Nothing
enso_vector.should_equal js_arr

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter


foreign js data = """
var object = { name : "java.lang.Object", superclass: null };
@@ -4,8 +4,8 @@ import Standard.Base.Errors.Common.Index_Out_Of_Bounds
import Standard.Base.Errors.Common.Type_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


type Proxy_Object
Value length
@@ -13,9 +13,9 @@ type Proxy_Object
at : Integer -> Integer
at self ix = ix * 10

spec =
Test.group "Array_Proxy" <|
Test.specify "should correctly delegate to the callback" <|
add_specs suite_builder =
suite_builder.group "Array_Proxy" group_builder->
group_builder.specify "should correctly delegate to the callback" <|
arr = Array_Proxy.new 3 (ix -> ix + 10)
arr.length . should_equal 3
arr.at 0 . should_equal 10
@@ -23,7 +23,7 @@ spec =
arr.at 2 . should_equal 12
arr.at 3 . should_fail_with Index_Out_Of_Bounds

Test.specify "should be able to be used to construct a Vector" <|
group_builder.specify "should be able to be used to construct a Vector" <|
v1 = Vector.from_polyglot_array (Array_Proxy.new 3 (ix -> ix + 10))
v1.length . should_equal 3
v1 . should_equal [10, 11, 12]
@@ -34,18 +34,18 @@ spec =
v2.should_equal [12, 11, 10]
v2.sort . should_equal [10, 11, 12]

Test.specify "should be able to construct a Vector from a proxy object" <|
group_builder.specify "should be able to construct a Vector from a proxy object" <|
v1 = Vector.from_polyglot_array (Array_Proxy.from_proxy_object [4, 3, 2])
v1.should_equal [4, 3, 2]

v2 = Vector.from_polyglot_array (Array_Proxy.from_proxy_object (Proxy_Object.Value 5))
v2.should_equal [0, 10, 20, 30, 40]

Test.specify "should check validity at construction" <|
group_builder.specify "should check validity at construction" <|
Test.expect_panic_with (Array_Proxy.new 0 0) Type_Error
Test.expect_panic_with (Array_Proxy.new -1 (x->x)) Illegal_Argument

Test.specify "should pretend to just be an Array" <|
group_builder.specify "should pretend to just be an Array" <|
proxy = Array_Proxy.new 3 (ix -> ix + 10)
ok_match = case proxy of
_ : Vector -> "Vector"
@@ -55,4 +55,8 @@ spec =
Meta.get_qualified_type_name proxy . should_equal "Standard.Base.Data.Array.Array"
Meta.get_simple_type_name proxy . should_equal "Array"

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
@@ -2,8 +2,8 @@ from Standard.Base import all
import Standard.Base.Errors.Common.Index_Out_Of_Bounds
import Standard.Base.Errors.Illegal_State.Illegal_State

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


Array.method self = 0

@@ -12,48 +12,48 @@ Array.method self = 0
make_enso_array vector =
vector.to_array

test_arrays array_from_vector =
Test.specify "should allow accessing elements" <|
test_arrays group_builder array_from_vector =
group_builder.specify "should allow accessing elements" <|
arr = array_from_vector [1, 2, 3]
arr.at 0 . should_equal 1
arr.at 2 . should_equal 3
arr.at -1 . should_equal 3

Test.specify "should panic on out of bounds access" <|
group_builder.specify "should panic on out of bounds access" <|
arr = array_from_vector [1, 2, 3]
(arr.at -4) . should_fail_with Index_Out_Of_Bounds
(arr.at 3) . should_fail_with Index_Out_Of_Bounds

spec =
Test.group "Enso Arrays" <|
test_arrays make_enso_array
add_specs suite_builder =
suite_builder.group "Enso Arrays" group_builder->
test_arrays group_builder make_enso_array

Test.specify "should allow for functional dispatch on a method defined in this module" <|
group_builder.specify "should allow for functional dispatch on a method defined in this module" <|
arr = make_enso_array [1, 2, 3]
arr.method . should_equal 0

Test.specify "should handle ==" <|
group_builder.specify "should handle ==" <|
(make_enso_array [1,2,3]).should_equal (make_enso_array [1,2,3])
(make_enso_array [1]).should_not_equal (make_enso_array [2])

Test.specify "should not sort in place" <|
group_builder.specify "should not sort in place" <|
arr = make_enso_array [3, 1, 2]
new_arr = arr.sort
arr . should_equal [3, 1, 2]
new_arr . should_equal [1, 2, 3]

Test.group "Compare functionality with Vector" <|
Test.specify "compare methods" <|
suite_builder.group "Compare functionality with Vector" group_builder->
group_builder.specify "compare methods" <|
vector_methods = Meta.meta Vector . methods . sort
array_methods = Meta.meta Array . methods . sort
vector_methods . should_equal array_methods

Test.group "ArrayOverBuffer" <|
suite_builder.group "ArrayOverBuffer" group_builder->
location_pending = case Platform.os of
Platform.OS.Windows -> "This test is disabled on Windows."
_ -> Nothing

Test.specify "should behave like an Array" pending=location_pending <|
group_builder.specify "should behave like an Array" pending=location_pending <|
array_over_buffer = (File.new (enso_project.data / "sample.txt") . read_last_bytes 10).to_array

case array_over_buffer of
@@ -66,4 +66,8 @@ spec =
sorted.to_text . should_equal "[10, 32, 46, 98, 101, 106, 106, 115, 117, 117]"

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
@@ -2,24 +2,28 @@ from Standard.Base import all
import Standard.Base.Data.Base_64.Base_64
import Standard.Base.Errors.Encoding_Error.Encoding_Error

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


polyglot java import java.lang.String as Java_String

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

spec =
Test.group "Base_64" <|
Test.specify "can encode simple text" <|

add_specs suite_builder =
suite_builder.group "Base_64" group_builder->
group_builder.specify "can encode simple text" <|
r = Base_64.encode_text "ABCD"
r.should_equal "QUJDRA=="

Test.specify "can decode simple text" <|
group_builder.specify "can decode simple text" <|
r = Base_64.decode_text "QUJDRA=="
r.should_equal "ABCD"

Test.specify "can support various encodings" <|
group_builder.specify "can support various encodings" <|
txt = "AąęłśZ"
r_utf = Base_64.encode_text txt
r_windows = Base_64.encode_text txt Encoding.windows_1250
@@ -1,7 +1,7 @@
from Standard.Base import all

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


Boolean.method self = self

@@ -11,37 +11,37 @@ type My_Error
crash =
Error.throw (My_Error.Value "foo")

spec =
Test.group "Booleans" <|
Test.specify "should allow converting Bools to Text values" <|
add_specs suite_builder =
suite_builder.group "Booleans" group_builder->
group_builder.specify "should allow converting Bools to Text values" <|
True.to_text . should_equal "True"
False.to_text . should_equal "False"

Test.specify "should allow for comparing Bools" <|
group_builder.specify "should allow for comparing Bools" <|
(True == True) . should_be_true
(False == False) . should_be_true
(True > False) . should_be_true
(False < True) . should_be_true

Test.specify "should allow == operator" <|
group_builder.specify "should allow == operator" <|
True.should_equal True
False.should_equal False
True.should_not_equal False
False.should_not_equal True
(1 == 1).should_equal True

Test.specify "should allow for extending Bools in a local module" <|
group_builder.specify "should allow for extending Bools in a local module" <|
test = 1 == 2
test.method . should_equal test

Test.specify "should short-circuit ||" <|
group_builder.specify "should short-circuit ||" <|
(1 == 1) || (crash) . should_equal True
(1 == 0) || (1 == 1) . should_equal True
(1 == 0) || (crash) . should_fail_with My_Error
(1 == 1) || "foo" . should_equal True
(1 == 0) || "foo" . should_equal "foo"

Test.specify "should short-circuit &&" <|
group_builder.specify "should short-circuit &&" <|
(1 == 0) && (crash) . should_equal False
(1 == 1) && (1 == 0) . should_equal False
(1 == 1) && (1 == 1) . should_equal True
@@ -49,4 +49,8 @@ spec =
(1 == 0) && "foo" . should_equal False
(1 == 1) && "foo" . should_equal "foo"

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
@@ -1,41 +1,45 @@
from Standard.Base import all

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

spec =
Test.group "identity" <|
Test.specify "identity on number" <|

add_specs suite_builder =
suite_builder.group "identity" group_builder->
group_builder.specify "identity on number" <|
(identity 5) . should_equal 5

Test.specify "identity on text" <|
group_builder.specify "identity on text" <|
(identity '5') . should_equal '5'

Test.specify "identity on boolean" <|
group_builder.specify "identity on boolean" <|
(identity False) . should_equal False

Test.group "flip" <|
Test.specify "flip on number" <|
suite_builder.group "flip" group_builder->
group_builder.specify "flip on number" <|
(flip (-) 2 5) . should_equal 3

Test.specify "flip on text" <|
group_builder.specify "flip on text" <|
(flip (+) "world" "hello") . should_equal "helloworld"

Test.group "const" <|
Test.specify "const on number" <|
suite_builder.group "const" group_builder->
group_builder.specify "const on number" <|
two = const 2
two 5 . should_equal 2

Test.group "curry" <|
Test.specify "curry on number list" <|
suite_builder.group "curry" group_builder->
group_builder.specify "curry on number list" <|
sum = x -> x.fold 0 (+)
sum [1, 2, 3, 4] . should_equal 10
plus = curry sum
plus 6 3 . should_equal 9

Test.group "uncurry" <|
Test.specify "uncurry on number list" <|
suite_builder.group "uncurry" group_builder->
group_builder.specify "uncurry on number list" <|
times = uncurry (*)
times [6, 7] . should_equal 42

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
@@ -1,17 +1,17 @@
from Standard.Base import all

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

spec =
Test.group "Bound" <|
Test.specify "should allow constructing inclusive bounds" <|

add_specs suite_builder =
suite_builder.group "Bound" group_builder->
group_builder.specify "should allow constructing inclusive bounds" <|
bound = Bound.Inclusive 0
bound.n . should_equal 0
Test.specify "should allow constructing exclusive bounds" <|
group_builder.specify "should allow constructing exclusive bounds" <|
bound = Bound.Exclusive 0
bound.n . should_equal 0
Test.specify "should be able to be checked for equality" <|
group_builder.specify "should be able to be checked for equality" <|
inclusive_1 = Bound.Inclusive 10
inclusive_2 = Bound.Inclusive 5
exclusive_1 = Bound.Exclusive 10
@@ -22,24 +22,24 @@ spec =
(exclusive_1 == exclusive_2) . should_be_false
(inclusive_1 == exclusive_1) . should_be_false

Test.group "Interval" <|
Test.specify "should allow constructing exclusive intervals" <|
suite_builder.group "Interval" group_builder->
group_builder.specify "should allow constructing exclusive intervals" <|
interval = Interval.new 1 5 Interval_Type.Exclusive
interval.start . should_equal (Bound.Exclusive 1)
interval.end . should_equal (Bound.Exclusive 5)
Test.specify "should allow constructing start-exclusive intervals" <|
group_builder.specify "should allow constructing start-exclusive intervals" <|
interval = Interval.new 1 5 Interval_Type.Start_Exclusive
interval.start . should_equal (Bound.Exclusive 1)
interval.end . should_equal (Bound.Inclusive 5)
Test.specify "should allow constructing end-exclusive intervals" <|
group_builder.specify "should allow constructing end-exclusive intervals" <|
interval = Interval.new 1 5
interval.start . should_equal (Bound.Inclusive 1)
interval.end . should_equal (Bound.Exclusive 5)
Test.specify "should allow constructing inclusive intervals" <|
group_builder.specify "should allow constructing inclusive intervals" <|
interval = Interval.new 1 5 Interval_Type.Inclusive
interval.start . should_equal (Bound.Inclusive 1)
interval.end . should_equal (Bound.Inclusive 5)
Test.specify "should allow checking if an interval contains a value of the contained type" <|
group_builder.specify "should allow checking if an interval contains a value of the contained type" <|
interval = Interval.new 1 10
interval.contains 0 . should_be_false
interval.contains 1 . should_be_true
@@ -83,7 +83,7 @@ spec =
interval_7.contains 1 . should_be_false

(Interval.new 0.1 1 Interval_Type.Inclusive) . contains 0.33 . should_be_true
Test.specify "can be checked for emptiness" <|
group_builder.specify "can be checked for emptiness" <|
Interval.new 0 0 Interval_Type.Exclusive . is_empty . should_be_true
Interval.new 1 10 Interval_Type.Exclusive . is_empty . should_be_false
Interval.new 0 0 Interval_Type.Start_Exclusive . is_empty . should_be_true
@@ -92,7 +92,7 @@ spec =
Interval.new 1 10 Interval_Type.End_Exclusive . is_empty . should_be_false
Interval.new 0 0 Interval_Type.Inclusive . is_empty . should_be_false
Interval.new 0.1 0 Interval_Type.Inclusive . is_empty . should_be_true
Test.specify "can be checked for non-emptiness" <|
group_builder.specify "can be checked for non-emptiness" <|
Interval.new 0 0 Interval_Type.Exclusive . not_empty . should_be_false
Interval.new 0 0.001 Interval_Type.Inclusive . not_empty . should_be_true
Interval.new 1 10 Interval_Type.Exclusive . not_empty . should_be_true
@@ -103,4 +103,8 @@ spec =
Interval.new 0 0 Interval_Type.Inclusive . not_empty . should_be_true
Interval.new 10 0 Interval_Type.Inclusive . not_empty . should_be_false

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
@@ -5,9 +5,10 @@ import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Errors.No_Such_Key.No_Such_Key
from Standard.Base.Data.Json import Invalid_JSON

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
import Standard.Test.Test_Result.Test_Result
from Standard.Test_New import all
import Standard.Test_New.Spec_Result.Spec_Result


type Author
Value name year_of_birth
@@ -17,17 +18,17 @@ type Book

Text.should_fail_parsing_with self expected =
as_fail = case Json.parse self of
_ -> Test_Result.Failure "Expected a parse error, but no error reported."
_ -> Spec_Result.Failure "Expected a parse error, but no error reported."
result = as_fail.catch Any e-> case e of
Invalid_JSON.Error msg ->
if msg.contains expected then Test_Result.Success else
if msg.contains expected then Spec_Result.Success else
fail_msg = "The reported message " + msg.to_text + " did not contain " + expected.to_text + "."
Test_Result.Failure fail_msg
Spec_Result.Failure fail_msg
_ ->
fail_msg = "Expected a parser error, but " + e.to_text + " was thrown."
Test_Result.Failure fail_msg
Spec_Result.Failure fail_msg
case result of
Test_Result.Success -> Test_Result.Success
Spec_Result.Success -> Spec_Result.Success
_ -> Panic.throw result

Text.should_parse_as self expected =
@@ -36,9 +37,9 @@ Text.should_parse_as self expected =
Text.should_render_itself self =
Json.parse self . to_json . should_equal self

spec =
Test.group "JSON Deserialization" <|
Test.specify "should parse JSON structures" <|
add_specs suite_builder =
suite_builder.group "JSON Deserialization" group_builder->
group_builder.specify "should parse JSON structures" <|
"0 ".should_parse_as 0
" 123 ".should_parse_as 123
"15.0643".should_parse_as 15.0643
@@ -59,7 +60,7 @@ spec =
y_v = JS_Object.from_pairs [["z", Nothing], ["w", Nothing]]
JS_Object.from_pairs [["foo", "bar"], ["baz", ["foo", "x", False]], ["y", y_v]]

Test.specify "should report meaningful parsing errors" <|
group_builder.specify "should report meaningful parsing errors" <|
"foo".should_fail_parsing_with "Unexpected token f in JSON at position 0"
"[,]".should_fail_parsing_with "Unexpected token , in JSON at position 1"
"{,}".should_fail_parsing_with "Unexpected token , in JSON at position 1"
@@ -67,20 +68,20 @@ spec =
deep_err.should_fail_parsing_with "closing quote ] expected at position 34"
"123 4".should_fail_parsing_with "JSON cannot be fully parsed at position 4"

Test.specify "should be able to deserialize using into via conversion" <|
group_builder.specify "should be able to deserialize using into via conversion" <|
Json.parse '{"type":"Time_Zone","constructor":"parse","id":"Europe/Moscow"}' . into Time_Zone . should_equal (Time_Zone.parse "Europe/Moscow")
Json.parse '{}' . into Time_Zone . should_fail_with Illegal_Argument

Test.specify "should be able to deserialize using into for single constructor" <|
group_builder.specify "should be able to deserialize using into for single constructor" <|
Json.parse '{"first": 1, "second": 2}' . into Pair . should_equal (Pair.Value 1 2)
Json.parse '{"start": 15, "end": 20, "step": 3}' . into Range . should_equal (Range.Between 15 20 3)

Test.specify "should be able to deserialize using into for multiple constructors" <|
group_builder.specify "should be able to deserialize using into for multiple constructors" <|
Json.parse '{"than": 2}' . into Filter_Condition . should_fail_with Illegal_Argument
Json.parse '{"constructor": "Less", "than": 2}' . into Filter_Condition . should_equal (Filter_Condition.Less 2)
Json.parse '{"constructor": "NotARealOne", "than": 2}' . into Filter_Condition . should_fail_with Illegal_Argument

Test.specify "should be able to convert a JS_Object into a Map using into" <|
group_builder.specify "should be able to convert a JS_Object into a Map using into" <|
Json.parse '{"a": 15, "b": 20, "c": "X", "d": null}' . into Map . should_equal (Map.from_vector [["a", 15], ["b", 20], ["c", "X"], ["d", Nothing]])
Json.parse '{}' . into Map . should_equal Map.empty
@@ -88,13 +89,13 @@ spec =
Test.expect_panic No_Such_Method <|
Json.parse '[]' . into Map

Test.specify "should be able to deserialize Date" <|
group_builder.specify "should be able to deserialize Date" <|
'{"type": "Date", "constructor": "new", "year": 2018, "month": 7, "day": 3}'.should_parse_as (Date.new 2018 7 3)
'{"type": "Date", "year": 2025, "month": 5, "day": 12}'.should_parse_as (Date.new 2025 5 12)
'{"type": "Date", "month": 5, "day": 12}' . should_parse_as (JS_Object.from_pairs [["type", "Date"], ["month", 5], ["day", 12]])
'{"type": "Date", "year": 2019, "day": 12}' . should_parse_as (JS_Object.from_pairs [["type", "Date"], ["year", 2019], ["day", 12]])

Test.specify "should be able to deserialize Time_Of_Day" <|
group_builder.specify "should be able to deserialize Time_Of_Day" <|
'{"type": "Time_Of_Day", "constructor": "new", "hour": 22, "minute": 14, "second": 47}'.should_parse_as (Time_Of_Day.new 22 14 47)
'{"type": "Time_Of_Day", "hour": 12, "minute": 30}'.should_parse_as (Time_Of_Day.new 12 30 0)
'{"type": "Time_Of_Day", "hour": 18, "minute": 6, "second": 13, "nanosecond": 1234568}'.should_parse_as (Time_Of_Day.new 18 6 13 nanosecond=1234568)
@@ -102,7 +103,7 @@ spec =
'{"type": "Time_Of_Day", "hour": 14, "second": 47}' . should_parse_as (JS_Object.from_pairs [["type", "Time_Of_Day"], ["hour", 14], ["second", 47]])
'{"type": "Time_Of_Day", "hour": 18, "minute": 6, "nanosecond": 1234568}'.should_parse_as (JS_Object.from_pairs [["type", "Time_Of_Day"], ["hour", 18], ["minute", 6], ["nanosecond", 1234568]])

Test.specify "should be able to deserialize Date_Time" <|
group_builder.specify "should be able to deserialize Date_Time" <|
tz = Time_Zone.parse "Europe/Moscow"
'{"type":"Date_Time","constructor":"new","year":2023,"month":9,"day":29,"hour":11,"minute":52,"second":33,"nanosecond":572104300,"zone":{"type":"Time_Zone","constructor":"parse","id":"Europe/Moscow"}}'.should_parse_as (Date_Time.new 2023 9 29 11 52 33 nanosecond=572104300 zone=tz)
'{"type":"Date_Time","constructor":"new","year":2023,"month":9,"day":29,"hour":11,"minute":52,"second":33,"zone":{"type":"Time_Zone","constructor":"parse","id":"Europe/Moscow"}}'.should_parse_as (Date_Time.new 2023 9 29 11 52 33 zone=tz)
@@ -112,8 +113,8 @@ spec =
'{"type":"Date_Time","constructor":"new","year":2023,"month":9,"day":29,"hour":11,"second":52}'.should_parse_as (JS_Object.from_pairs [["type", "Date_Time"], ["constructor", "new"], ["year", 2023], ["month", 9], ["day", 29], ["hour", 11], ["second", 52]])
'{"type":"Date_Time","constructor":"new","year":2023,"month":9,"day":29,"hour":11,"minute":52,"nanosecond":572104300}'.should_parse_as (JS_Object.from_pairs [["type", "Date_Time"], ["constructor", "new"], ["year", 2023], ["month", 9], ["day", 29], ["hour", 11], ["minute", 52], ["nanosecond", 572104300]])

Test.group "JSON Serialization" <|
Test.specify "should print JSON structures to valid json" <|
suite_builder.group "JSON Serialization" group_builder->
group_builder.specify "should print JSON structures to valid json" <|
"0".should_render_itself
"123".should_render_itself
"15.0643".should_render_itself
@@ -129,28 +130,28 @@ spec =
object_json = '{"baz":["foo","x",false],"foo":"bar","y":{"w":null,"z":null}}'
object_json.should_render_itself

Test.specify "should convert arbitrary types to JSON" <|
group_builder.specify "should convert arbitrary types to JSON" <|
1.to_json.should_equal "1"
1.54.to_json.should_equal "1.54"
["foo", "bar", "baz"].to_json.should_equal '["foo","bar","baz"]'
Author.Value "Tolkien" 1892 . to_json . should_equal <|
'{"type":"Author","constructor":"Value","name":"Tolkien","year_of_birth":1892}'

Test.specify "should render NaN and Infinity to null" <|
group_builder.specify "should render NaN and Infinity to null" <|
Number.nan.to_json . should_equal "null"
Number.positive_infinity.to_json . should_equal "null"
Number.negative_infinity.to_json . should_equal "null"

Test.group "JS_Object" <|
Test.specify "should be buildable from pairs" <|
suite_builder.group "JS_Object" group_builder->
group_builder.specify "should be buildable from pairs" <|
JS_Object.from_pairs [["foo", "bar"]] . to_json . should_equal '{"foo":"bar"}'
JS_Object.from_pairs [["foo", "bar"], ["baz", Nothing]] . to_json . should_equal '{"foo":"bar","baz":null}'

Test.specify "should be buildable from pairs" <|
group_builder.specify "should be buildable from pairs" <|
JS_Object.from_pairs [["foo", "bar"]] . to_json . should_equal '{"foo":"bar"}'
JS_Object.from_pairs [["foo", "bar"], ["baz", Nothing]] . to_json . should_equal '{"foo":"bar","baz":null}'

Test.specify "should be handle equality on a key level" <|
group_builder.specify "should be handle equality on a key level" <|
JS_Object.from_pairs [["a", 42]] . should_equal <| Json.parse '{"a": 42}'
JS_Object.from_pairs [["a", 42]] . should_not_equal <| JS_Object.from_pairs [["a", 43]]
JS_Object.from_pairs [["a", 42]] . should_not_equal <| JS_Object.from_pairs [["b", 42]]
@@ -164,22 +165,22 @@ spec =
obj1 . should_not_equal <| JS_Object.from_pairs [["a", 43], ["b", 123]]
obj1 . should_not_equal <| JS_Object.from_pairs [["a", 42], ["b", JS_Object.from_pairs [["c",1], ["d",3]]]]

Test.specify "should be able to set values" <|
group_builder.specify "should be able to set values" <|
obj = JS_Object.from_pairs [["foo", "bar"], ["baz", "quux"]]
obj.to_json . should_equal '{"foo":"bar","baz":"quux"}'
obj.set_value "foo" "asdf" . to_json . should_equal '{"foo":"asdf","baz":"quux"}'
obj.set_value "baz" "zxcv" . to_json . should_equal '{"foo":"bar","baz":"zxcv"}'
obj.set_value "abc" "def" . to_json . should_equal '{"foo":"bar","baz":"quux","abc":"def"}'

Test.specify "set_value does not modify the original object" <|
group_builder.specify "set_value does not modify the original object" <|
obj = JS_Object.from_pairs [["foo", "bar"], ["baz", "quux"]]
obj.to_json . should_equal '{"foo":"bar","baz":"quux"}'
modified = obj.set_value "foo" "asdf"
modified.to_json . should_equal '{"foo":"asdf","baz":"quux"}'
obj.to_json . should_equal '{"foo":"bar","baz":"quux"}'

Test.group "JSON" <|
Test.specify "should allow getting object fields" <|
suite_builder.group "JSON" group_builder->
group_builder.specify "should allow getting object fields" <|
object = Json.parse '{ "foo": "bar", "baz": ["foo", "x", false],"y": {"z": null, "w": null} }'
object.get "foo" . should_equal "bar"
object.get "bar" . should_equal Nothing
@@ -189,16 +190,16 @@ spec =
Json.parse "null" . get "foo" . should_equal Nothing
Json.parse "null" . get "foo" 1 . should_equal 1

Test.specify "should allow checking for fields" <|
group_builder.specify "should allow checking for fields" <|
object = Json.parse '{ "foo": "bar", "baz": ["foo", "x", false],"y": {"z": null, "w": null} }'
object.contains_key "foo" . should_equal True
object.contains_key "bar" . should_equal False

Test.specify "should be able to get field_names" <|
group_builder.specify "should be able to get field_names" <|
Json.parse '{ "foo": "bar", "baz": ["foo", "x", false] }' . field_names . should_equal ["foo", "baz"]
Json.parse '{}' . field_names . should_equal []

Test.specify "should be able to get a value by index" <|
group_builder.specify "should be able to get a value by index" <|
Json.parse "[1, 2, 3, 4, 5]" . at 2 . should_equal 3
Json.parse "[1, 2, 3, 4, 5]" . at -2 . should_equal 4
Json.parse "[1, 2, 3, 4, 5]" . at 5 . should_fail_with Index_Out_Of_Bounds
@@ -207,7 +208,7 @@ spec =
Json.parse "[1, 2, 3, 4, 5]" . get 5 . should_equal Nothing
Json.parse "[1, 2, 3, 4, 5]" . get 5 "?" . should_equal "?"

Test.specify "should be able to get length" <|
group_builder.specify "should be able to get length" <|
Json.parse "{}" . length . should_equal 0
Json.parse "[]" . length . should_equal 0
Json.parse '{ "foo": "bar", "baz": ["foo", "x", false] }' . length . should_equal 2
@@ -215,4 +216,8 @@ spec =
Json.parse '"foo"' . length . should_equal 3
Json.parse '""' . length . should_equal 0

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
@@ -6,29 +6,29 @@ import Standard.Base.Errors.Common.Type_Error
import Standard.Base.Errors.Common.Unsupported_Argument_Types
import Standard.Base.Runtime.State

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

spec = Test.group "List" <|

add_specs suite_builder = suite_builder.group "List" group_builder->
l = List.Cons 1 <| List.Cons 2 <| List.Cons 3 <| List.Nil
empty = List.Nil
Test.specify "should have properly defined length" <|
group_builder.specify "should have properly defined length" <|
l.length.should_equal 3

Test.specify "should have well defined length when empty" <|
group_builder.specify "should have well defined length when empty" <|
List.Nil.length.should_equal 0

Test.specify "should allow folding the list with an arbitrary operation with .fold" <|
group_builder.specify "should allow folding the list with an arbitrary operation with .fold" <|
l.fold 0 (+) . should_equal 6
l.fold 1 (*) . should_equal 6

Test.specify "should allow reducing the list with an arbitrary operation with .reduce" <|
group_builder.specify "should allow reducing the list with an arbitrary operation with .reduce" <|
l.reduce (+) . should_equal 6
l.reduce (*) . should_equal 6
empty.reduce (+) . should_fail_with Empty_Error
empty.reduce (+) 0 . should_equal 0

Test.specify "should allow checking if an element satisfies a predicate with .any" <|
group_builder.specify "should allow checking if an element satisfies a predicate with .any" <|
any_even = l.any (x -> x % 2 == 0)
any_eq_five = l.any (== 5)
any_even.should_be_true
@@ -39,7 +39,7 @@ spec = Test.group "List" <|

Test.expect_panic_with (l.any "invalid arg") Type_Error

Test.specify "should allow checking if all elements satisfy a predicate with `.all`" <|
group_builder.specify "should allow checking if all elements satisfy a predicate with `.all`" <|
all_even = l.all(x -> x % 2 == 0)
all_less_than_four = l.all (< 4)
all_even . should_be_false
@@ -50,12 +50,12 @@ spec = Test.group "List" <|

Test.expect_panic_with (l.all "invalid arg") Type_Error

Test.specify "should allow checking if an element is in the list with `.contains`" <|
group_builder.specify "should allow checking if an element is in the list with `.contains`" <|
l.contains 4 . should_be_false
l.contains 3 . should_be_true
empty.contains 10 . should_be_false

Test.specify "should allow finding an element in the list with `.find`" <|
group_builder.specify "should allow finding an element in the list with `.find`" <|
l.find (==2) . should_equal 2
l.find (==3) . should_equal 3
l.find (==4) . should_fail_with Not_Found
@@ -73,7 +73,7 @@ spec = Test.group "List" <|

Test.expect_panic_with (l.find "invalid arg") Type_Error

Test.specify "should allow finding the index of an element in the list with `.index_of`" <|
group_builder.specify "should allow finding the index of an element in the list with `.index_of`" <|
l.index_of (==2) . should_equal 1
l.index_of 3 . should_equal 2
l.index_of (==4) . should_equal Nothing
@@ -92,7 +92,7 @@ spec = Test.group "List" <|
l.index_of (Filter_Condition.Greater 1) . should_equal 1
l.index_of "invalid arg" . should_equal Nothing

Test.specify "should allow finding the last index of an element in the list with `.last_index_of`" <|
group_builder.specify "should allow finding the last index of an element in the list with `.last_index_of`" <|
ll = List.Cons 1 <| List.Cons 2 <| List.Cons 3 <| List.Cons 1 <| List.Cons 2 <| List.Cons 3 <| List.Nil
ll.last_index_of (==2) . should_equal 4
ll.last_index_of 3 . should_equal 5
@@ -112,21 +112,21 @@ spec = Test.group "List" <|
ll.last_index_of (Filter_Condition.Less 3) . should_equal 4
ll.last_index_of "invalid arg" . should_equal Nothing

Test.specify "should allow checking if the list is empty with `.is_empty`" <|
group_builder.specify "should allow checking if the list is empty with `.is_empty`" <|
l.is_empty . should_be_false
empty.is_empty . should_be_true

Test.specify "should allow checking if the list is not empty `.not_empty`" <|
group_builder.specify "should allow checking if the list is not empty `.not_empty`" <|
l.not_empty . should_be_true
empty.not_empty . should_be_false

Test.specify "should be convertible to a vector" <|
group_builder.specify "should be convertible to a vector" <|
(List.Cons 3 (List.Cons "a" (List.Cons 1 List.Nil))).to_vector.should_equal [3, "a", 1]

Test.specify "should allow filtering of the list using `.filter`" <|
group_builder.specify "should allow filtering of the list using `.filter`" <|
l.filter (> 2) . should_equal (List.Cons 3 List.Nil)

Test.specify "should filter elements by Filter_Condition" <|
group_builder.specify "should filter elements by Filter_Condition" <|
list = [1, 2, 3, 4, 5].to_list
list.filter (Filter_Condition.Greater than=3) . should_equal [4, 5].to_list
list.filter (Filter_Condition.Less than=3.5) . should_equal [1, 2, 3].to_list
@@ -183,7 +183,7 @@ spec = Test.group "List" <|
bools.filter Filter_Condition.Is_True . should_equal [True, True].to_list
bools.filter Filter_Condition.Is_False . should_equal [False].to_list

Test.specify "should allow to partition a list" <|
group_builder.specify "should allow to partition a list" <|
r1 = l.partition (x-> x%2==0)
r1.first . should_equal (List.Cons 2 List.Nil)
r1.second . should_equal (List.Cons 1 (List.Cons 3 List.Nil))
@@ -202,10 +202,10 @@ spec = Test.group "List" <|

Test.expect_panic_with (l.partition "invalid arg") Type_Error

Test.specify "should allow mapping a function over its elements with .map" <|
group_builder.specify "should allow mapping a function over its elements with .map" <|
l.map +1 . first . should_equal 2

Test.specify "should allow executing an action for each element with .each" <|
group_builder.specify "should allow executing an action for each element with .each" <|
sum = State.run Number 0 <|
l.each el->
s = State.get Number
@ -213,53 +213,53 @@ spec = Test.group "List" <|
|
||||
State.get Number
|
||||
sum.should_equal 6
|
||||
|
||||
Test.specify "should allow reversing with .reverse" <|
|
||||
group_builder.specify "should allow reversing with .reverse" <|
|
||||
l.reverse.first.should_equal 3
|
||||
|
||||
Test.specify "should allow dropping elements from the left with `.drop`" <|
|
||||
group_builder.specify "should allow dropping elements from the left with `.drop`" <|
|
||||
l.drop_start 1 . should_equal (List.Cons 2 (List.Cons 3 List.Nil))
|
||||
empty.drop_start 1 . should_equal List.Nil
|
||||
|
||||
Test.specify "should allow taking elements from the left with `.take_start`" <|
|
||||
group_builder.specify "should allow taking elements from the left with `.take_start`" <|
|
||||
l.take_start 2 . should_equal (List.Cons 1 (List.Cons 2 List.Nil))
|
||||
empty.take_start 2 . should_equal List.Nil
|
||||
|
||||
Test.specify "should allow getting the tail of the list with `.tail`" <|
|
||||
group_builder.specify "should allow getting the tail of the list with `.tail`" <|
|
||||
l.tail . should_equal (List.Cons 2 (List.Cons 3 List.Nil))
|
||||
empty.tail.should_fail_with Index_Out_Of_Bounds
|
||||
|
||||
Test.specify "single element list.init yields Nil" <|
|
||||
group_builder.specify "single element list.init yields Nil" <|
|
||||
(List.Cons 1 List.Nil).init . should_equal List.Nil
|
||||
|
||||
Test.specify "two element list.init yields one element" <|
|
||||
group_builder.specify "two element list.init yields one element" <|
|
||||
(List.Cons 1 (List.Cons 2 List.Nil)).init . should_equal (List.Cons 1 List.Nil)
|
||||
|
||||
Test.specify "should allow getting the init of the list with `.init`" <|
|
||||
group_builder.specify "should allow getting the init of the list with `.init`" <|
|
||||
l.init . should_equal (List.Cons 1 (List.Cons 2 List.Nil))
|
||||
empty.init.should_fail_with Index_Out_Of_Bounds
|
||||
|
||||
Test.specify "should allow getting the last element of the list with `.last`" <|
|
||||
group_builder.specify "should allow getting the last element of the list with `.last`" <|
|
||||
l.last . should_equal 3
|
||||
empty.last.should_fail_with Index_Out_Of_Bounds
|
||||
|
||||
Test.specify "should allow getting the head of the list with `.first`" <|
|
||||
group_builder.specify "should allow getting the head of the list with `.first`" <|
|
||||
l.first . should_equal 1
|
||||
empty.first.should_fail_with Index_Out_Of_Bounds
|
||||
empty.first.catch.should_equal (Index_Out_Of_Bounds.Error 0 0)
|
||||
|
||||
Test.specify "should allow getting the second item of the list with `.second`" <|
|
||||
group_builder.specify "should allow getting the second item of the list with `.second`" <|
|
||||
l.second . should_equal 2
|
||||
empty.second.should_fail_with Index_Out_Of_Bounds
|
||||
empty.second.catch.should_equal (Index_Out_Of_Bounds.Error 1 0)
|
||||
(List.Cons 1 List.Nil).second.should_fail_with Index_Out_Of_Bounds
|
||||
(List.Cons 1 List.Nil).second.catch.should_equal (Index_Out_Of_Bounds.Error 1 1)
|
||||
|
||||
Test.specify "should allow getting the last element of the list with `.last`" <|
|
||||
group_builder.specify "should allow getting the last element of the list with `.last`" <|
|
||||
l.last . should_equal 3
|
||||
empty.last.should_fail_with Index_Out_Of_Bounds
|
||||
empty.last.catch.should_equal (Index_Out_Of_Bounds.Error -1 0)
|
||||
|
||||
Test.specify "should allow getting the items of the list with `.at`" <|
|
||||
group_builder.specify "should allow getting the items of the list with `.at`" <|
|
||||
l.at 0 . should_equal 1
|
||||
l.at 1 . should_equal 2
|
||||
l.at 2 . should_equal 3
|
||||
@ -270,7 +270,7 @@ spec = Test.group "List" <|
|
||||
l.at 3 . should_fail_with Index_Out_Of_Bounds
|
||||
l.at -4 . should_fail_with Index_Out_Of_Bounds
|
||||
|
||||
Test.specify "should allow getting the items of the list with `.get`" <|
|
||||
group_builder.specify "should allow getting the items of the list with `.get`" <|
|
||||
l.get 0 . should_equal 1
|
||||
l.get 1 . should_equal 2
|
||||
l.get 2 . should_equal 3
|
||||
@ -282,4 +282,8 @@ spec = Test.group "List" <|
|
||||
l.get 3 . should_equal Nothing
|
||||
l.get -4 . should_equal Nothing
|
||||
|
||||
main = Test_Suite.run_main spec
|
||||
main =
|
||||
suite = Test.build suite_builder->
|
||||
add_specs suite_builder
|
||||
suite.run_with_filter
|
||||
|
||||
|
@ -1,8 +1,8 @@
|
||||
from Standard.Base import all
|
||||
|
||||
from Standard.Base.Metadata.Choice import Option
|
||||
from Standard.Test import Test, Test_Suite
|
||||
import Standard.Test.Extensions
|
||||
from Standard.Test_New import all
|
||||
|
||||
|
||||
polyglot java import java.util.Locale as JavaLocale
|
||||
|
||||
@ -15,40 +15,40 @@ with_locale locale ~test =
|
||||
JavaLocale.setDefault default_locale
|
||||
result
|
||||
|
||||
spec =
|
||||
Test.group "Locale" <|
|
||||
add_specs suite_builder =
suite_builder.group "Locale" group_builder->
en_gb = Locale.new "en" "GB"
Test.specify "allow constructing a locale with optional parts" <|
group_builder.specify "allow constructing a locale with optional parts" <|
loc = Locale.new "en"
loc.language . should_equal "en"
loc.country . should_equal Nothing
loc.variant . should_equal Nothing
Test.specify "should have a well-defined text conversion" <|
group_builder.specify "should have a well-defined text conversion" <|
en_gb.to_text . should_equal "en-GB"
Test.specify "should allow obtaining it from a language tag" <|
group_builder.specify "should allow obtaining it from a language tag" <|
locale = Locale.from_language_tag "en-US"
locale.to_text . should_equal "en-US"
Test.specify "should allow getting the language code" <|
group_builder.specify "should allow getting the language code" <|
locale = Locale.from_language_tag "en-US"
locale.language . should_equal "en"
Test.specify "should allow getting the country code" <|
group_builder.specify "should allow getting the country code" <|
locale = Locale.from_language_tag "en-US"
locale.country . should_equal "US"
Test.specify "should allow getting the variant code" <|
group_builder.specify "should allow getting the variant code" <|
locale = Locale.from_language_tag "en-US-x-lvariant-UTF-8"
locale.variant . should_equal "UTF_8"
Test.specify "should allow getting the display language" <|
group_builder.specify "should allow getting the display language" <|
with_locale Locale.us <|
display = Locale.from_language_tag "en-GB" . display_language
display . should_equal "English"
Test.specify "should allow getting the display country" <|
group_builder.specify "should allow getting the display country" <|
with_locale Locale.us <|
display = Locale.from_language_tag "en-GB" . display_country
display . should_equal "United Kingdom"
Test.specify "should allow getting the display variant" <|
group_builder.specify "should allow getting the display variant" <|
display = Locale.from_language_tag "en-GB-x-lvariant-UTF8" . display_variant
display . should_equal "UTF8"
Test.specify "should have some common locale constants" <|
group_builder.specify "should have some common locale constants" <|
Locale.default.to_text . should_equal "und"
Locale.bangladesh.to_text . should_equal "bn-BD"
Locale.brazil.to_text . should_equal "pt-BR"
@ -70,14 +70,18 @@ spec =
Locale.south_korea.to_text . should_equal "ko-KR"
Locale.uk.to_text . should_equal "en-GB"
Locale.us.to_text . should_equal "en-US"
Test.specify "should convert to Json" <|
group_builder.specify "should convert to Json" <|
en_gb.to_json.should_equal '{"type":"Locale","constructor":"new","language":"en","country":"GB","variant":null}'
Test.specify "should allow equality comparisons" <|
group_builder.specify "should allow equality comparisons" <|
Locale.uk . should_equal Locale.uk
Locale.uk . should_not_equal Locale.us

Test.specify "Should correctly auto-discover static Locales" <|
group_builder.specify "Should correctly auto-discover static Locales" <|
locale_names = Locale.widget_options.map x-> case x of Option locale_name _ _ _ -> locale_name
locale_names . should_equal ['Bangladesh', 'Brazil', 'Canada English', 'Canada French', 'China', 'France', 'Germany', 'India English', 'India Hindi', 'Indonesia', 'Italy', 'Japan', 'Mexico', 'Nigeria', 'Pakistan English', 'Pakistan Urdu', 'Poland', 'Russia', 'South Korea', 'UK', 'US']

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

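The same migration pattern repeats in every file touched by this commit: a top-level `spec` built from `Test.group`/`Test.specify` and executed via `Test_Suite.run_main` becomes an `add_specs suite_builder` function that registers groups and specs on builders, while `main` builds and runs the suite. A minimal before/after sketch (illustrative only; the group name and spec body are made up):

    # Old style (Standard.Test)
    spec = Test.group "Example" <|
        Test.specify "adds numbers" <|
            (1 + 1) . should_equal 2
    main = Test_Suite.run_main spec

    # New style (Standard.Test_New)
    add_specs suite_builder =
        suite_builder.group "Example" group_builder->
            group_builder.specify "adds numbers" <|
                (1 + 1) . should_equal 2
    main =
        suite = Test.build suite_builder->
            add_specs suite_builder
        suite.run_with_filter
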
@ -2,8 +2,8 @@ from Standard.Base import all
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Errors.No_Such_Key.No_Such_Key

from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all

polyglot java import java.nio.file.Path as JavaPath
polyglot java import java.util.Map as JavaMap
@ -91,45 +91,45 @@ type Parent
type GrandParent
Value parent

spec =
Test.group "Enso maps" <|
Test.specify "should allow checking for emptiness" <|
add_specs suite_builder =
suite_builder.group "Enso maps" group_builder->
group_builder.specify "should allow checking for emptiness" <|
empty_map = Map.empty
non_empty = Map.empty . insert "foo" 1234
empty_map.is_empty . should_be_true
non_empty.is_empty . should_be_false

Test.specify "should get the default comparator for polyglot maps" <|
group_builder.specify "should get the default comparator for polyglot maps" <|
Comparable.from (Map.empty) . should_equal Default_Comparator
Comparable.from (js_empty_dict) . should_equal Default_Comparator
Comparable.from (JavaMap.of "A" 1 "B" 2) . should_equal Default_Comparator

Test.specify "should compare two hash maps" <|
group_builder.specify "should compare two hash maps" <|
(Map.singleton "a" 1).should_equal (Map.singleton "a" 1)
(Map.singleton "b" 2).should_not_equal (Map.singleton "a" 1)
Map.empty.should_equal Map.empty
Map.empty.should_not_equal (Map.singleton "a" 1)
(Map.empty.insert "a" 1 . insert "b" 2).should_equal (Map.empty.insert "b" 2 . insert "a" 1)

Test.specify "should allow checking for non emptiness" <|
group_builder.specify "should allow checking for non emptiness" <|
empty_map = Map.empty
non_empty = Map.empty . insert "foo" 1234
empty_map.not_empty . should_be_false
non_empty.not_empty . should_be_true

Test.specify "should allow checking its size" <|
group_builder.specify "should allow checking its size" <|
empty_map = Map.empty
non_empty = Map.singleton "a" "b" . insert "x" "y"
empty_map.size . should_equal 0
non_empty.size . should_equal 2

Test.specify "should support arbitrary atoms as keys" <|
group_builder.specify "should support arbitrary atoms as keys" <|
map = Map.singleton (Pair.new "one" "two") 42
(map.get (Pair.new "one" "two")).should_equal 42
(map.get (Pair.new "A" "B")).should_equal Nothing
(map.get (Pair.new "two" "two")).should_equal Nothing

Test.specify "should use proper hash code for keys" <|
group_builder.specify "should use proper hash code for keys" <|
single_key_map key = Map.singleton key 42
grand_parent_1 = GrandParent.Value (Parent.Value (Child.Value 2))
grand_parent_2 = GrandParent.Value (Parent.Value (Child.Value 2.0))
@ -144,12 +144,12 @@ spec =
(single_key_map (Json.parse '{"a": 1}') . at (Json.parse '{"a": 1}')) . should_equal 42
(single_key_map (Child.Value 1) . at (Child.Value 1.0)) . should_equal 42

Test.specify "should support vectors as keys" <|
group_builder.specify "should support vectors as keys" <|
map = Map.singleton [1, "a", 2] "Value"
map.size.should_equal 1
map.get [1, "a", 2] . should_equal "Value"

Test.specify "should support NaN as keys" <|
group_builder.specify "should support NaN as keys" <|
Map.empty.insert Number.nan 1 . contains_key Number.nan . should_be_true
Map.empty.insert Number.nan 1 . values . should_equal [1]
Map.empty.insert Number.nan 1 . insert Number.nan 2 . contains_key Number.nan . should_be_true
@ -161,7 +161,7 @@ spec =
Map.empty.insert Number.nan 1 . insert Number.nan Number.nan . at Number.nan . to_text . should_equal "NaN"
Map.empty.insert Number.nan 1 . insert Number.nan Number.nan . remove Number.nan . size . should_equal 0

Test.specify "should support dates as keys" <|
group_builder.specify "should support dates as keys" <|
map = Map.empty.insert (Date.new 1993) 1 . insert (Date.new 1993 2 5) 2 . insert (Date_Time.new 1993 2 5 13 45) 3
map.size.should_equal 3
map.get (Date.new 1993 6 7) . should_equal Nothing
@ -171,7 +171,7 @@ spec =
map.get (Date_Time.new 1993 2 5) . should_equal Nothing
map.get (Date_Time.new 1993 2 5 13 45) . should_equal 3

Test.specify "should support another hash map as key" <|
group_builder.specify "should support another hash map as key" <|
key_map = Map.singleton (Pair.new "one" "two") 42
map = Map.singleton key_map 23
map.size.should_equal 1
@ -179,13 +179,13 @@ spec =
(map.get key_map).should_equal 23
(map.get map).should_equal Nothing

Test.specify "should support another hash map with NaN keys as key" <|
group_builder.specify "should support another hash map with NaN keys as key" <|
Map.singleton (Map.singleton Number.nan 1) 42 . size . should_equal 1
Map.singleton (Map.singleton Number.nan 1) 42 . keys . at 0 . keys . to_text . should_equal "[NaN]"
Map.singleton (Map.singleton Number.nan 1) 42 . keys . at 0 . get Number.nan . should_equal 1
Map.singleton (Map.singleton Number.nan 1) 42 . at (Map.singleton Number.nan 1) . should_equal 42

Test.specify "should support atoms with custom comparators that violate reflexivity as keys" <|
group_builder.specify "should support atoms with custom comparators that violate reflexivity as keys" <|
k = My_Nan.Value "foo"
k2 = My_Nan.Value "foo"
(k==k).should_be_true
@ -206,7 +206,7 @@ spec =
m3.size . should_equal 2
m3.get k . should_equal 30

Test.specify "should support atom with custom comparators with complicated hash method" <|
group_builder.specify "should support atom with custom comparators with complicated hash method" <|
keys = 0.up_to 500 . map ix->
value = ["A", "B", "C", "D", "E"].at (ix % 5)
hash_code = Comparable.from value . hash value
@ -220,74 +220,74 @@ spec =
acc_map.insert item True
distinct_key_values.size . should_equal 5

Test.specify "should handle keys with standard equality semantics" <|
group_builder.specify "should handle keys with standard equality semantics" <|
map = Map.singleton 2 "Hello"
(map.get 2).should_equal "Hello"
(map.get 2.0).should_equal "Hello"
(Map.singleton 2 "Hello").should_equal (Map.singleton 2.0 "Hello")

Test.specify "should handle Nothing as keys" <|
group_builder.specify "should handle Nothing as keys" <|
Map.singleton Nothing 3 . get Nothing . should_equal 3
Map.singleton Nothing 1 . insert Nothing 2 . get Nothing . should_equal 2
Map.singleton Nothing 1 . should_equal (Map.singleton Nothing 1)
Map.singleton Nothing 1 . insert Nothing 2 . at Nothing . should_equal 2
Map.singleton js_null 1 . at Nothing . should_equal 1

Test.specify "should handle incomparable values as keys" <|
group_builder.specify "should handle incomparable values as keys" <|
Map.empty.insert Number.nan 1 . insert Number.nan 2 . get Number.nan . should_equal 2

Test.specify "should handle Nothing as values" <|
group_builder.specify "should handle Nothing as values" <|
Map.singleton 1 Nothing . at 1 . should_equal Nothing
Map.singleton Nothing Nothing . at Nothing . should_equal Nothing

Test.specify "should support rewriting values with same keys" <|
group_builder.specify "should support rewriting values with same keys" <|
map = Map.empty.insert "a" 1 . insert "a" 42
map.size.should_equal 1
map.get "a" . should_equal 42

Test.specify "should allow storing atoms as values" <|
group_builder.specify "should allow storing atoms as values" <|
json = Json.parse '{"a": 1}'
pair = Pair.new "first" "second"
map = Map.empty.insert 0 json . insert 1 pair
map.get 0 . should_equal json
map.get 1 . should_equal pair

Test.specify "should not drop warnings from keys" <|
group_builder.specify "should not drop warnings from keys" <|
key = Warning.attach "my_warn" "my_key"
map = Map.singleton key 42
(Warning.get_all (map.keys.at 0)).length . should_equal 1

Test.specify "should not drop warnings from values" <|
group_builder.specify "should not drop warnings from values" <|
val = Warning.attach "my_warn" "my_val"
map = Map.singleton 42 val
(Warning.get_all (map.values.at 0)).length . should_equal 1

Test.specify "should convert the whole map to a vector" <|
group_builder.specify "should convert the whole map to a vector" <|
m = Map.empty . insert 0 0 . insert 3 -5 . insert 1 2
m.to_vector.sort on=_.first . should_equal [[0, 0], [1, 2], [3, -5]]

Test.specify "should allow building the map from two vectors" <|
group_builder.specify "should allow building the map from two vectors" <|
expected = Map.empty . insert 0 0 . insert 3 -5 . insert 1 2
Map.from_keys_and_values [0, 3, 1] [0, -5, 2] . should_equal expected

Test.specify "should allow building the map from vector like things" <|
group_builder.specify "should allow building the map from vector like things" <|
expected = Map.empty . insert 0 0 . insert 1 -5 . insert 2 2
Map.from_keys_and_values (0.up_to 3) [0, -5, 2] . should_equal expected

Test.specify "should not allow building with duplicate keys unless explicitly allowed" <|
group_builder.specify "should not allow building with duplicate keys unless explicitly allowed" <|
expected = Map.empty . insert 0 0 . insert 3 -5 . insert 1 2
Map.from_keys_and_values [0, 3, 1, 0] [3, -5, 2, 0] . should_fail_with Illegal_Argument
Map.from_keys_and_values [0, 3, 1, 0] [3, -5, 2, 0] error_on_duplicates=False . should_equal expected

Test.specify "should not allow different length vectors when building" <|
group_builder.specify "should not allow different length vectors when building" <|
Map.from_keys_and_values [0, 3, 1] [3, -5, 2, 0] . should_fail_with Illegal_Argument

Test.specify "should allow building the map from a vector" <|
group_builder.specify "should allow building the map from a vector" <|
expected = Map.empty . insert 0 0 . insert 3 -5 . insert 1 2
vec = [[0, 0], [3, -5], [1, 2]]
Map.from_vector vec . should_equal expected

Test.specify "should not allow duplicates when building the map from a vector, unless explicitly allowed" <|
group_builder.specify "should not allow duplicates when building the map from a vector, unless explicitly allowed" <|
vec = [[0, 0], [3, -5], [1, 2], [0, 1]]
m1 = Map.from_vector vec
m1.should_fail_with Illegal_Argument
@ -298,13 +298,13 @@ spec =
m2.get 0 . should_equal 1
m2.get 3 . should_equal -5

Test.specify "should define a well-defined text conversion" <|
group_builder.specify "should define a well-defined text conversion" <|
m = Map.empty . insert 0 0 . insert 3 -5 . insert 1 2
m.to_text . should_contain "0=0"
m.to_text . should_contain "3=-5"
m.to_text . should_contain "1=2"

Test.specify "should define structural equality" <|
group_builder.specify "should define structural equality" <|
map_1 = Map.empty . insert "1" 2 . insert "2" "1"
map_2 = Map.empty . insert "1" 2 . insert "2" "1"
map_3 = Map.empty
@ -312,100 +312,100 @@ spec =
map_1==map_3 . should_be_false
map_2==map_3 . should_be_false

Test.specify "should allow inserting and looking up values" <|
group_builder.specify "should allow inserting and looking up values" <|
m = Map.empty . insert "foo" 134 . insert "bar" 654 . insert "baz" "spam"
m.at "foo" . should_equal 134
m.at "bar" . should_equal 654
m.at "baz" . should_equal "spam"
(m.at "nope").should_fail_with No_Such_Key

Test.specify "should support get" <|
group_builder.specify "should support get" <|
m = Map.empty . insert 2 3
m.get 2 0 . should_equal 3
m.get 1 10 . should_equal 10
m.get 2 (Panic.throw "missing") . should_equal 3

Test.specify "should allow getting a vector of the keys" <|
group_builder.specify "should allow getting a vector of the keys" <|
m = Map.empty . insert 1 2 . insert 2 4
m.keys . should_equal [1, 2]

Test.specify "should allow getting a vector of the values" <|
group_builder.specify "should allow getting a vector of the values" <|
m = Map.empty . insert 1 2 . insert 2 4
m.values . should_equal [2, 4]

Test.specify "should support contains_key" <|
group_builder.specify "should support contains_key" <|
m = Map.empty . insert 2 3
m.contains_key 2 . should_be_true
m.contains_key 1 . should_be_false

Test.specify "should allow transforming the map" <|
group_builder.specify "should allow transforming the map" <|
m = Map.empty . insert 1 2 . insert 2 4
expected = Map.empty . insert "1" 4 . insert "2" 8
m.transform (k -> v -> [k.to_text, v*2]) . should_equal expected

Test.specify "should disallow duplicate keys when transforming the map" <|
group_builder.specify "should disallow duplicate keys when transforming the map" <|
m = Map.from_vector [[1, 2], [11, 3]]
m2 = m.transform (k -> v -> [k % 10, v*2])
m2.should_fail_with Illegal_Argument
m2.catch.message . should_equal "`Map.transform` encountered duplicate key: 1"

Test.specify "should allow mapping over values" <|
group_builder.specify "should allow mapping over values" <|
m = Map.empty . insert 1 2 . insert 2 4
expected = Map.empty . insert 1 4 . insert 2 8
m.map (v -> v*2) . should_equal expected

Test.specify "should allow mapping over keys" <|
group_builder.specify "should allow mapping over keys" <|
m = Map.empty . insert 1 2 . insert 2 4
expected = Map.empty . insert 2 2 . insert 4 4
m.map_keys (k -> k*2) . should_equal expected

Test.specify "should allow mapping with keys" <|
group_builder.specify "should allow mapping with keys" <|
m = Map.empty . insert 1 2 . insert 2 4
expected = Map.empty . insert 1 3 . insert 2 6
m.map_with_key (k -> v -> k + v) . should_equal expected

Test.specify "should allow iterating over each value" <|
group_builder.specify "should allow iterating over each value" <|
m = Map.empty . insert 1 2 . insert 2 4
vec = Vector.new_builder
expected_vec = [2, 4]
m.each (v -> vec.append v)
vec.to_vector . should_equal expected_vec

Test.specify "should allow iterating over each key-value pair" <|
group_builder.specify "should allow iterating over each key-value pair" <|
m = Map.empty . insert 1 2 . insert 2 4
vec = Vector.new_builder
expected_vec = [3, 6]
m.each_with_key (k -> v -> vec.append (k+v))
vec.to_vector . should_equal expected_vec

Test.specify "should allow folding over the values" <|
group_builder.specify "should allow folding over the values" <|
m = Map.empty . insert 1 2 . insert 2 4
m.fold 0 (+) . should_equal 6

Test.specify "should allow folding over the key-value pairs" <|
group_builder.specify "should allow folding over the key-value pairs" <|
m = Map.empty . insert 1 2 . insert 2 4
m.fold_with_key 0 (l -> k -> v -> l + k + v) . should_equal 9

Test.specify "should be able to add a Nothing key to the map of Text" <|
group_builder.specify "should be able to add a Nothing key to the map of Text" <|
m = Map.empty . insert "A" 2 . insert Nothing 1 . insert "B" 3
m.at "A" . should_equal 2
m.at "B" . should_equal 3
m.at Nothing . should_equal 1

Test.specify "should be able to add a Nothing key to the map of Integer" <|
group_builder.specify "should be able to add a Nothing key to the map of Integer" <|
m = Map.empty . insert 100 2 . insert Nothing 1 . insert 200 3
m.at 100 . should_equal 2
m.at 200 . should_equal 3
m.at Nothing . should_equal 1

Test.specify "should be able to remove entries (1)" <|
group_builder.specify "should be able to remove entries (1)" <|
m1 = Map.empty.insert "A" 1 . insert "B" 2
m2 = m1.remove "B"
m2.get "A" . should_equal 1
m2.remove "A" . should_equal Map.empty
Test.expect_panic_with (m1.remove "foo") Any

Test.specify "should be able to remove entries (2)" <|
group_builder.specify "should be able to remove entries (2)" <|
m1 = Map.empty.insert "A" 1
m2 = m1.insert "B" 2
m3 = m1.insert "C" 3
@ -414,50 +414,50 @@ spec =
m3.remove "A" . to_vector . should_equal [["C", 3]]
m3.remove "C" . to_vector . should_equal [["A", 1]]

Test.specify "should be able to remove entries (3)" <|
group_builder.specify "should be able to remove entries (3)" <|
m = Map.empty.insert "A" 1 . insert "B" 2 . insert "C" 3
m.remove "B" . should_equal (Map.singleton "A" 1 . insert "C" 3)

Test.group "Polyglot keys and values" <|
Test.specify "should support polyglot keys" <|
suite_builder.group "Polyglot keys and values" group_builder->
group_builder.specify "should support polyglot keys" <|
map = Map.singleton (js_str "A") 42
map.size.should_equal 1
map.get "A" . should_equal 42
map.get (js_str "A") . should_equal 42

Test.specify "should support host objects as keys" <|
group_builder.specify "should support host objects as keys" <|
# JavaPath has proper implementation of hashCode
map = Map.singleton (JavaPath.of "/home/user/file.txt") 42
map.get "X" . should_equal Nothing
map.get "A" . should_equal Nothing
map.get (JavaPath.of "/home/user/file.txt") . should_equal 42

Test.specify "should support Python objects as keys" pending=pending_python_missing <|
group_builder.specify "should support Python objects as keys" pending=pending_python_missing <|
py_obj = py_wrapper 42
map = Map.singleton py_obj "Value"
map.get py_obj . should_equal "Value"

Test.specify "should support Python objects as values" pending=pending_python_missing <|
group_builder.specify "should support Python objects as values" pending=pending_python_missing <|
map = Map.singleton "A" (py_wrapper 42)
map.get "A" . data . should_equal 42

Test.specify "should insert entries to a polyglot map" pending=pending_python_missing <|
group_builder.specify "should insert entries to a polyglot map" pending=pending_python_missing <|
dict = py_dict_from_vec ["A", 1, "B", 2]
dict.insert "C" 3 . keys . sort . should_equal ["A", "B", "C"]

Test.specify "should remove entries from a polyglot map" pending=pending_python_missing <|
group_builder.specify "should remove entries from a polyglot map" pending=pending_python_missing <|
dict = py_dict_from_vec ["A", 1, "B", 2]
dict.remove "B" . to_vector . should_equal [["A", 1]]

Test.group "non-linear inserts" <|
Test.specify "should handle inserts with different keys" <|
suite_builder.group "non-linear inserts" group_builder->
group_builder.specify "should handle inserts with different keys" <|
m1 = Map.singleton "A" 1
m2 = m1.insert "B" 2
m3 = m1.insert "C" 3
m2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]]
m3.to_vector.sort on=_.first . should_equal [["A", 1], ["C", 3]]

Test.specify "should handle inserts with same keys (1)" <|
group_builder.specify "should handle inserts with same keys (1)" <|
m1 = Map.singleton "A" 1
m2 = m1.insert "A" 2
m3 = m1.insert "A" 3
@ -466,7 +466,7 @@ spec =
m3.to_vector.sort on=_.first . should_equal [["A", 3]]
m4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 4]]

Test.specify "should handle inserts with same keys (2)" <|
group_builder.specify "should handle inserts with same keys (2)" <|
m1 = Map.singleton "foo" 1
m2 = m1.insert "baz" 2
m3 = m2.insert "foo" 3
@ -474,7 +474,7 @@ spec =
m2.to_vector.sort on=_.first . should_equal [['baz', 2], ['foo', 1]]
m3.to_vector.sort on=_.first . should_equal [['baz', 2], ['foo', 3]]

Test.specify "should handle inserts with same keys (3)" <|
group_builder.specify "should handle inserts with same keys (3)" <|
m1 = Map.singleton "A" 1
m2 = m1.insert "B" 2
m3 = m2.insert "A" 3
@ -484,7 +484,7 @@ spec =
m3.to_vector.sort on=_.first . should_equal [["A", 3], ["B", 2]]
m4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 4]]

Test.specify "should handle inserts with same keys (4)" <|
group_builder.specify "should handle inserts with same keys (4)" <|
m1 = Map.singleton "A" 1
m2 = m1.insert "B" 2
m3 = m2.insert "C" 3
@ -493,7 +493,7 @@ spec =
m3.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]]
m4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["D", 4]]

Test.specify "should handle inserts with same keys (5)" <|
group_builder.specify "should handle inserts with same keys (5)" <|
m1 = Map.singleton "A" 1
m2 = m1.insert "B" 2
m3 = m2.insert "A" 3
@ -502,7 +502,7 @@ spec =
m3.to_vector.sort on=_.first . should_equal [["A", 3], ["B", 2]]
m4.to_vector.sort on=_.first . should_equal [["A", 4], ["B", 2]]

Test.specify "should handle inserts with same keys (6)" <|
group_builder.specify "should handle inserts with same keys (6)" <|
m1 = Map.singleton "A" 1
m2 = m1.insert "B" 2
m3 = m2.insert "C" 3
@ -511,7 +511,7 @@ spec =
m3.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]]
m4.to_vector.sort on=_.first . should_equal [["A", 4], ["B", 2]]

Test.specify "should handle inserts with same keys (7)" <|
group_builder.specify "should handle inserts with same keys (7)" <|
m1 = Map.singleton "A" 1
m2 = m1.insert "B" 2
m3 = m2.insert "C" 3
@ -522,7 +522,7 @@ spec =
m4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3], ["D", 4]]
m5.to_vector.sort on=_.first . should_equal [["A", 5], ["B", 2]]

Test.specify "should handle inserts with same keys (8)" <|
group_builder.specify "should handle inserts with same keys (8)" <|
m1 = Map.singleton "A" 1
m2 = m1.insert "B" 2
m3 = m2.insert "C" 3
@ -533,7 +533,7 @@ spec =
m4.to_vector.sort on=_.first . should_equal [["A", 4], ["B", 2], ["C", 3]]
m5.to_vector.sort on=_.first . should_equal [["A", 5], ["B", 2]]

Test.specify "should handle inserts with same keys (9)" <|
group_builder.specify "should handle inserts with same keys (9)" <|
m1 = Map.singleton "A" 1
m2 = m1.insert "B" 2
m3 = m2.insert "A" 3
@ -544,7 +544,7 @@ spec =
m4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 4]]
m5.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 5]]

Test.specify "should handle inserts with same keys (10)" <|
group_builder.specify "should handle inserts with same keys (10)" <|
m1 = Map.singleton "A" 1
m2 = m1.insert "B" 2
m3 = m2.insert "C" 3
@ -555,26 +555,26 @@ spec =
m4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["D", 4]]
m5.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["E", 5]]

Test.group "Polyglot hash maps" <|
Test.specify "should pass maps as immutable maps to other langs" pending=pending_python_missing <|
suite_builder.group "Polyglot hash maps" group_builder->
group_builder.specify "should pass maps as immutable maps to other langs" pending=pending_python_missing <|
map = Map.singleton "A" 1
# Python's KeyError should be raised
Test.expect_panic_with (py_update_dict map "A" 2) Any
map.get "A" . should_equal 1

Test.specify "should treat JavaScript maps as Enso maps" <|
group_builder.specify "should treat JavaScript maps as Enso maps" <|
js_dict = js_dict_from_vec ["A", 1, "B", 2]
map = js_dict.insert "C" 3
js_dict.to_vector.should_equal [["A", 1], ["B", 2]]
map.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]]

Test.specify "should treat Java Map as Enso map" <|
group_builder.specify "should treat Java Map as Enso map" <|
sort_by_keys vec = vec.sort by=x-> y-> Ordering.compare x.first y.first
jmap = JavaMap.of "A" 1 "B" 2
(sort_by_keys jmap.to_vector) . should_equal [["A", 1], ["B", 2]]
(sort_by_keys (jmap.insert "C" 3 . to_vector)) . should_equal [["A", 1], ["B", 2], ["C", 3]]

Test.specify "should treat Python dicts as Enso maps" pending=pending_python_missing <|
group_builder.specify "should treat Python dicts as Enso maps" pending=pending_python_missing <|
py_dict = py_dict_from_vec ["A", 1, "B", 2]
map = py_dict.insert "C" 3
py_dict.not_empty . should_be_true
@ -583,18 +583,22 @@ spec =
py_empty_dict.is_empty.should_be_true
py_empty_dict.insert "A" 1 . insert "A" 2 . get "A" . should_equal 2

Test.specify "should pass maps with null keys to Python and back" pending=pending_python_missing <|
group_builder.specify "should pass maps with null keys to Python and back" pending=pending_python_missing <|
# Python supports None as keys, Enso support Nothing as keys
py_dict = py_dict_from_map (Map.singleton Nothing 42)
py_dict.get Nothing . should_equal 42
py_dict.insert "A" 23 . get Nothing . should_equal 42
py_dict.insert Nothing 23 . get Nothing . should_equal 23

Test.specify "should treat Enso maps as Python dicts when passed to Python" pending=pending_python_missing <|
group_builder.specify "should treat Enso maps as Python dicts when passed to Python" pending=pending_python_missing <|
map1 = Map.empty.insert "A" 1 . insert "B" 2
py_vec_from_map map1 . should_contain_the_same_elements_as [["A", 1], ["B", 2]]
map2 = Map.empty.insert "A" 1 . insert Nothing 2
py_vec_from_map map2 . should_contain_the_same_elements_as [["A", 1], [Nothing, 2]]


main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

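The Python-dependent specs above keep the `pending` argument. With the new `Group_Builder.specify` signature (see the change at the top of this commit), a `Text` reason marks the spec as pending and its block is not evaluated, while `Nothing` runs it normally. A minimal sketch, assuming a `reason` value computed elsewhere in the file (as `pending_python_missing` is):

    # `reason` is either Nothing (run the spec) or a Text explaining why it is skipped.
    group_builder.specify "runs only when the reason is Nothing" pending=reason <|
        (1 + 1) . should_equal 2
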
@ -1,21 +1,25 @@
from Standard.Base import all

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

spec = Test.group "Maybe" <|
Test.specify "should have a None variant" <|

add_specs suite_builder = suite_builder.group "Maybe" group_builder->
group_builder.specify "should have a None variant" <|
Maybe.None . should_equal Maybe.None
Test.specify "should have a Some variant" <|
group_builder.specify "should have a Some variant" <|
(Maybe.Some 2).value . should_equal 2
Test.specify "should provide the `maybe` function" <|
group_builder.specify "should provide the `maybe` function" <|
Maybe.None.maybe 2 x->x . should_equal 2
(Maybe.Some 7).maybe 2 (*2) . should_equal 14
Test.specify "should provide `is_some`" <|
group_builder.specify "should provide `is_some`" <|
Maybe.None.is_some . should_be_false
Maybe.Some 2 . is_some . should_be_true
Test.specify "should provide `is_none`" <|
group_builder.specify "should provide `is_none`" <|
Maybe.None.is_none . should_be_true
Maybe.Some 2 . is_none . should_be_false

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -704,6 +704,7 @@ main =
add_specs suite_builder
suite.run_with_filter


foreign js to_js_bigint n = """
return BigInt(n)

@ -5,8 +5,8 @@ polyglot java import org.enso.base.ObjectComparator

polyglot java import org.enso.base.CompareException

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


# === Test Resources ===

@ -36,22 +36,22 @@ Comparable.from (_:No_Ord) = No_Ord_Comparator

# Tests

spec = Test.group "Object Comparator" <|
add_specs suite_builder = suite_builder.group "Object Comparator" group_builder->
handle_comp_exc = Panic.catch CompareException handler=(exc -> Error.throw (Incomparable_Values.Error exc.payload.getLeftOperand exc.payload.getRightOperand))
default_comparator a b = handle_comp_exc <| ObjectComparator.DEFAULT.compare a b
case_insensitive a b = handle_comp_exc <| ObjectComparator.new False Locale.default.java_locale . compare a b

Test.specify "can compare numbers" <|
group_builder.specify "can compare numbers" <|
((default_comparator 1 2) < 0) . should_equal True
((default_comparator 1 1.2) < 0) . should_equal True
((default_comparator 1 1) == 0) . should_equal True

Test.specify "can compare booleans" <|
group_builder.specify "can compare booleans" <|
((default_comparator True False) > 0) . should_equal True
((default_comparator True True) == 0) . should_equal True
((default_comparator False False) == 0) . should_equal True

Test.specify "can compare Nothing and it ends up as lowest value" <|
group_builder.specify "can compare Nothing and it ends up as lowest value" <|
((default_comparator 1 Nothing) > 0) . should_equal True
((default_comparator Nothing 1.235) < 0) . should_equal True
((default_comparator True Nothing) > 0) . should_equal True
@ -60,22 +60,26 @@ spec = Test.group "Object Comparator" <|
((default_comparator Nothing "ZSA") < 0) . should_equal True
((default_comparator Nothing Nothing) == 0) . should_equal True

Test.specify "can compare Text with Enso standard defaults" <|
group_builder.specify "can compare Text with Enso standard defaults" <|
((default_comparator "A" "a") < 0) . should_equal True
((default_comparator "ABBA" "ABBA") == 0) . should_equal True
((default_comparator '\u00E9' '\u0065\u{301}') == 0) . should_equal True

Test.specify "can compare Text with case-insensitive comparisons" <|
group_builder.specify "can compare Text with case-insensitive comparisons" <|
((case_insensitive "A" "a") == 0) . should_equal True
((case_insensitive "ABBA" "abba") == 0) . should_equal True
((case_insensitive '\u00E9' '\u0065\u{301}') == 0) . should_equal True

Test.specify "can compare custom types" <|
group_builder.specify "can compare custom types" <|
((default_comparator (Ord.Value 1) (Ord.Value 0)) < 0) . should_equal True
((default_comparator (Ord.Value 1) (Ord.Value 1)) == 0) . should_equal True

Test.specify "should fail gracefully for incomparable items" <|
group_builder.specify "should fail gracefully for incomparable items" <|
(default_comparator 1 True) . should_fail_with Incomparable_Values
(default_comparator (No_Ord.Value 1) (No_Ord.Value 2)).should_fail_with Incomparable_Values

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

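For context, the `handle_comp_exc` helper above converts the Java `CompareException` panic raised by `ObjectComparator` into an Enso `Incomparable_Values` dataflow error, which is why the last spec can assert with `should_fail_with` rather than expecting a panic. A usage sketch composed only from the definitions shown in this diff:

    # Comparing incompatible values panics inside the Java comparator;
    # the wrapper turns that panic into an `Incomparable_Values` error value.
    result = handle_comp_exc <| ObjectComparator.DEFAULT.compare 1 True
    result . should_fail_with Incomparable_Values
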
@ -1,18 +1,18 @@
from Standard.Base import all

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

spec = Test.group "Natural Order" <|

add_specs suite_builder = suite_builder.group "Natural Order" group_builder->
case_insensitive_compare a b = Natural_Order.compare a b Case_Sensitivity.Insensitive

Test.specify "should behave as shown in examples" <|
group_builder.specify "should behave as shown in examples" <|
Natural_Order.compare "a2" "a100" . should_equal Ordering.Less
["a2", "a1", "a100", "a001", "a0001"].sort by=Natural_Order.compare . should_equal ["a0001", "a001", "a1", "a2", "a100"]
["A2", "a1", "A100", "A001", "a0001"].sort by=Natural_Order.compare . should_equal ["A001", "A2", "A100", "a0001", "a1"]
["A2", "a1", "A100", "A001", "a0001"].sort by=case_insensitive_compare . should_equal ["a0001", "A001", "a1", "A2", "A100"]

Test.specify "should correctly compare values" <|
group_builder.specify "should correctly compare values" <|
Natural_Order.compare "a1" "a2" . should_equal Ordering.Less
Natural_Order.compare "a0001" "a01" . should_equal Ordering.Less
Natural_Order.compare "a0001" "a1" . should_equal Ordering.Less
@ -27,7 +27,7 @@ spec = Test.group "Natural Order" <|

Natural_Order.compare "2 ft 17 in" "2 ft 3 in" . should_equal Ordering.Greater

Test.specify "should correctly work with groups of numbers and delimiters" <|
group_builder.specify "should correctly work with groups of numbers and delimiters" <|
Natural_Order.compare "127.0.0.1" "200" . should_equal Ordering.Less
Natural_Order.compare "200" "127.0.0.1" . should_equal Ordering.Greater
Natural_Order.compare "127.0.0.1" "255.255.255.0" . should_equal Ordering.Less
@ -44,7 +44,7 @@ spec = Test.group "Natural Order" <|
["255.255.0.0", "127.0.0.1", "255.255.255.0", "200"].sort by=Natural_Order.compare . should_equal ["127.0.0.1", "200", "255.255.0.0", "255.255.255.0"]
["100-200.300", "1.2.3", "4.5.6", "4-5-6"].sort by=Natural_Order.compare . should_equal ["1.2.3", "4-5-6", "4.5.6", "100-200.300"]

Test.specify "does not treat a floating point in a special way" <|
group_builder.specify "does not treat a floating point in a special way" <|
Natural_Order.compare "0" "0.0" . should_equal Ordering.Less
Natural_Order.compare "0" "1.0001" . should_equal Ordering.Less
Natural_Order.compare "1.0001" "1.01" . should_equal Ordering.Less
@ -59,4 +59,8 @@ spec = Test.group "Natural Order" <|

["1.0002", "1.0001", "1.01", "1.1", "1.10", "1.2", "2", "0", "1.20"].sort by=Natural_Order.compare . should_equal ["0", "1.0001", "1.01", "1.1", "1.0002", "1.2", "1.10", "1.20", "2"]

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -2,22 +2,26 @@ from Standard.Base import all

import Standard.Base.Data.Ordering.Vector_Lexicographic_Order

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


type My_Type
Value a b

spec = Test.group "Lexicographic Order on Vectors" <|
Test.specify "should behave as shown in examples" <|
add_specs suite_builder = suite_builder.group "Lexicographic Order on Vectors" group_builder->
group_builder.specify "should behave as shown in examples" <|
Vector_Lexicographic_Order.compare [1, 2, 3] [1, 3, 0] . should_equal Ordering.Less
Vector_Lexicographic_Order.compare [1, 2, 3] [1, 2] . should_equal Ordering.Greater
Vector_Lexicographic_Order.compare [] [1] . should_equal Ordering.Less
Vector_Lexicographic_Order.compare [1] [1] . should_equal Ordering.Equal

Test.specify "should work correctly with a custom comparator" <|
group_builder.specify "should work correctly with a custom comparator" <|
comparator = x-> y-> Ordering.compare x.a y.a
Vector_Lexicographic_Order.compare [My_Type.Value "a" 1, My_Type.Value "b" 1, My_Type.Value "c" 1] [My_Type.Value "b" 1, My_Type.Value "a" 1, My_Type.Value "c" 1] element_comparator=comparator . should_equal Ordering.Less
Vector_Lexicographic_Order.compare [My_Type.Value "a" 1, My_Type.Value "b" 1, My_Type.Value "c" 1] [My_Type.Value "a" 100, My_Type.Value "b" 2, My_Type.Value "c" 3] element_comparator=comparator . should_equal Ordering.Equal

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -2,8 +2,8 @@ from Standard.Base import all
import Standard.Base.Errors.Common.Incomparable_Values
import Standard.Base.Errors.Common.Type_Error

from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all


# === Test Resources ===

@ -71,38 +71,38 @@ expect_no_warns result =


# === The Tests ===
spec =
add_specs suite_builder =
topo_sort_pending = "Waiting for implementation of topological sort (https://github.com/enso-org/enso/issues/5834)"

Test.group "Default comparator" <|
Test.specify "should support custom comparator" <|
suite_builder.group "Default comparator" group_builder->
group_builder.specify "should support custom comparator" <|
Ordering.compare (Ord.Value 1) (Ord.Value 2) . should_equal Ordering.Less
Ordering.compare (Ord.Value 1) (Ord.Value 1) . should_equal Ordering.Equal
Ordering.compare (Ord.Value 20) (Ord.Value 1) . should_equal Ordering.Greater
Ordering.compare (UPair.Value 1 2) (UPair.Value 2 1) . should_equal Ordering.Equal

Test.specify "should support equality for custom comparators in atom field" <|
group_builder.specify "should support equality for custom comparators in atom field" <|
((Parent.Value (Ord.Value 1)) == (Parent.Value (Ord.Value 1))) . should_be_true
((Parent.Value (Ord.Value 1)) == (Parent.Value (Ord.Value 22))) . should_be_false

Test.specify "should throw Incomparable_Values when comparing different types" <|
group_builder.specify "should throw Incomparable_Values when comparing different types" <|
Ordering.compare (UPair.Value 1 2) (Ord.Value 2) . should_fail_with Incomparable_Values

Test.specify "should throw Incomparable_Values when comparing Number with Nothing" <|
group_builder.specify "should throw Incomparable_Values when comparing Number with Nothing" <|
Ordering.compare 1 Nothing . should_fail_with Incomparable_Values

Test.group "Ordering" <|
Test.specify "should allow conversion to sign representation" <|
suite_builder.group "Ordering" group_builder->
group_builder.specify "should allow conversion to sign representation" <|
Ordering.Less.to_sign . should_equal -1
Ordering.Equal.to_sign . should_equal 0
Ordering.Greater.to_sign . should_equal 1

Test.specify "should allow conversion from sign representation" <|
group_builder.specify "should allow conversion from sign representation" <|
Ordering.from_sign -1 . should_equal Ordering.Less
Ordering.from_sign 0 . should_equal Ordering.Equal
Ordering.from_sign 1 . should_equal Ordering.Greater

Test.specify "should be ordered itself" <|
group_builder.specify "should be ordered itself" <|
Ordering.compare Ordering.Less Ordering.Less . should_equal Ordering.Equal
Ordering.compare Ordering.Less Ordering.Equal . should_equal Ordering.Less
Ordering.compare Ordering.Less Ordering.Greater . should_equal Ordering.Less
@ -113,7 +113,7 @@ spec =
Ordering.compare Ordering.Greater Ordering.Equal . should_equal Ordering.Greater
Ordering.compare Ordering.Greater Ordering.Greater . should_equal Ordering.Equal

Test.specify "should allow lexicographical composition" <|
group_builder.specify "should allow lexicographical composition" <|
Ordering.Less.and_then Ordering.Less . should_equal Ordering.Less
Ordering.Less.and_then Ordering.Equal . should_equal Ordering.Less
Ordering.Less.and_then Ordering.Greater . should_equal Ordering.Less
@ -124,20 +124,20 @@ spec =
Ordering.Greater.and_then Ordering.Equal . should_equal Ordering.Greater
Ordering.Greater.and_then Ordering.Greater . should_equal Ordering.Greater

Test.specify "should handle partial ordering of Float type" <|
group_builder.specify "should handle partial ordering of Float type" <|
Ordering.compare Number.nan 42.0 . should_fail_with Incomparable_Values
Ordering.compare 42.0 Number.nan . should_fail_with Incomparable_Values
Ordering.compare 42.5 67.9 . should_equal Ordering.Less
Meta.is_same_object (Comparable.from Number.nan) (Comparable.from 42.0) . should_be_true

Test.specify "should fail with Incomparable_Values for wrong type of that" <|
group_builder.specify "should fail with Incomparable_Values for wrong type of that" <|
Ordering.compare Ordering.Less 1 . should_fail_with Incomparable_Values
Ordering.compare Ordering.Less Nothing . should_fail_with Incomparable_Values
Ordering.compare Ordering.Less "Hello" . should_fail_with Incomparable_Values

Test.group "Sorting with the default comparator" <|
suite_builder.group "Sorting with the default comparator" group_builder->

Test.specify "should be able to sort primitive types" <|
group_builder.specify "should be able to sort primitive types" <|
[3, 2, 1, Nothing].sort . should_equal [1, 2, 3, Nothing]
[Nothing, Number.nan].sort . at 0 . is_nan . should_be_true
[Nothing, Number.nan].sort . at 1 . is_nothing . should_be_true
@ -153,7 +153,7 @@ spec =
[3, True, 2, False].sort . should_equal [2, 3, False, True]
[Nothing, False].sort . should_equal [False, Nothing]

Test.specify "should be able to sort any single-element vector without any warnings" <|
group_builder.specify "should be able to sort any single-element vector without any warnings" <|
[Nothing].sort . should_equal [Nothing]
expect_no_warns [Nothing].sort
[[Nothing]].sort . should_equal [[Nothing]]
@ -161,41 +161,41 @@ spec =
[[1]].sort . should_equal [[1]]
expect_no_warns [[1]].sort

Test.specify "should produce warnings when sorting nested vectors" <|
group_builder.specify "should produce warnings when sorting nested vectors" <|
[[1], [2]].sort . should_equal [[1], [2]]
[[2], [1]].sort . should_equal [[2], [1]]

Test.specify "should be able to sort primitive values in atoms" pending=topo_sort_pending <|
group_builder.specify "should be able to sort primitive values in atoms" pending=topo_sort_pending <|
[Ord.Value Nothing, Ord.Value 20, Ord.Value 10].sort . should_equal [Ord.Value 10, Ord.Value 20, Ord.Value Nothing]

Test.specify "should produce warnings when sorting primitive values in atoms" pending=topo_sort_pending <|
group_builder.specify "should produce warnings when sorting primitive values in atoms" pending=topo_sort_pending <|
expect_incomparable_warn (Ord.Value 1) (Ord.Value Nothing) [Ord.Value 1, Ord.Value Nothing].sort

Test.specify "should fail to sort custom incomparable values until topological sorting is implemented" <|
group_builder.specify "should fail to sort custom incomparable values until topological sorting is implemented" <|
[(UPair.Value 1 2), (UPair.Value 3 4)].sort . should_fail_with Incomparable_Values

Test.specify "should attach warning when trying to sort incomparable values" <|
group_builder.specify "should attach warning when trying to sort incomparable values" <|
expect_incomparable_warn Nothing Number.nan <| [Nothing, Number.nan].sort on_incomparable=Problem_Behavior.Report_Warning
expect_incomparable_warn 1 "hello" <| [1, "hello"].sort on_incomparable=Problem_Behavior.Report_Warning

Test.specify "should respect previous warnings on a vector" <|
group_builder.specify "should respect previous warnings on a vector" <|
Problems.expect_warning "my_warn" <| (Warning.attach "my_warn" [3, 2]) . sort
Problems.expect_warning "my_warn" <| (Warning.attach "my_warn" [3, Number.nan]) . sort
expect_incomparable_warn 3 Number.nan <| (Warning.attach "my_warn" [3, Number.nan]) . sort on_incomparable=Problem_Behavior.Report_Warning

Test.specify "should respect previous warnings on vectors" pending="https://github.com/enso-org/enso/issues/6070" <|
group_builder.specify "should respect previous warnings on vectors" pending="https://github.com/enso-org/enso/issues/6070" <|
Problems.expect_warning "my_warn" <| [3, Warning.attach "my_warn" 2].sort
expect_incomparable_warn 1 Number.nan [1, Warning.attach "my_warn" Number.nan].sort
Problems.expect_warning "my_warn" <| [1, Warning.attach "my_warn" Number.nan].sort

Test.specify "should not fail when sorting incomparable types without custom comparator" <|
group_builder.specify "should not fail when sorting incomparable types without custom comparator" <|
# Parent, and No_Comp_Type do not have custom comparators
[No_Comp_Type.Value 42, "hello"].sort . should_equal ["hello", No_Comp_Type.Value 42]
[Parent.Value 42, No_Comp_Type.Value 42].sort . should_equal [No_Comp_Type.Value 42, Parent.Value 42]
[No_Comp_Type.Value 42, Parent.Value 42].sort . should_equal [No_Comp_Type.Value 42, Parent.Value 42]

Test.group "Sorting with multiple comparators" <|
Test.specify "should sort primitive values with the default comparator as the first group" <|
suite_builder.group "Sorting with multiple comparators" group_builder->
group_builder.specify "should sort primitive values with the default comparator as the first group" <|
[Ord.Value 4, Ord.Value 3, 20, 10].sort . should_equal [10, 20, Ord.Value 3, Ord.Value 4]
[Ord.Value 4, 20, Ord.Value 3, 10].sort . should_equal [10, 20, Ord.Value 3, Ord.Value 4]
[20, Ord.Value 4, Ord.Value 3, 10].sort . should_equal [10, 20, Ord.Value 3, Ord.Value 4]
@ -203,18 +203,18 @@ spec =
[Nothing, Ord.Value 4, 20, Ord.Value 3, 10].sort . should_equal [10, 20, Nothing, Ord.Value 3, Ord.Value 4]
[Ord.Value 4, 20, Ord.Value 3, Nothing, 10].sort . should_equal [10, 20, Nothing, Ord.Value 3, Ord.Value 4]

Test.specify "should produce warning when sorting types with different comparators" <|
group_builder.specify "should produce warning when sorting types with different comparators" <|
[Ord.Value 1, 1].sort . should_equal [1, Ord.Value 1]
sorted = [Ord.Value 1, 1].sort on_incomparable=Problem_Behavior.Report_Warning
Warning.get_all sorted . at 0 . value . starts_with "Different comparators" . should_be_true

Test.specify "should merge groups of values with custom comparators based on the comparators FQN" <|
group_builder.specify "should merge groups of values with custom comparators based on the comparators FQN" <|
[Ord.Value 1, My_Type.Value 1].sort . should_equal [My_Type.Value 1, Ord.Value 1]
[My_Type.Value 1, Ord.Value 1].sort . should_equal [My_Type.Value 1, Ord.Value 1]
sorted = [Ord.Value 1, My_Type.Value 1].sort on_incomparable=Problem_Behavior.Report_Warning
Warning.get_all sorted . at 0 . value . starts_with "Different comparators" . should_be_true

Test.specify "should be stable when sorting values with different comparators" <|
group_builder.specify "should be stable when sorting values with different comparators" <|
[Ord.Value 1, 20, My_Type.Value 1, 10].sort . should_equal [10, 20, My_Type.Value 1, Ord.Value 1]
[20, Ord.Value 1, My_Type.Value 1, 10].sort . should_equal [10, 20, My_Type.Value 1, Ord.Value 1]
[20, My_Type.Value 1, Ord.Value 1, 10].sort . should_equal [10, 20, My_Type.Value 1, Ord.Value 1]
@ -222,14 +222,18 @@ spec =
[My_Type.Value 1, Ord.Value 1, 20, 10].sort . should_equal [10, 20, My_Type.Value 1, Ord.Value 1]
[Ord.Value 1, 20, 10, My_Type.Value 1].sort . should_equal [10, 20, My_Type.Value 1, Ord.Value 1]

Test.specify "should be able to sort even unordered values" pending=topo_sort_pending <|
group_builder.specify "should be able to sort even unordered values" pending=topo_sort_pending <|
[Ord.Value 2, UPair.Value "a" "b", Ord.Value 1, UPair.Value "c" "d"].sort . should_equal [Ord.Value 2, Ord.Value 1, UPair.Value "a" "b", UPair.Value "c" "d"]
[Ord.Value 2, UPair.Value "X" "Y", Ord.Value 1, UPair.Value "c" "d"].sort . should_equal [Ord.Value 2, Ord.Value 1, UPair.Value "X" "Y", UPair.Value "c" "d"]

Test.specify "should produce warning when sorting unordered values" pending=topo_sort_pending <|
group_builder.specify "should produce warning when sorting unordered values" pending=topo_sort_pending <|
expect_incomparable_warn (UPair.Value 1 2) (UPair.Value 3 4) [UPair.Value 1 2, UPair.Value 3 4].sort


main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

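The `Ord`, `UPair`, `My_Type` and `No_Comp_Type` helpers referenced above come from the elided `# === Test Resources ===` section. As a rough sketch only (not the actual definitions from the repository), a custom comparator in this style is registered roughly like this:

    type Ord
        Value number

    # A comparator type provides static `compare` and `hash` methods and is
    # attached to the value type via a `Comparable.from` conversion,
    # as in the `Comparable.from (_:No_Ord) = No_Ord_Comparator` line above.
    type Ord_Comparator
        compare x y = Ordering.compare x.number y.number
        hash x = Comparable.from x.number . hash x.number

    Comparable.from (_:Ord) = Ord_Comparator
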
@ -2,20 +2,20 @@ from Standard.Base import all
|
||||
import Standard.Base.Errors.Common.Index_Out_Of_Bounds
|
||||
import Standard.Base.Errors.Common.Not_Found
|
||||
|
||||
from Standard.Test import Test, Test_Suite
|
||||
import Standard.Test.Extensions
|
||||
from Standard.Test_New import all
|
||||
|
||||
type_spec name ctor = Test.group name <|
|
||||
Test.specify "should allow mapping" <|
|
||||
|
||||
type_spec suite_builder name ctor = suite_builder.group name group_builder->
|
||||
group_builder.specify "should allow mapping" <|
|
||||
ctor 1 2 . map x->x+1 . should_equal (ctor 2 3)
|
||||
|
||||
Test.specify "should have length 2" <|
|
||||
group_builder.specify "should have length 2" <|
|
||||
ctor "A" "B" . length . should_equal 2
|
||||
|
||||
Test.specify "should allow reversing" <|
|
||||
group_builder.specify "should allow reversing" <|
|
||||
ctor 1 2 . reverse . should_equal (ctor 2 1)
|
||||
|
||||
Test.specify "should have allow getting by index" <|
|
||||
group_builder.specify "should have allow getting by index" <|
|
||||
ctor "A" "B" . get 0 . should_equal "A"
|
||||
ctor "A" "B" . get -2 . should_equal "A"
|
||||
ctor "A" "B" . get 1 . should_equal "B"
|
||||
@ -28,15 +28,15 @@ type_spec name ctor = Test.group name <|
|
||||
ctor "A" "B" . at -1 . should_equal "B"
|
||||
ctor "A" "B" . at 2 . should_fail_with Index_Out_Of_Bounds
|
||||
|
||||
Test.specify "should have allow getting by first, second and last" <|
|
||||
group_builder.specify "should have allow getting by first, second and last" <|
|
||||
ctor "A" "B" . first . should_equal "A"
|
||||
ctor "A" "B" . second . should_equal "B"
|
||||
ctor "A" "B" . last . should_equal "B"
|
||||
|
||||
Test.specify "should be convertable to a vector" <|
|
||||
group_builder.specify "should be convertable to a vector" <|
|
||||
ctor "A" "B" . to_vector . should_equal ["A", "B"]
|
||||
|
||||
Test.specify "should allow checking all, any and contains" <|
|
||||
group_builder.specify "should allow checking all, any and contains" <|
|
||||
ctor 1 3 . any (>0) . should_equal True
|
||||
ctor 1 3 . any (>2) . should_equal True
|
||||
ctor 1 3 . any (>3) . should_equal False
|
||||
@ -47,7 +47,7 @@ type_spec name ctor = Test.group name <|
|
||||
ctor 1 3 . contains 2 . should_equal False
|
||||
ctor 1 3 . contains 3 . should_equal True
|
||||
|
||||
Test.specify "should allow finding an item" <|
|
||||
group_builder.specify "should allow finding an item" <|
|
||||
ctor 1 3 . find (>0) . should_equal 1
|
||||
ctor 1 3 . find (>2) . should_equal 3
|
||||
ctor 1 3 . find (>3) . should_fail_with Not_Found
|
||||
@ -60,7 +60,7 @@ type_spec name ctor = Test.group name <|
|
||||
ctor 10 30 . find (>0) start=3 . catch . should_equal (Index_Out_Of_Bounds.Error 3 3)
|
||||
ctor 1 3 . find (>3) if_missing=Nothing . should_equal Nothing
|
||||
|
||||
Test.specify "should allow finding the index of an item" <|
|
||||
group_builder.specify "should allow finding the index of an item" <|
|
||||
ctor 10 30 . index_of 10 . should_equal 0
|
||||
ctor 10 10 . index_of 10 . should_equal 0
|
||||
ctor 10 30 . index_of 30 . should_equal 1
|
||||
@ -74,7 +74,7 @@ type_spec name ctor = Test.group name <|
|
||||
ctor 10 30 . index_of (>0) start=4 . should_fail_with Index_Out_Of_Bounds
|
||||
ctor 10 30 . index_of (>0) start=-22 . should_fail_with Index_Out_Of_Bounds
|
||||
|
||||
Test.specify "should allow finding the last index of an item" <|
|
||||
group_builder.specify "should allow finding the last index of an item" <|
|
||||
ctor 10 30 . last_index_of 10 . should_equal 0
|
||||
ctor 10 10 . last_index_of 10 . should_equal 1
|
||||
ctor 10 30 . last_index_of 30 . should_equal 1
|
||||
@ -87,31 +87,35 @@ type_spec name ctor = Test.group name <|
|
||||
ctor 10 30 . last_index_of (>0) start=4 . should_fail_with Index_Out_Of_Bounds
|
||||
ctor 10 30 . last_index_of (>0) start=-22 . should_fail_with Index_Out_Of_Bounds
|
||||
|
||||
Test.specify "should allow folding an operator over its elements" <|
|
||||
group_builder.specify "should allow folding an operator over its elements" <|
|
||||
ctor 10 20 . fold 0 (+) . should_equal 30
|
||||
ctor "A" "B" . fold "" (+) . should_equal "AB"
|
||||
|
||||
Test.specify "should allow reducing an operator over its elements" <|
|
||||
group_builder.specify "should allow reducing an operator over its elements" <|
|
||||
ctor 10 20 . reduce (*) . should_equal 200
|
||||
ctor "Hello" "World" . reduce (a->b-> a+" "+b) . should_equal "Hello World"
|
||||
|
||||
Test.specify "should allow applying a function to each element" <|
|
||||
group_builder.specify "should allow applying a function to each element" <|
|
||||
vec_mut = Vector.new_builder
|
||||
ctor 10 20 . each vec_mut.append
|
||||
vec_mut.to_vector . should_equal [10, 20]
|
||||
|
||||
spec =
|
||||
Test.group "Pair " <|
|
||||
Test.specify "should be created by new" <|
|
||||
add_specs suite_builder =
|
||||
suite_builder.group "Pair " group_builder->
|
||||
group_builder.specify "should be created by new" <|
|
||||
Pair.new 1 2 . should_equal (Pair.Value 1 2)
|
||||
|
||||
type_spec "Pair - from Pair.new" Pair.new
|
||||
type_spec "Pair - from Pair.Value" Pair.Value
|
||||
type_spec "Pair - from 2-Item Vector" a->b->[a,b]
|
||||
type_spec "Pair - from 2-Item JavaScript Array" js_pair
|
||||
type_spec "Pair - from 2-Item List" a->b->(List.Cons a (List.Cons b List.Nil))
|
||||
type_spec suite_builder "Pair - from Pair.new" Pair.new
|
||||
type_spec suite_builder "Pair - from Pair.Value" Pair.Value
|
||||
type_spec suite_builder "Pair - from 2-Item Vector" a->b->[a,b]
|
||||
type_spec suite_builder "Pair - from 2-Item JavaScript Array" js_pair
|
||||
type_spec suite_builder "Pair - from 2-Item List" a->b->(List.Cons a (List.Cons b List.Nil))
|
||||
|
||||
foreign js js_pair a b = """
|
||||
return [a, b]
|
||||
|
||||
main = Test_Suite.run_main spec
|
||||
main =
|
||||
suite = Test.build suite_builder->
|
||||
add_specs suite_builder
|
||||
suite.run_with_filter
|
||||
|
||||
|
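Every file in this commit repeats the same entry-point rewrite shown above. As a minimal sketch of the pattern (the group name "My group" and the trivial spec body are illustrative; the surrounding shapes are taken verbatim from the hunks in this diff):

# Old framework (Standard.Test): a spec value run through Test_Suite.
spec = Test.group "My group" <|
    Test.specify "does something" <|
        1 . should_equal 1
main = Test_Suite.run_main spec

# New framework (Standard.Test_New): groups are registered on a suite builder,
# specs on a group builder, and the built suite is run with a filter.
add_specs suite_builder = suite_builder.group "My group" group_builder->
    group_builder.specify "does something" <|
        1 . should_equal 1
main =
    suite = Test.build suite_builder->
        add_specs suite_builder
    suite.run_with_filter
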
@ -1,7 +1,7 @@
from Standard.Base import all

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


polyglot java import java.lang.Double
polyglot java import java.lang.Integer as Java_Integer
@ -10,39 +10,39 @@ polyglot java import java.time.LocalDate
polyglot java import java.util.function.Function as Java_Function
polyglot java import org.enso.base_test_helpers.IntHolder

spec = Test.group "Polyglot" <|
Test.specify "should be able to invoke a polyglot method by name and pass arguments" <|
spec = suite_builder.group "Polyglot" group_builder->
group_builder.specify "should be able to invoke a polyglot method by name and pass arguments" <|
poly_date = LocalDate.now
date = Date.today.to_date_time

Polyglot.invoke poly_date "atStartOfDay" [] . should_equal date
Polyglot.invoke poly_date "atStartOfDay" [].to_array . should_equal date

Test.specify "should be able to create a new polyglot object using the constructor" <|
group_builder.specify "should be able to create a new polyglot object using the constructor" <|
Polyglot.new String ["42"] . should_equal "42"
Polyglot.new String ["42"].to_array . should_equal "42"

Test.specify "use Integer created by constructor" <|
group_builder.specify "use Integer created by constructor" <|
Polyglot.new Java_Integer [42] . should_equal 42

Test.specify "use Double created by constructor" <|
group_builder.specify "use Double created by constructor" <|
Polyglot.new Double [42] . should_equal 42

Test.specify "use Integer read from Polyglot object" <|
group_builder.specify "use Integer read from Polyglot object" <|
(Polyglot.get_member js_meaning "meaning") . should_equal 42

Test.specify "access Integer field from Polyglot object" <|
group_builder.specify "access Integer field from Polyglot object" <|
js_meaning.meaning . should_equal 42

Test.specify "use Integer obtained from a call" <|
group_builder.specify "use Integer obtained from a call" <|
Java_Integer.parseInt "42" . should_equal 42

Test.specify "use Integer obtained from a read" <|
group_builder.specify "use Integer obtained from a read" <|
hold = IntHolder.new (6 * 7)
hold.value . should_equal 42
hold.boxed . should_equal 42

Test.specify "should be able to execute a polyglot function object along with corresponding arguments" <|
group_builder.specify "should be able to execute a polyglot function object along with corresponding arguments" <|
fun = Java_Function.identity
Polyglot.execute fun ["42"] . should_equal "42"
Polyglot.execute fun ["42"].to_array . should_equal "42"
@ -50,4 +50,8 @@ spec = Test.group "Polyglot" <|
foreign js js_meaning = """
return { meaning : 6 * 7 };

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

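Beyond the framework rename, the hunks above also convert argument vectors with `.to_array` before passing them to the polyglot helpers (the diff does not state the reason; presumably the helpers are now exercised with a host array instead of an Enso Vector). The changed call shape, copied from the lines above:

Polyglot.new String ["42"] . should_equal "42"            # before
Polyglot.new String ["42"].to_array . should_equal "42"   # after
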
@ -1,13 +1,17 @@
from Standard.Base import all
import Standard.Base.Errors.Illegal_State.Illegal_State

from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all

main = Test_Suite.run_main spec

spec = Test.group "Problem_Behavior" <|
Test.specify "escalate_warnings should escalate warnings according to its setting" <|
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter


add_specs suite_builder = suite_builder.group "Problem_Behavior" group_builder->
group_builder.specify "escalate_warnings should escalate warnings according to its setting" <|
w = Illegal_State.Error "Foo"
x = Warning.attach w 42

@ -23,7 +27,7 @@ spec = Test.group "Problem_Behavior" <|
x + 2
err3.should_fail_with Illegal_State

Test.specify "escalate_warnings should forward any dataflow errors as-is" <|
group_builder.specify "escalate_warnings should forward any dataflow errors as-is" <|
[Problem_Behavior.Report_Error, Problem_Behavior.Report_Warning, Problem_Behavior.Ignore].each pb->
x = Error.throw (Illegal_State.Error "Foo")
pb.escalate_warnings x . should_fail_with Illegal_State

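Note also, as the hunks above show, that a group's body changes from a trailing `<|` block to a lambda that receives the group builder; a two-line comparison taken from this file (old on top, new below):

spec = Test.group "Problem_Behavior" <|
add_specs suite_builder = suite_builder.group "Problem_Behavior" group_builder->
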
@ -7,11 +7,11 @@ import Standard.Base.Errors.Common.Unsupported_Argument_Types
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Errors.Illegal_State.Illegal_State

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

spec = Test.group "Range" <|
Test.specify "should be created with a start, an end and a step" <|

add_specs suite_builder = suite_builder.group "Range" group_builder->
group_builder.specify "should be created with a start, an end and a step" <|
range = 1.up_to 100
range.start . should_equal 1
range.end . should_equal 100
@ -37,21 +37,21 @@ spec = Test.group "Range" <|
range_5.end . should_equal 2
range_5.step . should_equal -2

Test.specify "should allow to include the end" <|
group_builder.specify "should allow to include the end" <|
1.up_to 3 include_end=True . to_vector . should_equal [1, 2, 3]
3.down_to 1 include_end=True . to_vector . should_equal [3, 2, 1]

1.up_to 1 include_end=True . to_vector . should_equal [1]
1.down_to 1 include_end=True . to_vector . should_equal [1]

Test.specify "should allow creation with Range.new" <|
group_builder.specify "should allow creation with Range.new" <|
Range.new . should_equal (Range.Between 0 100 1)
Range.new 5 20 . should_equal (Range.Between 5 20 1)
Range.new 20 5 . should_equal (Range.Between 20 5 -1)
Range.new 5 20 5 . should_equal (Range.Between 5 20 5)
Range.new 20 5 5 . should_equal (Range.Between 20 5 -5)

Test.specify "should allow setting a new step magnitude" <|
group_builder.specify "should allow setting a new step magnitude" <|
1.up_to 2 . with_step 3 . should_equal (Range.Between 1 2 3)

0.up_to 10 . with_step 2 . should_equal (Range.Between 0 10 2)
@ -60,7 +60,7 @@ spec = Test.group "Range" <|
10.down_to 0 . with_step 2 . should_equal (Range.Between 10 0 -2)
10.down_to 0 . with_step 2 . to_vector . should_equal [10, 8, 6, 4, 2]

Test.specify "should fail with type errors if not the wrong type" <|
group_builder.specify "should fail with type errors if not the wrong type" <|
Test.expect_panic_with (0.0.up_to 2) No_Such_Method
Test.expect_panic_with (0.0.down_to 2) No_Such_Method
Test.expect_panic_with (0.up_to 2.0) Type_Error
@ -72,7 +72,7 @@ spec = Test.group "Range" <|
1.up_to 2 . with_step 0 . should_fail_with Illegal_State
1.up_to 2 . with_step -1 . should_fail_with Illegal_Argument

Test.specify "should have a length" <|
group_builder.specify "should have a length" <|
0.up_to 100 . length . should_equal 100
100.down_to 0 . length . should_equal 100
0.up_to 0 . length . should_equal 0
@ -81,19 +81,19 @@ spec = Test.group "Range" <|
0.up_to 10 . with_step 2 . length . should_equal 5
10.down_to 0 . with_step 2 . length . should_equal 5
0.up_to 10 . with_step 3 . length . should_equal 4
Test.specify "should allow checking for emptiness" <|
group_builder.specify "should allow checking for emptiness" <|
0.up_to 0 . is_empty . should_be_true
0.up_to -100 . is_empty . should_be_true
0.up_to 1 . is_empty . should_be_false
0.up_to 5 . is_empty . should_be_false
5.down_to 0 . is_empty . should_be_false
Test.specify "should allow checking for non emptiness" <|
group_builder.specify "should allow checking for non emptiness" <|
0.up_to 0 . not_empty . should_be_false
0.up_to -100 . not_empty . should_be_false
0.up_to 1 . not_empty . should_be_true
0.up_to 5 . not_empty . should_be_true
5.down_to 0 . not_empty . should_be_true
Test.specify "should allow getting by index using at" <|
group_builder.specify "should allow getting by index using at" <|
0.up_to 0 . at 0 . should_fail_with Index_Out_Of_Bounds
0.up_to 100 . at 0 . should_equal 0
0.up_to 100 . at 5 . should_equal 5
@ -106,7 +106,7 @@ spec = Test.group "Range" <|
1.up_to 100 . with_step 5 . at 3 . should_equal 16
1.up_to 100 . with_step 5 . at -1 . should_equal 96
1.up_to 100 . with_step 5 . at -3 . should_equal 86
Test.specify "should allow getting by index using get" <|
group_builder.specify "should allow getting by index using get" <|
0.up_to 0 . get 0 . should_equal Nothing
0.up_to 100 . get 0 . should_equal 0
0.up_to 100 . get 5 . should_equal 5
@ -118,13 +118,13 @@ spec = Test.group "Range" <|
1.up_to 100 . with_step 5 . get 3 . should_equal 16
1.up_to 100 . with_step 5 . get -1 . should_equal 96
1.up_to 100 . with_step 5 . get -3 . should_equal 86
Test.specify "should allow getting first" <|
group_builder.specify "should allow getting first" <|
0.up_to 0 . first . should_fail_with Index_Out_Of_Bounds
0.up_to 0 . first . catch . should_equal (Index_Out_Of_Bounds.Error 0 0)
3.up_to 100 . first . should_equal 3
0.down_to 0 . first . should_fail_with Index_Out_Of_Bounds
0.down_to -3 . first . should_equal 0
Test.specify "should allow getting second" <|
group_builder.specify "should allow getting second" <|
0.up_to 0 . second . should_fail_with Index_Out_Of_Bounds
0.up_to 0 . second . catch . should_equal (Index_Out_Of_Bounds.Error 1 0)
3.up_to 100 . second . should_equal 4
@ -134,23 +134,23 @@ spec = Test.group "Range" <|
0.down_to -3 . second . should_equal -1
0.down_to -3 . with_step 2 . second . should_equal -2
0.down_to -3 . with_step 4 . second . should_fail_with Index_Out_Of_Bounds
Test.specify "should allow getting last" <|
group_builder.specify "should allow getting last" <|
0.up_to 0 . last . should_fail_with Index_Out_Of_Bounds
0.up_to 0 . last . catch . should_equal (Index_Out_Of_Bounds.Error 0 0)
3.up_to 100 . last . should_equal 99
3.up_to 100 . with_step 25 . last . should_equal 78
0.down_to 0 . last . should_fail_with Index_Out_Of_Bounds
0.down_to -3 . last . should_equal -2
Test.specify "should be able to be mapped over to make a Vector" <|
group_builder.specify "should be able to be mapped over to make a Vector" <|
empty = 0.up_to 0
empty.map *2 . should_equal []
elements = 0.up_to 10
elements.map *2 . should_equal [0, 2, 4, 6, 8, 10, 12, 14, 16, 18]
Test.specify "should allow to filter its elements, returning a vector" <|
group_builder.specify "should allow to filter its elements, returning a vector" <|
elements = 0.up_to 10
elements.filter (x -> x % 2 == 0) . should_equal [0, 2, 4, 6, 8]

Test.specify "should filter elements by Filter_Condition" <|
group_builder.specify "should filter elements by Filter_Condition" <|
range = 1.up_to 6
range.filter (Filter_Condition.Greater than=3) . should_equal [4, 5]
range.filter (Filter_Condition.Less than=3.5) . should_equal [1, 2, 3]
@ -177,7 +177,7 @@ spec = Test.group "Range" <|
range.filter Filter_Condition.Is_Empty . should_equal []
range.filter Filter_Condition.Not_Empty . should_equal [1, 2, 3, 4, 5]

Test.specify "should allow to partition its elements" <|
group_builder.specify "should allow to partition its elements" <|
elements = 0.up_to 10
r1 = elements.partition (x -> x % 2 == 0)
r1.first . should_equal [0, 2, 4, 6, 8]
@ -187,18 +187,18 @@ spec = Test.group "Range" <|
r2.first . should_equal [4, 5, 6, 7, 8, 9]
r2.second . should_equal [0, 1, 2, 3]

Test.specify "should allow iteration" <|
group_builder.specify "should allow iteration" <|
vec_mut = Vector.new_builder
1.up_to 6 . each (i -> vec_mut.append i)
vec_mut.to_vector . should_equal [1, 2, 3, 4, 5]
Test.specify "should allow iteration, with error propagation and early exit" <|
group_builder.specify "should allow iteration, with error propagation and early exit" <|
vec_mut = Vector.new_builder
result = 1.up_to 6 . each_propagate i->
if i >= 3 then Error.throw (Illegal_Argument.Error "dummy") else
vec_mut.append i
result . should_fail_with Illegal_Argument
vec_mut.to_vector . should_equal [1, 2]
Test.specify "should allow iteration with index" <|
group_builder.specify "should allow iteration with index" <|
vec_mut = Vector.new_builder
5.up_to 8 . each_with_index ix-> elem->
vec_mut.append (Pair.new ix elem)
@ -208,28 +208,28 @@ spec = Test.group "Range" <|
5.up_to 10 . with_step 2 . each_with_index ix-> elem->
vec_mut_2.append (Pair.new ix elem)
vec_mut_2.to_vector . should_equal [Pair.new 0 5, Pair.new 1 7, Pair.new 2 9]
Test.specify "should be able to be folded" <|
group_builder.specify "should be able to be folded" <|
1.up_to 6 . fold 0 (+) . should_equal 15
1.up_to 1 . fold 123 (+) . should_equal 123
Test.specify "should be able to perform a running fold" <|
group_builder.specify "should be able to perform a running fold" <|
1.up_to 6 . running_fold 0 (+) . should_equal [1, 3, 6, 10, 15]
1.up_to 1 . running_fold 123 (+) . should_equal []
Test.specify "should be able to be reduced" <|
group_builder.specify "should be able to be reduced" <|
1.up_to 6 . reduce (+) . should_equal 15
1.up_to 6 . with_step 2 . reduce (+) . should_equal 9
1.up_to 1 . reduce (+) . should_fail_with Empty_Error
1.up_to 1 . reduce (+) 0 . should_equal 0
Test.specify "should check all" <|
group_builder.specify "should check all" <|
1.up_to 10 . all (> 0) . should_be_true
1.up_to 10 . all (< 0) . should_be_false
1.up_to 10 . all (Filter_Condition.Greater 10) . should_be_false
Test.expect_panic_with (1.up_to 10 . all "invalid arg") Type_Error
Test.specify "should check any" <|
group_builder.specify "should check any" <|
1.up_to 10 . any (> 5) . should_be_true
1.up_to 10 . any (> 10) . should_be_false
1.up_to 10 . any (Filter_Condition.Greater 5) . should_be_true
Test.expect_panic_with (1.up_to 10 . any "invalid arg") Type_Error
Test.specify "should find elements" <|
group_builder.specify "should find elements" <|
1.up_to 10 . find (> 5) . should_equal 6
1.up_to 10 . find (Filter_Condition.Greater than=5) . should_equal 6
1.up_to 10 . find (> 10) . should_be_a Nothing
@ -238,7 +238,7 @@ spec = Test.group "Range" <|
1.up_to 10 . find (< 5) start=10 . should_fail_with Index_Out_Of_Bounds
1.up_to 10 . find (< 5) start=10 . catch . should_equal (Index_Out_Of_Bounds.Error 10 10)
Test.expect_panic_with (1.up_to 10 . find "invalid arg") Type_Error
Test.specify "should find index of elements" <|
group_builder.specify "should find index of elements" <|
1.up_to 10 . index_of (> 5) . should_equal 5
1.up_to 10 . index_of 7 . should_equal 6
1.up_to 10 . with_step 2 . index_of (> 4) . should_equal 2
@ -256,7 +256,7 @@ spec = Test.group "Range" <|
1.up_to 10 . index_of (Filter_Condition.Greater than=5) . should_equal 5
1.up_to 10 . index_of "invalid arg" . should_fail_with Illegal_Argument
1.up_to 10 . index_of 2.5 . should_fail_with Illegal_Argument
Test.specify "should find last index of elements" <|
group_builder.specify "should find last index of elements" <|
1.up_to 10 . last_index_of (> 5) . should_equal 8
1.up_to 10 . last_index_of 7 . should_equal 6
1.up_to 10 . with_step 2 . last_index_of (> 4) . should_equal 4
@ -274,14 +274,14 @@ spec = Test.group "Range" <|
1.up_to 10 . last_index_of (Filter_Condition.Greater than=5) . should_equal 8
1.up_to 10 . last_index_of "invalid arg" . should_fail_with Illegal_Argument
1.up_to 10 . last_index_of 2.5 . should_fail_with Illegal_Argument
Test.specify "should allow conversion to vector" <|
group_builder.specify "should allow conversion to vector" <|
1.up_to 6 . to_vector . should_equal [1, 2, 3, 4, 5]
Test.specify "should allow reversing" <|
group_builder.specify "should allow reversing" <|
1.up_to 6 . reverse . should_equal (5.down_to 0)
5.down_to 0 . reverse . should_equal (1.up_to 6)
1.up_to 6 . with_step 2 . reverse . should_equal (5.down_to -1 . with_step 2)

Test.specify "should allow checking if a value is in the range" <|
group_builder.specify "should allow checking if a value is in the range" <|
0.up_to 10 . contains 5 . should_be_true
0.up_to 10 . contains 0 . should_be_true
0.up_to 10 . contains 9 . should_be_true
@ -309,7 +309,7 @@ spec = Test.group "Range" <|
builder = Vector.new_builder
range.each builder.append
builder.to_vector
Test.specify "should behave correctly if it is empty" <|
group_builder.specify "should behave correctly if it is empty" <|
check_empty_range r =
r.is_empty . should_be_true
r.not_empty . should_be_false
@ -334,7 +334,7 @@ spec = Test.group "Range" <|
check_empty_range (Range.Between 0 10 -1)
check_empty_range (Range.Between -1 0 -2)

Test.specify "should behave correctly when containing exactly one element" <|
group_builder.specify "should behave correctly when containing exactly one element" <|
r1 = Range.Between 10 11
r1.is_empty . should_be_false
r1.not_empty . should_be_true
@ -351,7 +351,7 @@ spec = Test.group "Range" <|
r1.find (x-> x*x == 25) . should_equal Nothing
verify_contains r1 [10] [-1, 0, 1, 2, 9, 11, 12]

Test.specify "should behave correctly with step greater than 1" <|
group_builder.specify "should behave correctly with step greater than 1" <|
r1 = Range.Between 0 10 2
r1.is_empty . should_be_false
r1.not_empty . should_be_true
@ -449,7 +449,7 @@ spec = Test.group "Range" <|
r6.filter (_ < 4) . should_equal [0, 3]
verify_contains r6 [0, 3, 6, 9] [-3, -2, -1, 1, 2, 4, 5, 7, 8, 10, 11]

Test.specify "should behave correctly with negative step" <|
group_builder.specify "should behave correctly with negative step" <|
r1 = Range.Between 4 0 -1
r1.is_empty . should_be_false
r1.not_empty . should_be_true
@ -514,7 +514,7 @@ spec = Test.group "Range" <|
r4.find (x-> x*x == 0) . should_equal Nothing
verify_contains r4 [3] [-3, -2, -1, 0, 1, 2, 4, 5, 6, 7, 10]

Test.specify "should report errors if trying to set step to 0" <|
group_builder.specify "should report errors if trying to set step to 0" <|
0.up_to 0 . with_step 0 . should_fail_with Illegal_State
invalid_range = Range.Between 0 0 0
invalid_range . length . should_fail_with Illegal_State
@ -538,4 +538,8 @@ spec = Test.group "Range" <|
invalid_range . find _->True . should_fail_with Illegal_State
invalid_range . contains 0 . should_fail_with Illegal_State

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -1,10 +1,10 @@
from Standard.Base import Nothing, Vector, Number, Float, True, False, Regression
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

spec =

add_specs suite_builder =
## Regression test data produced using an Excel spreadsheet.
https://github.com/enso-org/enso/files/9160145/Regression.tests.xlsx

@ -16,18 +16,18 @@ spec =
_ : Float -> v.should_equal e epsilon=double_error
_ -> v.should_equal e

Test.group "Regression" <|
Test.specify "return an error if the vector lengths do not match" <|
suite_builder.group "Regression" group_builder->
group_builder.specify "return an error if the vector lengths do not match" <|
known_xs = [2, 3, 5, 7, 9]
known_ys = [4, 5, 7, 10]
Regression.fit_least_squares known_xs known_ys . should_fail_with Illegal_Argument

Test.specify "return an error if the X values are all the same" <|
group_builder.specify "return an error if the X values are all the same" <|
known_xs = [2, 2, 2, 2]
known_ys = [4, 5, 7, 10]
Regression.fit_least_squares known_xs known_ys . should_fail_with Regression.Fit_Error

Test.specify "compute the linear trend line" <|
group_builder.specify "compute the linear trend line" <|
known_xs = [2, 3, 5, 7, 9]
known_ys = [4, 5, 7, 10, 15]
fitted = Regression.fit_least_squares known_xs known_ys
@ -35,7 +35,7 @@ spec =
fitted.intercept . should_equal 0.304878049 epsilon=double_error
fitted.r_squared . should_equal 0.959530147 epsilon=double_error

Test.specify "predict values on a linear trend line" <|
group_builder.specify "predict values on a linear trend line" <|
known_xs = [2, 3, 5, 7, 9]
known_ys = [4, 5, 7, 10, 15]
fitted = Regression.fit_least_squares known_xs known_ys
@ -43,7 +43,7 @@ spec =
expected_ys = [1.823171, 6.378049, 9.414634, 12.45122, 15.487805]
vector_compare (test_xs.map fitted.predict) expected_ys

Test.specify "compute the linear trend line with an intercept" <|
group_builder.specify "compute the linear trend line with an intercept" <|
known_xs = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
known_ys = [8.02128, 11.02421, 13.99566, 17.02678, 20.00486, 22.95283, 26.0143, 29.03238, 31.96427, 35.03896]
fitted = Regression.fit_least_squares known_xs known_ys (Regression.Model.Linear 100)
@ -51,7 +51,7 @@ spec =
fitted.intercept . should_equal 100.0 epsilon=double_error
fitted.r_squared . should_equal 0.9999900045 epsilon=double_error

Test.specify "compute the exponential trend line" <|
group_builder.specify "compute the exponential trend line" <|
known_xs = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
known_ys = [0.28652, 0.31735, 0.31963, 0.38482, 0.40056, 0.39013, 0.4976, 0.5665, 0.55457, 0.69135]
fitted = Regression.fit_least_squares known_xs known_ys Regression.Model.Exponential
@ -59,7 +59,7 @@ spec =
fitted.b . should_equal 0.09358242 epsilon=double_error
fitted.r_squared . should_equal 0.9506293649 epsilon=double_error

Test.specify "predict values on a exponential trend line" <|
group_builder.specify "predict values on a exponential trend line" <|
known_xs = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
known_ys = [0.28652, 0.31735, 0.31963, 0.38482, 0.40056, 0.39013, 0.4976, 0.5665, 0.55457, 0.69135]
fitted = Regression.fit_least_squares known_xs known_ys Regression.Model.Exponential
@ -67,7 +67,7 @@ spec =
expected_ys = [0.253564, 0.709829, 0.779464, 1.032103]
vector_compare (test_xs.map fitted.predict) expected_ys

Test.specify "compute the exponential trend line with an intercept" <|
group_builder.specify "compute the exponential trend line with an intercept" <|
known_xs = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
known_ys = [0.28652, 0.31735, 0.31963, 0.38482, 0.40056, 0.39013, 0.4976, 0.5665, 0.55457, 0.69135]
fitted = Regression.fit_least_squares known_xs known_ys (Regression.Model.Exponential 0.2)
@ -75,7 +75,7 @@ spec =
fitted.b . should_equal 0.127482464 epsilon=double_error
fitted.r_squared . should_equal 0.9566066546 epsilon=double_error

Test.specify "compute the logarithmic trend line" <|
group_builder.specify "compute the logarithmic trend line" <|
known_xs = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
known_ys = [0.12128, 0.29057, 0.35933, 0.45949, 0.49113, 0.48285, 0.58132, 0.63144, 0.5916, 0.69158]
fitted = Regression.fit_least_squares known_xs known_ys Regression.Model.Logarithmic
@ -83,7 +83,7 @@ spec =
fitted.b . should_equal 0.11857587 epsilon=double_error
fitted.r_squared . should_equal 0.9730840179 epsilon=double_error

Test.specify "predict values on a logarithmic trend line" <|
group_builder.specify "predict values on a logarithmic trend line" <|
known_xs = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
known_ys = [0.12128, 0.29057, 0.35933, 0.45949, 0.49113, 0.48285, 0.58132, 0.63144, 0.5916, 0.69158]
fitted = Regression.fit_least_squares known_xs known_ys Regression.Model.Logarithmic
@ -91,7 +91,7 @@ spec =
expected_ys = [-0.417241, 0.676572, 0.696819, 0.748745]
vector_compare (test_xs.map fitted.predict) expected_ys

Test.specify "compute the power trend line" <|
group_builder.specify "compute the power trend line" <|
known_xs = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
known_ys = [0.26128, 0.28144, 0.26353, 0.30247, 0.28677, 0.23992, 0.30586, 0.32785, 0.26324, 0.3411]
fitted = Regression.fit_least_squares known_xs known_ys Regression.Model.Power
@ -99,7 +99,7 @@ spec =
fitted.b . should_equal 0.065513849 epsilon=double_error
fitted.r_squared . should_equal 0.2099579581 epsilon=double_error

Test.specify "predict values on a power trend line" <|
group_builder.specify "predict values on a power trend line" <|
known_xs = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
known_ys = [0.26128, 0.28144, 0.26353, 0.30247, 0.28677, 0.23992, 0.30586, 0.32785, 0.26324, 0.3411]
fitted = Regression.fit_least_squares known_xs known_ys Regression.Model.Power
@ -107,4 +107,8 @@ spec =
expected_ys = [0.222594, 0.302868, 0.3046, 0.309085]
vector_compare (test_xs.map fitted.predict) expected_ys

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -1,12 +1,12 @@
from Standard.Base import all
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument

from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all

spec =
Test.group "Enso Set" <|
Test.specify "should allow checking for emptiness" <|

add_specs suite_builder =
suite_builder.group "Enso Set" group_builder->
group_builder.specify "should allow checking for emptiness" <|
empty_map = Set.empty
non_empty = Set.empty . insert "foo"
empty_map.is_empty . should_be_true
@ -15,7 +15,7 @@ spec =
empty_map.not_empty . should_be_false
non_empty.not_empty . should_be_true

Test.specify "should be constructed from a vector" <|
group_builder.specify "should be constructed from a vector" <|
s1 = Set.from_vector [1, 2, 3, 2]
s1.size . should_equal 3
s1.to_vector.sort . should_equal [1, 2, 3]
@ -23,14 +23,14 @@ spec =
r2 = Set.from_vector [1, 2, 2] error_on_duplicates=True
r2.should_fail_with Illegal_Argument

Test.specify "should allow checking contains" <|
group_builder.specify "should allow checking contains" <|
s1 = Set.from_vector [1, 2, 3, 2]
s1.contains 1 . should_be_true
s1.contains 2 . should_be_true
s1.contains 3 . should_be_true
s1.contains 4 . should_be_false

Test.specify "should allow checking contains with relational NULL logic" <|
group_builder.specify "should allow checking contains with relational NULL logic" <|
Set.from_vector [1, 2] . contains_relational 1 . should_be_true
Set.from_vector [1, 2] . contains_relational 3 . should_be_false
Set.from_vector [1, 2, Nothing] . contains_relational 1 . should_be_true
@ -40,7 +40,7 @@ spec =
Set.from_vector [Nothing] . contains_relational Nothing . should_equal Nothing
Set.from_vector [] . contains_relational Nothing . should_be_false

Test.specify "should allow to compute a union, intersection and difference" <|
group_builder.specify "should allow to compute a union, intersection and difference" <|
s1 = Set.from_vector [1, 2]
s2 = Set.from_vector [2, 3]
s3 = Set.from_vector [3, 4]
@ -53,7 +53,7 @@ spec =
(s1.difference s3).to_vector.sort . should_equal [1, 2]
(s1.difference s1).to_vector . should_equal []

Test.specify "should allow to check for equality of two sets" <|
group_builder.specify "should allow to check for equality of two sets" <|
s1 = Set.from_vector [1, 2]
s2 = Set.from_vector [2, 1, 1]
s3 = Set.from_vector [1, 2, 3]
@ -62,4 +62,8 @@ spec =
(s1 == s1) . should_be_true
(s1 == s3) . should_be_false

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -4,8 +4,8 @@ import Standard.Base.Errors.Common.Incomparable_Values
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Errors.Illegal_State.Illegal_State

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


# === Test Resources ===

@ -45,7 +45,7 @@ Comparable.from (_:Bad_Comparator_Type) = Bad_Comparator

# Tests

spec =
add_specs suite_builder =
double_error = 0.000001

vector_compare values expected =
@ -54,21 +54,21 @@ spec =
_ : Float -> v.should_equal (expected.at i) epsilon=double_error
_ -> v.should_equal (expected.at i)

Test.group "Statistics" <|
suite_builder.group "Statistics" group_builder->
simple_set = [1, 2, 3, 4, 5]
number_set = [0.4, -18.56, -16.99, -16.43, -45.84, 13.44, -6.85, 9.68, -8.55, 10.87, 10.38, 33.85, -41.02, 1.87, -26.52, -13.87, -39.06, 25.92, -16.01, 42.01]
missing_set = number_set.map_with_index i->v->(if i % 5 == 4 then Nothing else v)
with_nans_set = number_set.map_with_index i->v->(if i % 5 == 4 then (if i % 10 == 9 then Number.nan else Nothing) else v)
text_set = ["A", "B", Nothing, "D"]

Test.specify "should be able to count valid values" <|
group_builder.specify "should be able to count valid values" <|
simple_set.compute . should_equal 5
number_set.compute . should_equal 20
missing_set.compute . should_equal 16
with_nans_set.compute . should_equal 16
text_set.compute . should_equal 3

Test.specify "should be able to get minimum of maximum values" <|
group_builder.specify "should be able to get minimum of maximum values" <|
simple_set.compute Statistic.Minimum . should_equal 1
number_set.compute Statistic.Minimum . should_equal -45.84 epsilon=double_error
missing_set.compute Statistic.Minimum . should_equal -41.02 epsilon=double_error
@ -80,72 +80,72 @@ spec =
with_nans_set.compute Statistic.Maximum . should_equal 33.85 epsilon=double_error
text_set.compute Statistic.Maximum . should_equal "D"

Test.specify "should be able to get sum of values" <|
group_builder.specify "should be able to get sum of values" <|
simple_set.compute Statistic.Sum . should_equal 15 epsilon=double_error
number_set.compute Statistic.Sum . should_equal -101.28 epsilon=double_error
missing_set.compute Statistic.Sum . should_equal -81.8 epsilon=double_error
with_nans_set.compute Statistic.Sum . should_equal -81.8 epsilon=double_error

Test.specify "should be able to get mean of values" <|
group_builder.specify "should be able to get mean of values" <|
simple_set.compute Statistic.Mean . should_equal 3 epsilon=double_error
number_set.compute Statistic.Mean . should_equal -5.064 epsilon=double_error
missing_set.compute Statistic.Mean . should_equal -5.1125 epsilon=double_error
with_nans_set.compute Statistic.Mean . should_equal -5.1125 epsilon=double_error

Test.specify "should be able to get sample variance of values" <|
group_builder.specify "should be able to get sample variance of values" <|
simple_set.compute Statistic.Variance . should_equal 2.5 epsilon=double_error
number_set.compute Statistic.Variance . should_equal 582.0137832 epsilon=double_error
missing_set.compute Statistic.Variance . should_equal 431.0218867 epsilon=double_error
with_nans_set.compute Statistic.Variance . should_equal 431.0218867 epsilon=double_error
[1].compute Statistic.Variance . is_nan . should_equal True

Test.specify "should be able to get population variance of values" <|
group_builder.specify "should be able to get population variance of values" <|
simple_set.compute (Statistic.Variance True) . should_equal 2 epsilon=double_error
number_set.compute (Statistic.Variance True) . should_equal 552.913094 epsilon=double_error
missing_set.compute (Statistic.Variance True) . should_equal 404.0830188 epsilon=double_error
with_nans_set.compute (Statistic.Variance True) . should_equal 404.0830188 epsilon=double_error

Test.specify "should be able to get population standard deviation of values" <|
group_builder.specify "should be able to get population standard deviation of values" <|
simple_set.compute Statistic.Standard_Deviation . should_equal 1.58113883 epsilon=double_error
number_set.compute Statistic.Standard_Deviation . should_equal 24.12496183 epsilon=double_error
missing_set.compute Statistic.Standard_Deviation . should_equal 20.76106661 epsilon=double_error
with_nans_set.compute Statistic.Standard_Deviation . should_equal 20.76106661 epsilon=double_error
[1].compute Statistic.Standard_Deviation . is_nan . should_equal True

Test.specify "should be able to get sample standard deviation of values" <|
group_builder.specify "should be able to get sample standard deviation of values" <|
simple_set.compute (Statistic.Standard_Deviation True) . should_equal 1.414213562 epsilon=double_error
number_set.compute (Statistic.Standard_Deviation True) . should_equal 23.51410415 epsilon=double_error
missing_set.compute (Statistic.Standard_Deviation True) . should_equal 20.1018163 epsilon=double_error
with_nans_set.compute (Statistic.Standard_Deviation True) . should_equal 20.1018163 epsilon=double_error

Test.specify "should be able to get sample skewness of values" <|
group_builder.specify "should be able to get sample skewness of values" <|
simple_set.compute Statistic.Skew . should_equal 0 epsilon=double_error
number_set.compute Statistic.Skew . should_equal 0.165086552 epsilon=double_error
missing_set.compute Statistic.Skew . should_equal 0.084238123 epsilon=double_error
with_nans_set.compute Statistic.Skew . should_equal 0.084238123 epsilon=double_error
[1, 2].compute Statistic.Skew . is_nan . should_equal True

Test.specify "should be able to get population skewness of values" <|
group_builder.specify "should be able to get population skewness of values" <|
simple_set.compute (Statistic.Skew True) . should_equal 0 epsilon=double_error
number_set.compute (Statistic.Skew True) . should_equal 0.152437706 epsilon=double_error
missing_set.compute (Statistic.Skew True) . should_equal 0.076125664 epsilon=double_error
with_nans_set.compute (Statistic.Skew True) . should_equal 0.076125664 epsilon=double_error
[1, 2].compute (Statistic.Skew True) . is_nan . should_equal True

Test.specify "should be able to get sample kurtosis of values" <|
group_builder.specify "should be able to get sample kurtosis of values" <|
simple_set.compute Statistic.Kurtosis . should_equal -1.2 epsilon=double_error
number_set.compute Statistic.Kurtosis . should_equal -0.449422438 epsilon=double_error
missing_set.compute Statistic.Kurtosis . should_equal -0.201991074 epsilon=double_error
with_nans_set.compute Statistic.Kurtosis . should_equal -0.201991074 epsilon=double_error
[1, 2, 3].compute Statistic.Kurtosis . is_nan . should_equal True

Test.specify "should allow bulk computation" <|
group_builder.specify "should allow bulk computation" <|
stats = [Statistic.Count, Statistic.Minimum, Statistic.Mean, Statistic.Variance, Statistic.Skew]
expected = [20, -45.84, -5.064, 582.0137832, 0.165086552]
values = number_set.compute_bulk stats
vector_compare values expected

Test.specify "should allow running computation" <|
group_builder.specify "should allow running computation" <|
number_set.running . should_equal [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20]
expected_counts = [1, 2, 3, 4, 4, 5, 6, 7, 8, 8, 9, 10, 11, 12, 12, 13, 14, 15, 16, 16]
expected_sums = [0.4, -18.16, -35.15, -51.58, -51.58, -38.14, -44.99, -35.31, -43.86, -43.86, -33.48, 0.37, -40.65, -38.78, -38.78, -52.65, -91.71, -65.79, -81.8, -81.8]
@ -153,7 +153,7 @@ spec =
values = missing_set.running Statistic.Sum
vector_compare values expected_sums

Test.specify "should allow running computation on vectors with missing" <|
group_builder.specify "should allow running computation on vectors with missing" <|
missing_set = [Nothing, 1, 3, Nothing, Number.nan, 2]
missing_set.running . should_equal [0, 1, 2, 2, 2, 3]
missing_set.running Statistic.Minimum . should_equal [Nothing, 1, 1, 1, 1, 1]
@ -163,54 +163,54 @@ spec =
missing_set.running Statistic.Mean . drop 1 . should_equal [1, 2, 2, 2, 2]


Test.group "Statistics - empty Vector " <|
Test.specify "should be able to count and sum on empty Vector" <|
suite_builder.group "Statistics - empty Vector " group_builder->
group_builder.specify "should be able to count and sum on empty Vector" <|
[].compute . should_equal 0
[].compute Statistic.Sum . should_equal 0

Test.specify "should fail with Empty_Error for Minimum and Maximum on empty Vector" <|
group_builder.specify "should fail with Empty_Error for Minimum and Maximum on empty Vector" <|
[].compute Statistic.Minimum . should_fail_with Empty_Error
[].compute Statistic.Maximum . should_fail_with Empty_Error

Test.specify "should be NaN for other statistics sum on empty Vector" <|
group_builder.specify "should be NaN for other statistics sum on empty Vector" <|
[].compute Statistic.Mean . is_nan . should_equal True
[].compute Statistic.Variance . is_nan . should_equal True
[].compute Statistic.Skew . is_nan . should_equal True
[].compute Statistic.Kurtosis . is_nan . should_equal True

Test.group "Statistics - NaN Vector " <|
Test.specify "should be able to count and sum on NaN Vector" <|
suite_builder.group "Statistics - NaN Vector " group_builder->
group_builder.specify "should be able to count and sum on NaN Vector" <|
[Number.nan, Number.nan].compute . should_equal 0
[Number.nan, Number.nan].running . should_equal [0, 0]
[Number.nan, Number.nan].compute Statistic.Sum . should_equal 0
[Number.nan, Number.nan].running Statistic.Sum . should_equal [0, 0]

Test.specify "should fail with Empty_Error for Minimum and Maximum on NaN Vector" <|
group_builder.specify "should fail with Empty_Error for Minimum and Maximum on NaN Vector" <|
[Number.nan, Number.nan].compute Statistic.Minimum . should_fail_with Empty_Error
[Number.nan, Number.nan].running Statistic.Minimum . should_equal [Nothing, Nothing]
[Number.nan, Number.nan].compute Statistic.Maximum . should_fail_with Empty_Error
[Number.nan, Number.nan].running Statistic.Maximum . should_equal [Nothing, Nothing]

Test.specify "should be NaN for other statistics sum on NaN Vector" <|
group_builder.specify "should be NaN for other statistics sum on NaN Vector" <|
[Number.nan, Number.nan, Number.nan, Number.nan].compute Statistic.Mean . is_nan . should_equal True
[Number.nan, Number.nan, Number.nan, Number.nan].compute Statistic.Variance . is_nan . should_equal True
[Number.nan, Number.nan, Number.nan, Number.nan].compute Statistic.Skew . is_nan . should_equal True
[Number.nan, Number.nan, Number.nan, Number.nan].compute Statistic.Kurtosis . is_nan . should_equal True

Test.group "Statistics - Nothing Vector " <|
Test.specify "should be able to count and sum on Nothing Vector" <|
suite_builder.group "Statistics - Nothing Vector " group_builder->
group_builder.specify "should be able to count and sum on Nothing Vector" <|
[Nothing, Nothing].compute . should_equal 0
[Nothing, Nothing].running . should_equal [0, 0]
[Nothing, Nothing].compute Statistic.Sum . should_equal 0
[Nothing, Nothing].running Statistic.Sum . should_equal [0, 0]

Test.specify "should fail with Empty_Error for Minimum and Maximum on Nothing Vector" <|
group_builder.specify "should fail with Empty_Error for Minimum and Maximum on Nothing Vector" <|
[Nothing, Nothing].compute Statistic.Minimum . should_fail_with Empty_Error
[Nothing, Nothing].running Statistic.Minimum . should_equal [Nothing, Nothing]
[Nothing, Nothing].compute Statistic.Maximum . should_fail_with Empty_Error
[Nothing, Nothing].running Statistic.Maximum . should_equal [Nothing, Nothing]

Test.specify "should be NaN for other statistics sum on Nothing Vector" <|
group_builder.specify "should be NaN for other statistics sum on Nothing Vector" <|
[Nothing, Nothing, Nothing, Nothing].compute Statistic.Mean . is_nan . should_equal True
[Nothing, Nothing, Nothing, Nothing].running Statistic.Mean . all _.is_nan . should_equal True
[Nothing, Nothing, Nothing, Nothing].compute Statistic.Variance . is_nan . should_equal True
@ -220,12 +220,12 @@ spec =
[Nothing, Nothing, Nothing, Nothing].compute Statistic.Kurtosis . is_nan . should_equal True
[Nothing, Nothing, Nothing, Nothing].running Statistic.Kurtosis . all _.is_nan . should_equal True

Test.group "Statistics - invalid input" <|
suite_builder.group "Statistics - invalid input" group_builder->
text_set = ["A", "B", Nothing, "D"]
ord_set = [Ord.Value 10, Ord.Value 2, Nothing, Ord.Value 9]
no_ord_set = [No_Ord.Value 10, No_Ord.Value 2, Nothing, No_Ord.Value 9]

Test.specify "should fail with Illegal_Argument on number based statistics for text Vector" <|
group_builder.specify "should fail with Illegal_Argument on number based statistics for text Vector" <|
text_set.compute Statistic.Sum . should_fail_with Illegal_Argument
text_set.compute Statistic.Mean . should_fail_with Illegal_Argument
text_set.compute Statistic.Variance . should_fail_with Illegal_Argument
@ -233,12 +233,12 @@ spec =
text_set.compute Statistic.Kurtosis . should_fail_with Illegal_Argument
text_set.running Statistic.Sum . should_fail_with Illegal_Argument

Test.specify "should be able to do Count, Minimum and Maximum on custom type with custom ordered comparator" <|
group_builder.specify "should be able to do Count, Minimum and Maximum on custom type with custom ordered comparator" <|
ord_set.compute . should_equal 3
ord_set.compute Statistic.Minimum . should_equal (Ord.Value 2)
ord_set.compute Statistic.Maximum . should_equal (Ord.Value 10)

Test.specify "should fail with Incomparable_Values on custom incomparable type" <|
group_builder.specify "should fail with Incomparable_Values on custom incomparable type" <|
no_ord_set.compute . should_equal 3
no_ord_set.running . should_equal [1,2,2,3]
no_ord_set.compute Statistic.Minimum . should_fail_with Incomparable_Values
@ -246,7 +246,7 @@ spec =
no_ord_set.compute Statistic.Maximum . should_fail_with Incomparable_Values
no_ord_set.running Statistic.Maximum . should_fail_with Incomparable_Values

Test.specify "should fail with the underlying error on broken comparator" <|
group_builder.specify "should fail with the underlying error on broken comparator" <|
bad_comparator_set = [Bad_Comparator_Type.Value 10, Bad_Comparator_Type.Value 2, Nothing, Bad_Comparator_Type.Value 9]
bad_comparator_set.compute . should_equal 3
bad_comparator_set.running . should_equal [1,2,2,3]
@ -255,11 +255,11 @@ spec =
bad_comparator_set.compute Statistic.Maximum . should_fail_with Illegal_State
bad_comparator_set.running Statistic.Maximum . should_fail_with Illegal_State

Test.specify "should fail with Incomparable_Values on mixed Vectors" <|
group_builder.specify "should fail with Incomparable_Values on mixed Vectors" <|
[1, False].compute Statistic.Minimum . should_fail_with Incomparable_Values

Test.group "Rank Data" <|
Test.specify "can rank a Float data series" <|
suite_builder.group "Rank Data" group_builder->
group_builder.specify "can rank a Float data series" <|
values = [409.892906, 0.839952, 796.468572, 126.931298, -405.265005, -476.675817, 441.651325, 796.468572, 78.50094, 340.163324, 234.861926, 409.892906, 226.467105, 234.861926, 126.931298, 637.870512, -71.008044, -386.399663, -126.534337, -476.675817, 78.50094, -386.399663, 409.892906, 868.54485, 669.113037, 669.113037, 0.839952, 407.162613, -476.675817, 126.931298]
Statistic.rank_data values . should_equal [9, 21.5, 2.5, 17, 27, 29, 7, 2.5, 19.5, 12, 13.5, 9, 15, 13.5, 17, 6, 23, 25.5, 24, 29, 19.5, 25.5, 9, 1, 4.5, 4.5, 21.5, 11, 29, 17]
Statistic.rank_data values Rank_Method.Minimum . should_equal [8, 21, 2, 16, 27, 28, 7, 2, 19, 12, 13, 8, 15, 13, 16, 6, 23, 25, 24, 28, 19, 25, 8, 1, 4, 4, 21, 11, 28, 16]
@ -267,62 +267,62 @@ spec =
Statistic.rank_data values Rank_Method.Ordinal . should_equal [8, 21, 2, 16, 27, 28, 7, 3, 19, 12, 13, 9, 15, 14, 17, 6, 23, 25, 24, 29, 20, 26, 10, 1, 4, 5, 22, 11, 30, 18]
Statistic.rank_data values Rank_Method.Dense . should_equal [6, 13, 2, 11, 17, 18, 5, 2, 12, 8, 9, 6, 10, 9, 11, 4, 14, 16, 15, 18, 12, 16, 6, 1, 3, 3, 13, 7, 18, 11]

Test.specify "can rank an Integer data series" <|
group_builder.specify "can rank an Integer data series" <|
values = [10, 1, 124, 10]
Statistic.rank_data values . should_equal [2.5, 4, 1, 2.5]

Test.specify "can rank a Number data series" <|
group_builder.specify "can rank a Number data series" <|
values = [10.0, 1, 12.4, 10]
Statistic.rank_data values . should_equal [2.5, 4, 1, 2.5]

Test.specify "can rank a Text data series" <|
group_builder.specify "can rank a Text data series" <|
values = ["G", "AA", "B", "G", "D"]
Statistic.rank_data values . should_equal [1.5, 5, 4, 1.5, 3]

Test.specify "should fail with Incomparable_Values on custom incomparable type" <|
group_builder.specify "should fail with Incomparable_Values on custom incomparable type" <|
values = [No_Ord.Value 10, No_Ord.Value 2, No_Ord.Value 9]
Statistic.rank_data values . should_fail_with Incomparable_Values

Test.specify "should fail with Incomparable_Values on mixed Vectors" <|
group_builder.specify "should fail with Incomparable_Values on mixed Vectors" <|
Statistic.rank_data [1, "A"] . should_fail_with Incomparable_Values

Test.specify "should fail with Illegal_Argument on Vectors with Nothing" <|
group_builder.specify "should fail with Illegal_Argument on Vectors with Nothing" <|
Statistic.rank_data [1, Nothing, 4] . should_fail_with Illegal_Argument

Test.group "Correlation Statistics" <|
suite_builder.group "Correlation Statistics" group_builder->
series_a = [0.22345,0.258315,0.74663,Nothing,0.686843,0.692246,Nothing,0.401859,0.725442,Nothing,0.963527,0.520363,0.633053,0.397123,Nothing,0.458942,0.036499,0.368194,0.598939,0.296476,0.093746,0.609329]
series_b = [0.140743,Nothing,0.574639,0.251683,0.902023,0.08723,0.251813,0.1669,0.234405,Nothing,0.28774,0.471757,0.280681,0.925207,0.919041,0.626234,0.429497,0.358597,0.566118,0.333606,0.828172,0.887829]
series_c = [Nothing,0.769797,0.281678,0.462145,0.727132,0.327978,Nothing,0.648639,0.562636,Nothing,0.159836,0.367404,0.877087,0.365483,Nothing,0.931873,0.723546,0.558085,0.163396,0.940997,0.399685,0.617509]
series = [series_a, series_b, series_c]

Test.specify "can compute Covariance, Correlation and R Squared between a pair of series" <|
group_builder.specify "can compute Covariance, Correlation and R Squared between a pair of series" <|
series_a.compute (Statistic.Covariance series_b) . should_equal -0.0053554 epsilon=double_error
series_a.compute (Statistic.Pearson series_b) . should_equal -0.08263943 epsilon=double_error
series_a.compute (Statistic.Spearman series_b) . should_equal -0.09313725 epsilon=double_error
series_a.compute (Statistic.R_Squared series_b) . should_equal 0.006829275 epsilon=double_error

Test.specify "can calculate a covariance matrix" <|
group_builder.specify "can calculate a covariance matrix" <|
matrix = Statistic.covariance_matrix series
matrix.length . should_equal 3
vector_compare (matrix.at 0) [0.0571699, -0.0053554, -0.02378204]
vector_compare (matrix.at 1) [-0.0053554, 0.07707381, -0.00098274]
vector_compare (matrix.at 2) [-0.02378204, -0.00098274, 0.05837098]

Test.specify "can calculate a pearson correlation matrix" <|
group_builder.specify "can calculate a pearson correlation matrix" <|
matrix = Statistic.pearson_correlation series
matrix.length . should_equal 3
vector_compare (matrix.at 0) [1, -0.08263943, -0.40469045]
vector_compare (matrix.at 1) [-0.08263943, 1, -0.01537537]
vector_compare (matrix.at 2) [-0.40469045, -0.01537537, 1]

Test.specify "can calculate a spearman rank correlation matrix" <|
group_builder.specify "can calculate a spearman rank correlation matrix" <|
matrix = Statistic.spearman_correlation series
matrix.length . should_equal 3
vector_compare (matrix.at 0) [1, -0.09313725, -0.43382353]
vector_compare (matrix.at 1) [-0.09313725, 1, 0]
vector_compare (matrix.at 2) [-0.43382353, 0, 1]

Test.specify "should fail with Illegal_Argument if different lengths" <|
group_builder.specify "should fail with Illegal_Argument if different lengths" <|
data = [[1,2,3,4],[10,20,30]]
data.first.compute (Statistic.Covariance data.second) . should_fail_with Illegal_Argument
data.first.compute (Statistic.Pearson data.second) . should_fail_with Illegal_Argument
@ -332,7 +332,7 @@ spec =
Statistic.pearson_correlation data . should_fail_with Illegal_Argument
Statistic.spearman_correlation data . should_fail_with Illegal_Argument

Test.specify "should fail with Illegal_Argument if not number based" <|
group_builder.specify "should fail with Illegal_Argument if not number based" <|
text = [["A","BC","CD"], ["0", "1", "2"], ["H", "I", "J"]]
text.first.compute (Statistic.Covariance text.second) . should_fail_with Illegal_Argument
text.first.compute (Statistic.Pearson text.second) . should_fail_with Illegal_Argument
@ -342,10 +342,14 @@ spec =
Statistic.pearson_correlation text . should_fail_with Illegal_Argument
Statistic.spearman_correlation text . should_fail_with Illegal_Argument

Test.group "Statistics - invalid input" <|
Test.specify "should fail with Illegal_Argument on number based statistics for text Vector" <|
suite_builder.group "Statistics - invalid input" group_builder->
group_builder.specify "should fail with Illegal_Argument on number based statistics for text Vector" <|
series = [["A", "B", Nothing, "D"], ["A", "B", Nothing, "D"]]
Statistic.covariance_matrix series . should_fail_with Illegal_Argument
Statistic.pearson_correlation series . should_fail_with Illegal_Argument

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -2,35 +2,35 @@ from Standard.Base import all
import Standard.Base.Errors.Encoding_Error.Encoding_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument

from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all


polyglot java import java.lang.String as Java_String

spec =
Test.group "Encoding object" <|
Test.specify "Can get standard UTF encodings" <|
add_specs suite_builder =
suite_builder.group "Encoding object" group_builder->
group_builder.specify "Can get standard UTF encodings" <|
Encoding.utf_8 . should_equal (Encoding.Value "UTF-8")
Encoding.utf_16_le . should_equal (Encoding.Value "UTF-16LE")
Encoding.utf_16_be . should_equal (Encoding.Value "UTF-16BE")
Encoding.utf_32_le . should_equal (Encoding.Value "UTF-32LE")
Encoding.utf_32_be . should_equal (Encoding.Value "UTF-32BE")

Test.specify "Catches invalid character sets" <|
group_builder.specify "Catches invalid character sets" <|
invalid = Encoding.Value "NotAValidCharacterSet"
invalid.to_java_charset . should_fail_with Illegal_Argument

Test.specify "Can get full set of character sets" <|
group_builder.specify "Can get full set of character sets" <|
character_sets = Encoding.all_character_sets
character_sets.length . should_not_equal 0
character_sets.contains "UTF-8" . should_equal True

Test.specify "Can get full set of encoding objects" <|
group_builder.specify "Can get full set of encoding objects" <|
encodings = Encoding.all_encodings
encodings.length . should_not_equal 0
encodings.contains Encoding.utf_8 . should_equal True

Test.group "ASCII" <|
suite_builder.group "ASCII" group_builder->
test = 'Hello World!'
test_ascii = [72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33]

@ -38,17 +38,17 @@ spec =
invalid_ascii = [72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33, -56]
invalid_ascii_out = [72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33, 63]

Test.specify "should dump ASCII bytes to a vector via encoding" <|
group_builder.specify "should dump ASCII bytes to a vector via encoding" <|
dump = test.bytes Encoding.ascii
Test.assert_no_problems dump
dump . should_equal test_ascii

Test.specify "should convert an array of bytes to text" <|
group_builder.specify "should convert an array of bytes to text" <|
result = Text.from_bytes test_ascii Encoding.ascii
Test.assert_no_problems result
result . should_equal test

Test.specify "Invalid ASCII should raise a problem when decoding (error by default)" <|
group_builder.specify "Invalid ASCII should raise a problem when decoding (error by default)" <|
action = Text.from_bytes invalid_ascii Encoding.ascii on_problems=_
tester result = result . should_equal invalid
problems = [Encoding_Error.Error "Encoding issues at 12."]
@ -58,7 +58,7 @@ spec =
default_error.should_fail_with Encoding_Error
default_error.catch.message . should_equal "Encoding issues at 12."

Test.specify "Invalid ASCII should raise a problem when encoding (warning by default)" <|
group_builder.specify "Invalid ASCII should raise a problem when encoding (warning by default)" <|
action = invalid.bytes Encoding.ascii on_problems=_
tester result = result . should_equal invalid_ascii_out
problems = [Encoding_Error.Error "Encoding issues at 12."]
@ -68,24 +68,24 @@ spec =
default_warning.should_equal invalid_ascii_out
Problems.get_attached_warnings default_warning . should_contain_the_same_elements_as problems

Test.group "UTF_8" <|
suite_builder.group "UTF_8" group_builder->
kshi = '\u0915\u094D\u0937\u093F'
kshi_utf_8 = [-32, -92, -107, -32, -91, -115, -32, -92, -73, -32, -92, -65]

invalid = 'Hello World! ¢£¥\uFFFD'
invalid_utf_8 = [72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33, 32, -62, -94, -62, -93, -62, -91, -62]

Test.specify "should dump utf-8 bytes to a vector via encoding" <|
group_builder.specify "should dump utf-8 bytes to a vector via encoding" <|
dump = kshi.bytes Encoding.utf_8
Test.assert_no_problems dump
dump . should_equal kshi_utf_8

Test.specify "should dump utf-8 bytes to a vector" <|
group_builder.specify "should dump utf-8 bytes to a vector" <|
dump = kshi.utf_8
Test.assert_no_problems dump
dump.should_equal kshi_utf_8

Test.specify "should raise a problem when encoding (warning by default)" <|
group_builder.specify "should raise a problem when encoding (warning by default)" <|
unpaired_surrogate = Integer.parse "DEDC" 16
|
||||
text = Text.from_codepoints [unpaired_surrogate]
|
||||
|
||||
@ -97,23 +97,23 @@ spec =
|
||||
default_warning = text.utf_8
|
||||
Problems.get_attached_warnings default_warning . should_contain_the_same_elements_as problems
|
||||
|
||||
Test.specify "should convert an array of bytes to text via encoding" <|
|
||||
group_builder.specify "should convert an array of bytes to text via encoding" <|
|
||||
result = Text.from_bytes kshi_utf_8 Encoding.utf_8
|
||||
Test.assert_no_problems result
|
||||
result . should_equal kshi
|
||||
|
||||
Test.specify "should convert an array of bytes to text" <|
|
||||
group_builder.specify "should convert an array of bytes to text" <|
|
||||
result = Text.from_utf_8 kshi_utf_8
|
||||
Test.assert_no_problems result
|
||||
result . should_equal kshi
|
||||
|
||||
Test.specify "Invalid UTF-8 should raise a problem when decoding via encoding" <|
|
||||
group_builder.specify "Invalid UTF-8 should raise a problem when decoding via encoding" <|
|
||||
action = Text.from_bytes invalid_utf_8 Encoding.utf_8 on_problems=_
|
||||
tester result = result . should_equal invalid
|
||||
problems = [Encoding_Error.Error "Encoding issues at 19."]
|
||||
Problems.test_problem_handling action problems tester
|
||||
|
||||
Test.specify "Invalid UTF-8 should raise a problem when decoding (error by default)" <|
|
||||
group_builder.specify "Invalid UTF-8 should raise a problem when decoding (error by default)" <|
|
||||
action = Text.from_utf_8 invalid_utf_8 on_problems=_
|
||||
tester result = result . should_equal invalid
|
||||
problems = [Encoding_Error.Error "Encoding issues at 19."]
|
||||
@ -123,45 +123,45 @@ spec =
|
||||
default_error.should_fail_with Encoding_Error
|
||||
default_error.catch.message . should_equal "Encoding issues at 19."
|
||||
|
||||
Test.group "UTF_16 BigEndian" <|
|
||||
suite_builder.group "UTF_16 BigEndian" group_builder->
|
||||
kshi = '\u0915\u094D\u0937\u093F'
|
||||
kshi_utf_16 = [9, 21, 9, 77, 9, 55, 9, 63]
|
||||
|
||||
Test.specify "should dump utf-16 bytes to a vector via encoding" <|
|
||||
group_builder.specify "should dump utf-16 bytes to a vector via encoding" <|
|
||||
dump = kshi.bytes Encoding.utf_16_be
|
||||
Test.assert_no_problems dump
|
||||
dump . should_equal kshi_utf_16
|
||||
|
||||
Test.specify "should convert an array of bytes to text via encoding" <|
|
||||
group_builder.specify "should convert an array of bytes to text via encoding" <|
|
||||
result = Text.from_bytes kshi_utf_16 Encoding.utf_16_be
|
||||
Test.assert_no_problems result
|
||||
result . should_equal kshi
|
||||
|
||||
Test.group "UTF_16 LittleEndian" <|
|
||||
suite_builder.group "UTF_16 LittleEndian" group_builder->
|
||||
kshi = '\u0915\u094D\u0937\u093F'
|
||||
kshi_utf_16 = [21, 9, 77, 9, 55, 9, 63, 9]
|
||||
|
||||
Test.specify "should dump utf-16 bytes to a vector via encoding" <|
|
||||
group_builder.specify "should dump utf-16 bytes to a vector via encoding" <|
|
||||
dump = kshi.bytes Encoding.utf_16_le
|
||||
Test.assert_no_problems dump
|
||||
dump . should_equal kshi_utf_16
|
||||
|
||||
Test.specify "should convert an array of bytes to text via encoding" <|
|
||||
group_builder.specify "should convert an array of bytes to text via encoding" <|
|
||||
result = Text.from_bytes kshi_utf_16 Encoding.utf_16_le
|
||||
Test.assert_no_problems result
|
||||
result . should_equal kshi
|
||||
|
||||
Test.group "codepoints" <|
|
||||
suite_builder.group "codepoints" group_builder->
|
||||
facepalm = '\u{1F926}\u{1F3FC}\u200D\u2642\uFE0F'
|
||||
facepalm_codes = [129318, 127996, 8205, 9794, 65039]
|
||||
|
||||
Test.specify "should dump utf codepoints to a vector" <|
|
||||
group_builder.specify "should dump utf codepoints to a vector" <|
|
||||
facepalm.codepoints.should_equal facepalm_codes
|
||||
|
||||
Test.specify "should convert an array of codepoints to text" <|
|
||||
group_builder.specify "should convert an array of codepoints to text" <|
|
||||
Text.from_codepoints facepalm_codes . should_equal facepalm
|
||||
|
||||
Test.group "Windows 1252" <|
|
||||
suite_builder.group "Windows 1252" group_builder->
|
||||
test = 'Hello World! ¢£¥'
|
||||
test_windows = [72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33, 32, -94, -93, -91]
|
||||
|
||||
@ -169,26 +169,30 @@ spec =
|
||||
invalid_windows = [72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33, 32, -94, -93, -91, -127]
|
||||
invalid_windows_out = [72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33, 32, -94, -93, -91, 63]
|
||||
|
||||
Test.specify "should dump Windows-1252 bytes to a vector via encoding" <|
|
||||
group_builder.specify "should dump Windows-1252 bytes to a vector via encoding" <|
|
||||
dump = test.bytes Encoding.windows_1252
|
||||
Test.assert_no_problems dump
|
||||
dump . should_equal test_windows
|
||||
|
||||
Test.specify "should convert an array of bytes to text" <|
|
||||
group_builder.specify "should convert an array of bytes to text" <|
|
||||
result = Text.from_bytes test_windows Encoding.windows_1252
|
||||
Test.assert_no_problems result
|
||||
result . should_equal test
|
||||
|
||||
Test.specify "Invalid Windows-1252 should raise a problem when decoding" <|
|
||||
group_builder.specify "Invalid Windows-1252 should raise a problem when decoding" <|
|
||||
action = Text.from_bytes invalid_windows Encoding.windows_1252 on_problems=_
|
||||
tester result = result . should_equal invalid
|
||||
problems = [Encoding_Error.Error "Encoding issues at 16."]
|
||||
Problems.test_problem_handling action problems tester
|
||||
|
||||
Test.specify "Invalid Windows-1252 should raise a problem when encoding" <|
|
||||
group_builder.specify "Invalid Windows-1252 should raise a problem when encoding" <|
|
||||
action = invalid.bytes Encoding.windows_1252 on_problems=_
|
||||
tester result = result . should_equal invalid_windows_out
|
||||
problems = [Encoding_Error.Error "Encoding issues at 16."]
|
||||
Problems.test_problem_handling action problems tester
|
||||
|
||||
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -2,52 +2,56 @@ from Standard.Base import all

import Standard.Base.Errors.Common.Syntax_Error
import Standard.Base.Errors.Time_Error.Time_Error
import Standard.Test.Extensions

import Standard.Base.Data.Json.Invalid_JSON

import Standard.Base.Data.Numbers.Number_Parse_Error
from Standard.Test import Test, Test_Suite
from Standard.Test_New import all

spec =
Test.group "parse" <|
Test.specify "Float" <|
add_specs suite_builder =
suite_builder.group "parse" group_builder->
group_builder.specify "Float" <|
"32.5".parse_float . should_equal <| Float.parse "32.5"
l = Locale.new "cs"
"32,5".parse_float l . should_equal <| Float.parse "32,5" l
"abc".parse_float . should_fail_with Number_Parse_Error

Test.specify "Integer" <|
group_builder.specify "Integer" <|
"12343456".parse_integer . should_equal <| Integer.parse "12343456"
"ABC123".parse_integer 16 . should_equal <| Integer.parse "ABC123" 16
"abc".parse_integer . should_fail_with Number_Parse_Error

Test.specify "Json" <|
group_builder.specify "Json" <|
"[null, null, true, false]".parse_json . should_equal <| Json.parse "[null, null, true, false]"
"[[".parse_json . should_fail_with Invalid_JSON

Test.specify "Date" <|
group_builder.specify "Date" <|
"1999-01-01".parse_date . should_equal <| Date.new 1999 1 1
"1999 1 1".parse_date "yyyy M d" . should_equal <| Date.new 1999 1 1
"1999-01-01".parse_date "yyyy M d" . should_fail_with Time_Error
"13 Jan 2023".parse_date "d MMM yyyy" . should_equal <| Date.new 2023 1 13
"13 January 2023".parse_date "d MMMM yyyy" . should_equal <| Date.new 2023 1 13

Test.specify "Date_Time" <|
group_builder.specify "Date_Time" <|
"2020-10-01T04:11:12-04:00".parse_date_time . should_equal <| Date_Time.parse "2020-10-01T04:11:12-04:00"
"2020-05-06 04:30:20".parse_date_time "yyyy-MM-dd HH:mm:ss" . should_equal <| Date_Time.parse "2020-05-06 04:30:20" "yyyy-MM-dd HH:mm:ss"
"asdf".parse_date_time . should_fail_with Time_Error

Test.specify "Time_Of_Day" <|
group_builder.specify "Time_Of_Day" <|
"15:05:30".parse_time_of_day . should_equal <| Time_Of_Day.parse "15:05:30"
"4:30AM".parse_time_of_day "h:mma" . should_equal <| Time_Of_Day.parse "4:30AM" "h:mma"
"half twelve".parse_time_of_day . should_fail_with Time_Error

Test.specify "Time_Zone" <|
group_builder.specify "Time_Zone" <|
"CET".parse_time_zone . should_equal <| Time_Zone.parse "CET"
"foo".parse_time_zone . should_fail_with Time_Error

Test.specify "URI" <|
group_builder.specify "URI" <|
"http://example.com".to_uri . should_equal <| URI.parse "http://example.com"
":::".to_uri . should_fail_with Syntax_Error

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -10,75 +10,92 @@ import Standard.Base.Errors.Illegal_Argument.Illegal_Argument

from Standard.Base.Data.Text.Regex.Internal.Replacer import get_lru_size, replacer_cache_lookup

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

spec =
Test.group "Compile" <|
Test.specify "should be able to be compiled" <|
type Data
Value ~data

pattern self = self.data.at 0
match self = self.data.at 1
input self = self.data.at 2

setup = Data.Value <|
pattern = Regex.compile "(.. .. )(?<letters>.+)()??(?<empty>)??"
input = "aa ab abc a bc bcd"
match = pattern.match input
match . should_be_a Match
[pattern, match, input]




add_specs suite_builder =
suite_builder.group "Compile" group_builder->
group_builder.specify "should be able to be compiled" <|
pattern = Regex.compile "(?<dots>..)" case_insensitive=True
pattern . should_be_a Regex

Test.specify "should throw Regex_Syntax_Error for a regex with incorrect syntax" <|
|
||||
group_builder.specify "should throw Regex_Syntax_Error for a regex with incorrect syntax" <|
|
||||
Regex.compile "ab(c(((((((" . should_fail_with Regex_Syntax_Error
|
||||
|
||||
Test.specify "should throw Regex_Syntax_Error for a regex with incorrect syntax (space in capture group name)" <|
|
||||
group_builder.specify "should throw Regex_Syntax_Error for a regex with incorrect syntax (space in capture group name)" <|
|
||||
Regex.compile "(?<dot s>..)" . should_fail_with Regex_Syntax_Error
|
||||
|
||||
Test.specify "should throw Regex_Syntax_Error for a regex with incorrect syntax (duplicate name)" <|
|
||||
group_builder.specify "should throw Regex_Syntax_Error for a regex with incorrect syntax (duplicate name)" <|
|
||||
Regex.compile "(?<foo>.)(?<foo>.)" . should_fail_with Regex_Syntax_Error
|
||||
|
||||
Test.specify "should disallow empty patterns in `compile`" <|
|
||||
group_builder.specify "should disallow empty patterns in `compile`" <|
|
||||
Regex.compile "" . should_fail_with Illegal_Argument
|
||||
|
||||
Test.specify "passing a non-string should fail with a type error" <|
|
||||
group_builder.specify "passing a non-string should fail with a type error" <|
|
||||
Test.expect_panic_with (Regex.compile 12) Type_Error
|
||||
p = Regex.compile "[a-z]"
|
||||
Test.expect_panic_with (Regex.compile p) Type_Error
|
||||
|
||||
Test.group "Escape" <|
|
||||
Test.specify "should escape an expression for use as a literal" <|
|
||||
suite_builder.group "Escape" group_builder->
|
||||
group_builder.specify "should escape an expression for use as a literal" <|
|
||||
Regex.escape "[a-z\d]+" . should_equal '\\[a-z\\d\\]\\+'
|
||||
|
||||
Test.group "Pattern.matches" <|
|
||||
Test.specify "should return True when the pattern matches against the input" <|
|
||||
|
||||
suite_builder.group "Pattern.matches" group_builder->
|
||||
group_builder.specify "should return True when the pattern matches against the input" <|
|
||||
pattern = Regex.compile "(.. .. )(?<letters>.+)()??(?<empty>)??"
|
||||
input = "aa ab abc a bc bcd"
|
||||
pattern.matches input . should_be_true
|
||||
|
||||
Test.specify "should return False when the pattern doesn't match against the input" <|
|
||||
group_builder.specify "should return False when the pattern doesn't match against the input" <|
|
||||
pattern = Regex.compile "aaz"
|
||||
input = "aa ab abc a bc bcd"
|
||||
pattern.matches input . should_be_false
|
||||
|
||||
Test.specify "should check for full matches" <|
|
||||
group_builder.specify "should check for full matches" <|
|
||||
pattern = Regex.compile "f.o"
|
||||
pattern.matches "foo" . should_be_true
|
||||
pattern.matches "foobar" . should_be_false
|
||||
|
||||
Test.specify "`matches` with an empty pattern should be an error" <|
|
||||
group_builder.specify "`matches` with an empty pattern should be an error" <|
|
||||
pattern = Regex.compile ""
|
||||
pattern.matches "ABC" . should_fail_with Illegal_Argument
|
||||
|
||||
Test.specify "`matches` against a non-Text should fail with Illegal_Argument" <|
|
||||
group_builder.specify "`matches` against a non-Text should fail with Illegal_Argument" <|
|
||||
pattern = Regex.compile "abc"
|
||||
pattern.matches 1 . should_fail_with Type_Error
|
||||
|
||||
Test.group "Pattern.match and .match_all" <|
|
||||
Test.specify "should be able to `match` the first instance of the pattern in the input" <|
|
||||
|
||||
suite_builder.group "Pattern.match and .match_all" group_builder->
|
||||
group_builder.specify "should be able to `match` the first instance of the pattern in the input" <|
|
||||
pattern = Regex.compile "(.. .. )(?<letters>.+)()??(?<empty>)??"
|
||||
input = "aa ab abc a bc bcd"
|
||||
match = pattern.match input
|
||||
match . should_be_a Match
|
||||
match.text 0 . should_equal input
|
||||
|
||||
Test.specify "should return `Nothing` if there are no matches in first mode" <|
|
||||
group_builder.specify "should return `Nothing` if there are no matches in first mode" <|
|
||||
pattern = Regex.compile "(.. .. )(?<letters>.+)()??(?<empty>)??"
|
||||
input = "abc"
|
||||
match = pattern.match input
|
||||
match . should_equal Nothing
|
||||
|
||||
Test.specify "should be able to `match` the all instances of the pattern in the input" <|
|
||||
group_builder.specify "should be able to `match` the all instances of the pattern in the input" <|
|
||||
pattern = Regex.compile "(..)"
|
||||
input = "abcdefghij"
|
||||
matches = pattern.match_all input
|
||||
@ -89,43 +106,44 @@ spec =
|
||||
matches.at 3 . text 0 . should_equal "gh"
|
||||
matches.at 4 . text 0 . should_equal "ij"
|
||||
|
||||
Test.specify "should return `[]` when an all match match fails" <|
|
||||
group_builder.specify "should return `[]` when an all match match fails" <|
|
||||
pattern = Regex.compile "(aa)"
|
||||
input = "abcdefghij"
|
||||
match = pattern.match_all input
|
||||
match . should_equal []
|
||||
|
||||
Test.specify "`match` with an empty pattern should be an error" <|
|
||||
group_builder.specify "`match` with an empty pattern should be an error" <|
|
||||
pattern = Regex.compile ""
|
||||
pattern.match "ABC" . should_fail_with Illegal_Argument
|
||||
|
||||
Test.specify "`match_all` with an empty pattern should be an error" <|
|
||||
group_builder.specify "`match_all` with an empty pattern should be an error" <|
|
||||
pattern = Regex.compile ""
|
||||
pattern.match_all "ABC" . should_fail_with Illegal_Argument
|
||||
|
||||
Test.specify "`match` against a non-Text should fail with Illegal_Argument" <|
|
||||
group_builder.specify "`match` against a non-Text should fail with Illegal_Argument" <|
|
||||
pattern = Regex.compile "abc"
|
||||
pattern.match 1 . should_fail_with Type_Error
|
||||
|
||||
Test.specify "`match_all` against a non-Text should fail with Illegal_Argument" <|
|
||||
group_builder.specify "`match_all` against a non-Text should fail with Illegal_Argument" <|
|
||||
pattern = Regex.compile "abc"
|
||||
pattern.match_all 1 . should_fail_with Type_Error
|
||||
|
||||
Test.group "Pattern.find and .find_all" <|
|
||||
Test.specify "should be able to `find` the first instance of the pattern in the input" <|
|
||||
|
||||
suite_builder.group "Pattern.find and .find_all" group_builder->
|
||||
group_builder.specify "should be able to `find` the first instance of the pattern in the input" <|
|
||||
pattern = Regex.compile "(..)"
|
||||
input = "abcdefghij"
|
||||
match = pattern.find input
|
||||
match . should_be_a Text
|
||||
match . should_equal "ab"
|
||||
|
||||
Test.specify "should return `Nothing` if there are no matches in first mode" <|
|
||||
group_builder.specify "should return `Nothing` if there are no matches in first mode" <|
|
||||
pattern = Regex.compile "(aa)"
|
||||
input = "abcdefghij"
|
||||
match = pattern.find input
|
||||
match . should_equal Nothing
|
||||
|
||||
Test.specify "should be able to `find` the all instances of the pattern in the input" <|
|
||||
group_builder.specify "should be able to `find` the all instances of the pattern in the input" <|
|
||||
pattern = Regex.compile "(..)"
|
||||
input = "abcdefghij"
|
||||
match = pattern.find_all input
|
||||
@ -136,13 +154,13 @@ spec =
|
||||
match.at 3 . should_equal "gh"
|
||||
match.at 4 . should_equal "ij"
|
||||
|
||||
Test.specify "should return `[]` when an all match match fails" <|
|
||||
group_builder.specify "should return `[]` when an all match match fails" <|
|
||||
pattern = Regex.compile "(aa)"
|
||||
input = "abcdefghij"
|
||||
match = pattern.find_all input
|
||||
match . should_equal []
|
||||
|
||||
Test.specify "should handle matching empty matches" <|
|
||||
group_builder.specify "should handle matching empty matches" <|
|
||||
pattern = Regex.compile ".*"
|
||||
pattern.find_all "Hello World" . should_equal ["Hello World", ""]
|
||||
pattern.find_all "" . should_equal [""]
|
||||
@ -151,39 +169,40 @@ spec =
|
||||
pattern_2.find_all "Hello World" . should_equal ["Hello Worl", ""]
|
||||
pattern_2.find_all "" . should_equal []
|
||||
|
||||
Test.specify "should correctly handle edge cases where one-letter matches happen at the end of the word" <|
|
||||
group_builder.specify "should correctly handle edge cases where one-letter matches happen at the end of the word" <|
|
||||
Regex.compile "(a+|1+)" . find_all "a1a1" . should_equal ["a", "1", "a", "1"]
|
||||
Regex.compile "([a]+|[1]+)" . find_all "a1a1" . should_equal ["a", "1", "a", "1"]
|
||||
Regex.compile "([0-9]+|[^0-9]+)" . find_all "a1b2" . should_equal ["a", "1", "b", "2"]
|
||||
|
||||
Test.specify "`find` with an empty pattern should be an error" <|
|
||||
group_builder.specify "`find` with an empty pattern should be an error" <|
|
||||
pattern = Regex.compile ""
|
||||
pattern.find "ABC" . should_fail_with Illegal_Argument
|
||||
|
||||
Test.specify "`find_all` with an empty pattern should be an error" <|
|
||||
group_builder.specify "`find_all` with an empty pattern should be an error" <|
|
||||
pattern = Regex.compile ""
|
||||
pattern.find_all "ABC" . should_fail_with Illegal_Argument
|
||||
|
||||
Test.group "Pattern.split" <|
|
||||
Test.specify "should be able to `split` on the first instance of the pattern" <|
|
||||
|
||||
suite_builder.group "Pattern.split" group_builder->
|
||||
group_builder.specify "should be able to `split` on the first instance of the pattern" <|
|
||||
pattern = Regex.compile "cd"
|
||||
input = "abcdefcdghij"
|
||||
texts = pattern.split input only_first=True
|
||||
texts . should_equal ["ab", "efcdghij"]
|
||||
|
||||
Test.specify "should return the original text if there are no matches in first mode" <|
|
||||
group_builder.specify "should return the original text if there are no matches in first mode" <|
|
||||
pattern = Regex.compile "aa"
|
||||
input = "abcdefghij"
|
||||
texts = pattern.split input only_first=True
|
||||
texts . should_equal ["abcdefghij"]
|
||||
|
||||
Test.specify "should return the original text if there are no matches in all mode" <|
|
||||
group_builder.specify "should return the original text if there are no matches in all mode" <|
|
||||
pattern = Regex.compile "aa"
|
||||
input = "abcdefghij"
|
||||
texts = pattern.split input
|
||||
texts . should_equal ["abcdefghij"]
|
||||
|
||||
Test.specify "should be able to `split` on the all instances of the pattern in the input" <|
|
||||
group_builder.specify "should be able to `split` on the all instances of the pattern in the input" <|
|
||||
pattern = Regex.compile "a"
|
||||
pattern.split "bacadaeaf" . should_equal ["b", "c", "d", "e", "f"]
|
||||
pattern.split "baab" . should_equal ["b", "", "b"]
|
||||
@ -192,92 +211,93 @@ spec =
|
||||
pattern.split "a" . should_equal ["", ""]
|
||||
pattern.split "abaca" . should_equal ["", "b", "c", ""]
|
||||
|
||||
Test.specify "should split without normalization" <|
|
||||
group_builder.specify "should split without normalization" <|
|
||||
pattern = Regex.compile "s"
|
||||
pattern.split 'aśsśs\u{301}śb' . should_equal ['aś', 'ś', '\u{301}śb']
|
||||
|
||||
Test.specify "`split` against a non-Text should fail with Illegal_Argument" <|
|
||||
group_builder.specify "`split` against a non-Text should fail with Illegal_Argument" <|
|
||||
pattern = Regex.compile "abc"
|
||||
pattern.split 1 . should_fail_with Type_Error
|
||||
|
||||
Test.group "Pattern.tokenize" <|
|
||||
Test.specify "can tokenize with simple regexes without capturing groups"
|
||||
|
||||
suite_builder.group "Pattern.tokenize" group_builder->
|
||||
group_builder.specify "can tokenize with simple regexes without capturing groups"
|
||||
Regex.compile "[a-z]+" . tokenize "1-800-regex-yes" . should_equal ["regex", "yes"]
|
||||
Regex.compile "[a-z]+" case_insensitive=True . tokenize "1-800-REGEX-YES" . should_equal ["REGEX", "YES"]
|
||||
Regex.compile "\d\d" . tokenize "12 hi345 67r890r" . should_equal ["12", "34", "67", "89"]
|
||||
|
||||
Test.specify "can tokenize with regexes with capturing groups"
|
||||
group_builder.specify "can tokenize with regexes with capturing groups"
|
||||
Regex.compile "(\d\d)\d" . tokenize "12 hi345 67r890r" . should_equal ["34", "89"]
|
||||
Regex.compile "[a-z]+(\d+)" . tokenize "xy blink182 !!matchbox20 foo" . should_equal ["182", "20"]
|
||||
Regex.compile "[a-z]+(\d*)" . tokenize "xy blink182 !!matchbox20 foo" . should_equal ["", "182", "20", ""]
|
||||
|
||||
Test.specify "ignores non-capturing groups"
|
||||
group_builder.specify "ignores non-capturing groups"
|
||||
Regex.compile "(?:(\d\d)\d)" . tokenize "12 hi345 67r890r" . should_equal ["34", "89"]
|
||||
Regex.compile "(\d\d)(?:\d)" . tokenize "12 hi345 67r890r" . should_equal ["34", "89"]
|
||||
Regex.compile "(?<foo>\d\d)(?:\d)" . tokenize "12 hi345 67r890r" . should_equal ["34", "89"]
|
||||
Regex.compile "(?:[a-z]+)(\d+)" . tokenize "xy blink182 !!matchbox20 foo" . should_equal ["182", "20"]
|
||||
|
||||
Test.specify "ignores nested groups"
|
||||
group_builder.specify "ignores nested groups"
|
||||
Regex.compile "(\d(\d))\d" . tokenize "12 hi345 67r890r" . should_equal ["34", "89"]
|
||||
Regex.compile "(?<foo>\d(?<bar>\d))\d" . tokenize "12 hi345 67r890r" . should_equal ["34", "89"]
|
||||
Regex.compile "[a-z]+((\d)\d*)" . tokenize "xy blink182 !!matchbox20 foo" . should_equal ["182", "20"]
|
||||
Regex.compile "\d(\d(\d\d)\d)\d" . tokenize "012345678901" . should_equal ["1234", "7890"]
|
||||
|
||||
Test.specify "non-participating groups are rendered as the empty string"
|
||||
group_builder.specify "non-participating groups are rendered as the empty string"
|
||||
Regex.compile "(\d).(?:(\d)|([a-z])).(\d)" . tokenize "3_4_0" . should_equal ['340']
|
||||
Regex.compile "(\d).(?:(\d)|([a-z])).(\d)" . tokenize "3_q_0" . should_equal ['3q0']
|
||||
|
||||
Test.specify "handles unicode" <|
|
||||
group_builder.specify "handles unicode" <|
|
||||
Regex.compile "[áê]+" . tokenize "aááêe xêy" . should_equal ["ááê", "ê"]
|
||||
# `+` only applies to the accent `\u{301}`, not to the entire grapheme.
|
||||
Regex.compile 'a\u{301}+' . tokenize 'aa\u{301}a\u{301}êe xêy' . should_equal ['a\u{301}', 'a\u{301}']
|
||||
Regex.compile '(?:a\u{301})+' . tokenize 'aa\u{301}a\u{301}êe xêy' . should_equal ['a\u{301}a\u{301}']
|
||||
Regex.compile "x([áê]+)y" . tokenize "xáy xêy" . should_equal ["á", "ê"]
|
||||
|
||||
Test.specify "examples are correct" <|
|
||||
group_builder.specify "examples are correct" <|
|
||||
Regex.compile "..." . tokenize "ABCDEF" . should_equal ["ABC","DEF"]
|
||||
Regex.compile "(.).(.)" . tokenize "ABCDEF" . should_equal ["AC","DF"]
|
||||
Regex.compile "(\S+)(?:\s+|$)" . tokenize 'Hello Big\r\nWide\tWorld\nGoodbye!' . should_equal ["Hello","Big","Wide","World","Goodbye!"]
|
||||
|
||||
Test.group "Pattern.replace" <|
|
||||
Test.specify "should be able to `replace` the first instance of the pattern in the input" <|
|
||||
suite_builder.group "Pattern.replace" group_builder->
|
||||
group_builder.specify "should be able to `replace` the first instance of the pattern in the input" <|
|
||||
pattern = Regex.compile "abc"
|
||||
input = "aa ab abc a bc abc"
|
||||
match = pattern.replace input "REPLACED" only_first=True
|
||||
match . should_be_a Text
|
||||
match . should_equal "aa ab REPLACED a bc abc"
|
||||
|
||||
Test.specify "should return the string unchanged if there are no matches to replace in only_first mode" <|
|
||||
group_builder.specify "should return the string unchanged if there are no matches to replace in only_first mode" <|
|
||||
pattern = Regex.compile "xyz"
|
||||
input = "aa ab ac ad"
|
||||
match = pattern.replace input "REPLACED" only_first=True
|
||||
match . should_equal input
|
||||
|
||||
Test.specify "should be able to replace the all instances of the pattern in the input" <|
|
||||
group_builder.specify "should be able to replace the all instances of the pattern in the input" <|
|
||||
pattern = Regex.compile "aa"
|
||||
input = "aa ab aa ac ad aa aa ax"
|
||||
match = pattern.replace input "REPLACED"
|
||||
match . should_equal "REPLACED ab REPLACED ac ad REPLACED REPLACED ax"
|
||||
|
||||
Test.specify "should return the input when an all replace fails" <|
|
||||
group_builder.specify "should return the input when an all replace fails" <|
|
||||
pattern = Regex.compile "aa"
|
||||
input = "abcdefghij"
|
||||
match = pattern.replace input "REPLACED"
|
||||
match . should_equal input
|
||||
|
||||
Test.specify "should be able to replace the entire input only if it matches" <|
|
||||
group_builder.specify "should be able to replace the entire input only if it matches" <|
|
||||
pattern = Regex.compile "(.. .. )(?<letters>.+)()??(?<empty>)??"
|
||||
input = "aa ab abc a bc bcd"
|
||||
match = pattern.replace input "REPLACED"
|
||||
match . should_equal "REPLACED"
|
||||
|
||||
Test.specify "should not perform overlapping replacements in all mode" <|
|
||||
group_builder.specify "should not perform overlapping replacements in all mode" <|
|
||||
pattern = Regex.compile "(..)"
|
||||
input = "aa ab"
|
||||
match = pattern.replace input "REPLACED"
|
||||
match . should_equal "REPLACEDREPLACEDb"
|
||||
|
||||
Test.specify "should handle capture groups in replacement" <|
|
||||
group_builder.specify "should handle capture groups in replacement" <|
|
||||
pattern = Regex.compile "(?<capture>[a-z]+)"
|
||||
pattern.replace "foo bar, baz" "[$1]" . should_equal "[foo] [bar], [baz]"
|
||||
pattern.replace "foo bar, baz" "[$1]" only_first=True . should_equal "[foo] bar, baz"
|
||||
@ -290,240 +310,221 @@ spec =
|
||||
pattern.replace "foo bar, baz" "[$&]" . should_equal "[foo] [bar], [baz]"
|
||||
pattern.replace "foo bar, baz" "[$&]" only_first=True . should_equal "[foo] bar, baz"
|
||||
|
||||
Test.specify "should handle unicode in capture group names" <|
|
||||
group_builder.specify "should handle unicode in capture group names" <|
|
||||
pattern = Regex.compile "(?<건반>[a-z]+)"
|
||||
pattern.replace "foo bar, baz" "[$<건반>]" . should_equal "[foo] [bar], [baz]"
|
||||
|
||||
Test.group "should correctly evaluate documentation examples" <|
|
||||
Test.specify "example 1" <|
|
||||
pattern = Regex.compile 'aa'
|
||||
pattern.replace 'aaa' 'b' . should_equal 'ba'
|
||||
|
||||
Test.specify "example 2" <|
|
||||
pattern = Regex.compile '[lo]'
|
||||
pattern.replace 'Hello World!' '#' . should_equal 'He### W#r#d!'
|
||||
suite_builder.group "should correctly evaluate documentation examples" group_builder->
|
||||
group_builder.specify "example 1" <|
|
||||
pattern = Regex.compile 'aa'
|
||||
pattern.replace 'aaa' 'b' . should_equal 'ba'
|
||||
|
||||
Test.specify "example 3" <|
|
||||
pattern = Regex.compile 'l'
|
||||
pattern.replace 'Hello World!' '#' only_first=True . should_equal 'He#lo World!'
|
||||
group_builder.specify "example 2" <|
|
||||
pattern = Regex.compile '[lo]'
|
||||
pattern.replace 'Hello World!' '#' . should_equal 'He### W#r#d!'
|
||||
|
||||
Test.specify "example 4" <|
|
||||
pattern = Regex.compile '"(.*?)"'
|
||||
pattern.replace '"abc" foo "bar" baz' '($1)' . should_equal '(abc) foo (bar) baz'
|
||||
group_builder.specify "example 3" <|
|
||||
pattern = Regex.compile 'l'
|
||||
pattern.replace 'Hello World!' '#' only_first=True . should_equal 'He#lo World!'
|
||||
|
||||
Test.specify "example 5" <|
|
||||
pattern = Regex.compile "aa"
|
||||
input = "aa ab aa ac ad aa aa ax"
|
||||
match = pattern.replace input "xyz"
|
||||
match . should_equal "xyz ab xyz ac ad xyz xyz ax"
|
||||
group_builder.specify "example 4" <|
|
||||
pattern = Regex.compile '"(.*?)"'
|
||||
pattern.replace '"abc" foo "bar" baz' '($1)' . should_equal '(abc) foo (bar) baz'
|
||||
|
||||
Test.specify "example 6" <|
|
||||
pattern = Regex.compile "([a-z]+)"
|
||||
pattern.replace "foo bar, baz" "[$1]" . should_equal "[foo] [bar], [baz]"
|
||||
group_builder.specify "example 5" <|
|
||||
pattern = Regex.compile "aa"
|
||||
input = "aa ab aa ac ad aa aa ax"
|
||||
match = pattern.replace input "xyz"
|
||||
match . should_equal "xyz ab xyz ac ad xyz xyz ax"
|
||||
|
||||
Test.specify "`replace` with an empty pattern should be an error" <|
|
||||
group_builder.specify "example 6" <|
|
||||
pattern = Regex.compile "([a-z]+)"
|
||||
pattern.replace "foo bar, baz" "[$1]" . should_equal "[foo] [bar], [baz]"
|
||||
|
||||
group_builder.specify "`replace` with an empty pattern should be an error" <|
|
||||
pattern = Regex.compile ""
|
||||
pattern.replace "ABC" . should_fail_with Illegal_Argument
|
||||
|
||||
Test.specify "`replace` against a non-Text should fail with Illegal_Argument" <|
|
||||
group_builder.specify "`replace` against a non-Text should fail with Illegal_Argument" <|
|
||||
pattern = Regex.compile "abc"
|
||||
pattern.replace 1 "abc" . should_fail_with Type_Error
|
||||
|
||||
Test.group "Match.text" <|
|
||||
pattern = Regex.compile "(.. .. )(?<letters>.+)()??(?<empty>)??"
|
||||
input = "aa ab abc a bc bcd"
|
||||
match = pattern.match input
|
||||
match . should_be_a Match
|
||||
|
||||
Test.specify "should return the full match with index 0" <|
|
||||
match.text 0 . should_equal "aa ab abc a bc bcd"
|
||||
suite_builder.group "Match.text" group_builder->
|
||||
data = Data.setup
|
||||
|
||||
Test.specify "should return the group contents if it matches by index" <|
|
||||
match.text 1 . should_equal "aa ab "
|
||||
group_builder.specify "should return the full match with index 0" <|
|
||||
data.match.text 0 . should_equal "aa ab abc a bc bcd"
|
||||
|
||||
Test.specify "should return the group contents if it matches by name" <|
|
||||
match.text "letters" . should_equal "abc a bc bcd"
|
||||
group_builder.specify "should return the group contents if it matches by index" <|
|
||||
data.match.text 1 . should_equal "aa ab "
|
||||
|
||||
Test.specify "should return Nothing if the group did not match" <|
|
||||
match.text 3 . should_equal Nothing
|
||||
group_builder.specify "should return the group contents if it matches by name" <|
|
||||
data.match.text "letters" . should_equal "abc a bc bcd"
|
||||
|
||||
Test.specify "should fail with No_Such_Group_Error if the group did not exist" <|
|
||||
match.text "fail" . should_fail_with No_Such_Group
|
||||
match.text 5 . should_fail_with No_Such_Group
|
||||
group_builder.specify "should return Nothing if the group did not match" <|
|
||||
data.match.text 3 . should_equal Nothing
|
||||
|
||||
Test.specify "should make named groups accessible by index" <|
|
||||
match.text 2 . should_equal (match.text "letters")
|
||||
group_builder.specify "should fail with No_Such_Group_Error if the group did not exist" <|
|
||||
data.match.text "fail" . should_fail_with No_Such_Group
|
||||
data.match.text 5 . should_fail_with No_Such_Group
|
||||
|
||||
Test.group "Match.groups" <|
|
||||
pattern = Regex.compile "(.. .. )(?<letters>.+)()??(?<empty>)??"
|
||||
input = "aa ab abc a bc bcd"
|
||||
match = pattern.match input
|
||||
match . should_be_a Match
|
||||
group_builder.specify "should make named groups accessible by index" <|
|
||||
data.match.text 2 . should_equal (data.match.text "letters")
|
||||
|
||||
Test.specify "should return the results of all groups" <|
|
||||
groups = match.groups
|
||||
|
||||
suite_builder.group "Match.groups" group_builder->
|
||||
data = Data.setup
|
||||
|
||||
group_builder.specify "should return the results of all groups" <|
|
||||
groups = data.match.groups
|
||||
groups.length . should_equal 5
|
||||
groups.should_equal ["aa ab abc a bc bcd", "aa ab ", "abc a bc bcd", Nothing, Nothing]
|
||||
|
||||
Test.specify "should replace unmatched groups by a user-specified value" <|
|
||||
groups = match.groups "UNMATCHED"
|
||||
group_builder.specify "should replace unmatched groups by a user-specified value" <|
|
||||
groups = data.match.groups "UNMATCHED"
|
||||
groups.length . should_equal 5
|
||||
groups.should_equal ["aa ab abc a bc bcd", "aa ab ", "abc a bc bcd", "UNMATCHED", "UNMATCHED"]
|
||||
|
||||
Test.group "Match.named_groups" <|
|
||||
pattern = Regex.compile "(.. .. )(?<letters>.+)()??(?<empty>)??"
|
||||
input = "aa ab abc a bc bcd"
|
||||
match = pattern.match input
|
||||
match . should_be_a Match.Value
|
||||
|
||||
Test.specify "should provide access to info about group names" <|
|
||||
pattern.named_groups.sort . should_equal ["empty", "letters"]
|
||||
pattern.group_nums_to_names . should_equal <| Map.from_vector [[2, "letters"],[4, "empty"]]
|
||||
suite_builder.group "Match.named_groups" group_builder->
|
||||
data = Data.setup
|
||||
|
||||
Test.specify "should return the results of all named groups" <|
|
||||
groups = match.named_groups
|
||||
group_builder.specify "should provide access to info about group names" <|
|
||||
data.pattern.named_groups.sort . should_equal ["empty", "letters"]
|
||||
data.pattern.group_nums_to_names . should_equal <| Map.from_vector [[2, "letters"],[4, "empty"]]
|
||||
|
||||
group_builder.specify "should return the results of all named groups" <|
|
||||
groups = data.match.named_groups
|
||||
groups.keys.sort . should_equal ["empty", "letters"]
|
||||
groups.size . should_equal 2
|
||||
groups.at "letters" . should_equal "abc a bc bcd"
|
||||
groups.at "empty" . should_equal Nothing
|
||||
|
||||
Test.specify "should replace unmatched groups by a user-specified value" <|
|
||||
groups = match.named_groups "UNMATCHED"
|
||||
group_builder.specify "should replace unmatched groups by a user-specified value" <|
|
||||
groups = data.match.named_groups "UNMATCHED"
|
||||
groups.size . should_equal 2
|
||||
groups.at "letters" . should_equal "abc a bc bcd"
|
||||
groups.at "empty" . should_equal "UNMATCHED"
|
||||
|
||||
Test.group "Match.start" <|
|
||||
pattern = Regex.compile "(.. .. )(?<letters>.+)()??(?<empty>)??"
|
||||
input = "aa ab abc a bc bcd"
|
||||
match = pattern.match input
|
||||
match . should_be_a Match
|
||||
suite_builder.group "Match.start" group_builder->
|
||||
data = Data.setup
|
||||
|
||||
Test.specify "should return the start of a group by index" <|
|
||||
match.start 1 . should_equal 0
|
||||
group_builder.specify "should return the start of a group by index" <|
|
||||
data.match.start 1 . should_equal 0
|
||||
|
||||
Test.specify "should return the start of a group by name" <|
|
||||
match.start "letters" . should_equal 6
|
||||
group_builder.specify "should return the start of a group by name" <|
|
||||
data.match.start "letters" . should_equal 6
|
||||
|
||||
Test.specify "should return Nothing if the group didn't match" <|
|
||||
match.start 3 . should_equal Nothing
|
||||
match.start "empty" . should_equal Nothing
|
||||
group_builder.specify "should return Nothing if the group didn't match" <|
|
||||
data.match.start 3 . should_equal Nothing
|
||||
data.match.start "empty" . should_equal Nothing
|
||||
|
||||
Test.specify "should return No_Such_Group_Error if the group doesn't exist" <|
|
||||
match.start 5 . should_fail_with No_Such_Group
|
||||
match.start "nonexistent" . should_fail_with No_Such_Group
|
||||
group_builder.specify "should return No_Such_Group_Error if the group doesn't exist" <|
|
||||
data.match.start 5 . should_fail_with No_Such_Group
|
||||
data.match.start "nonexistent" . should_fail_with No_Such_Group
|
||||
|
||||
Test.group "Match.end" <|
|
||||
pattern = Regex.compile "(.. .. )(?<letters>.+)()??(?<empty>)??"
|
||||
input = "aa ab abc a bc bcd"
|
||||
match = pattern.match input
|
||||
match . should_be_a Match
|
||||
|
||||
Test.specify "should return the end of a group by index" <|
|
||||
match.end 1 . should_equal 6
|
||||
suite_builder.group "Match.end" group_builder->
|
||||
data = Data.setup
|
||||
|
||||
Test.specify "should return the end of a group by name" <|
|
||||
match.end "letters" . should_equal 18
|
||||
group_builder.specify "should return the end of a group by index" <|
|
||||
data.match.end 1 . should_equal 6
|
||||
|
||||
Test.specify "should return Nothing if the group didn't match" <|
|
||||
match.end 3 . should_equal Nothing
|
||||
match.end "empty" . should_equal Nothing
|
||||
group_builder.specify "should return the end of a group by name" <|
|
||||
data.match.end "letters" . should_equal 18
|
||||
|
||||
Test.specify "should return No_Such_Group_Error if the group doesn't exist" <|
|
||||
match.end 5 . should_fail_with No_Such_Group
|
||||
match.end "nonexistent" . should_fail_with No_Such_Group
|
||||
group_builder.specify "should return Nothing if the group didn't match" <|
|
||||
data.match.end 3 . should_equal Nothing
|
||||
data.match.end "empty" . should_equal Nothing
|
||||
|
||||
Test.group "Match.utf_16_start" <|
|
||||
pattern = Regex.compile "(.. .. )(?<letters>.+)()??(?<empty>)??"
|
||||
input = "aa ab abc a bc bcd"
|
||||
match = pattern.match input
|
||||
match . should_be_a Match
|
||||
group_builder.specify "should return No_Such_Group_Error if the group doesn't exist" <|
|
||||
data.match.end 5 . should_fail_with No_Such_Group
|
||||
data.match.end "nonexistent" . should_fail_with No_Such_Group
|
||||
|
||||
Test.specify "should return the start of a group by index" <|
|
||||
match.utf_16_start 1 . should_equal 0
|
||||
|
||||
Test.specify "should return the start of a group by name" <|
|
||||
match.utf_16_start "letters" . should_equal 6
|
||||
suite_builder.group "Match.utf_16_start" group_builder->
|
||||
data = Data.setup
|
||||
|
||||
Test.specify "should return Nothing if the group didn't match" <|
|
||||
match.utf_16_start 3 . should_equal Nothing
|
||||
match.utf_16_start "empty" . should_equal Nothing
|
||||
group_builder.specify "should return the start of a group by index" <|
|
||||
data.match.utf_16_start 1 . should_equal 0
|
||||
|
||||
Test.specify "should return No_Such_Group_Error if the group doesn't exist" <|
|
||||
match.utf_16_start 5 . should_fail_with No_Such_Group
|
||||
match.utf_16_start "nonexistent" . should_fail_with No_Such_Group
|
||||
group_builder.specify "should return the start of a group by name" <|
|
||||
data.match.utf_16_start "letters" . should_equal 6
|
||||
|
||||
Test.group "Match.utf_16_end" <|
|
||||
pattern = Regex.compile "(.. .. )(?<letters>.+)()??(?<empty>)??"
|
||||
input = "aa ab abc a bc bcd"
|
||||
match = pattern.match input
|
||||
match . should_be_a Match
|
||||
group_builder.specify "should return Nothing if the group didn't match" <|
|
||||
data.match.utf_16_start 3 . should_equal Nothing
|
||||
data.match.utf_16_start "empty" . should_equal Nothing
|
||||
|
||||
Test.specify "should return the end of a group by index" <|
|
||||
match.utf_16_end 1 . should_equal 6
|
||||
group_builder.specify "should return No_Such_Group_Error if the group doesn't exist" <|
|
||||
data.match.utf_16_start 5 . should_fail_with No_Such_Group
|
||||
data.match.utf_16_start "nonexistent" . should_fail_with No_Such_Group
|
||||
|
||||
Test.specify "should return the end of a group by name" <|
|
||||
match.utf_16_end "letters" . should_equal 18
|
||||
|
||||
Test.specify "should return Nothing if the group didn't match" <|
|
||||
match.utf_16_end 3 . should_equal Nothing
|
||||
match.utf_16_end "empty" . should_equal Nothing
|
||||
suite_builder.group "Match.utf_16_end" group_builder->
|
||||
data = Data.setup
|
||||
|
||||
Test.specify "should return No_Such_Group_Error if the group doesn't exist" <|
|
||||
match.utf_16_end 5 . should_fail_with No_Such_Group
|
||||
match.utf_16_end "nonexistent" . should_fail_with No_Such_Group
|
||||
group_builder.specify "should return the end of a group by index" <|
|
||||
data.match.utf_16_end 1 . should_equal 6
|
||||
|
||||
Test.group "Match.span" <|
|
||||
pattern = Regex.compile "(.. .. )(?<letters>.+)()??(?<empty>)??"
|
||||
input = "aa ab abc a bc bcd"
|
||||
match = pattern.match input
|
||||
match . should_be_a Match
|
||||
group_builder.specify "should return the end of a group by name" <|
|
||||
data.match.utf_16_end "letters" . should_equal 18
|
||||
|
||||
Test.specify "should get the span of a group by index" <|
|
||||
match.span 1 . should_equal (Span.Value (0.up_to 6) input)
|
||||
group_builder.specify "should return Nothing if the group didn't match" <|
|
||||
data.match.utf_16_end 3 . should_equal Nothing
|
||||
data.match.utf_16_end "empty" . should_equal Nothing
|
||||
|
||||
Test.specify "should get the span of a group by name" <|
|
||||
match.span "letters" . should_equal (Span.Value (6.up_to 18) input)
|
||||
group_builder.specify "should return No_Such_Group_Error if the group doesn't exist" <|
|
||||
data.match.utf_16_end 5 . should_fail_with No_Such_Group
|
||||
data.match.utf_16_end "nonexistent" . should_fail_with No_Such_Group
|
||||
|
||||
Test.specify "should return Nothing if the group didn't match" <|
|
||||
match.span 3 . should_equal Nothing
|
||||
match.span "empty" . should_equal Nothing
|
||||
|
||||
Test.specify "should fail with a No_Such_Group_Error if the group doesn't exist" <|
|
||||
match.span 5 . should_fail_with No_Such_Group
|
||||
match.span "nonexistent" . should_fail_with No_Such_Group
|
||||
suite_builder.group "Match.span" group_builder->
|
||||
data = Data.setup
|
||||
|
||||
Test.group "Match.utf_16_span" <|
|
||||
pattern = Regex.compile "(.. .. )(?<letters>.+)()??(?<empty>)??"
|
||||
input = "aa ab abc a bc bcd"
|
||||
match = pattern.match input
|
||||
match . should_be_a Match
|
||||
group_builder.specify "should get the span of a group by index" <|
|
||||
data.match.span 1 . should_equal (Span.Value (0.up_to 6) data.input)
|
||||
|
||||
Test.specify "should get the UTF16 span of a group by index" <|
|
||||
match.utf_16_span 1 . should_equal (Utf_16_Span.Value (0.up_to 6) input)
|
||||
group_builder.specify "should get the span of a group by name" <|
|
||||
data.match.span "letters" . should_equal (Span.Value (6.up_to 18) data.input)
|
||||
|
||||
Test.specify "should get the UTF16 span of a group by name" <|
|
||||
match.utf_16_span "letters" . should_equal (Utf_16_Span.Value (6.up_to 18) input)
|
||||
group_builder.specify "should return Nothing if the group didn't match" <|
|
||||
data.match.span 3 . should_equal Nothing
|
||||
data.match.span "empty" . should_equal Nothing
|
||||
|
||||
Test.specify "should return Nothing if the group didn't match" <|
|
||||
match.utf_16_span 3 . should_equal Nothing
|
||||
match.utf_16_span "empty" . should_equal Nothing
|
||||
group_builder.specify "should fail with a No_Such_Group_Error if the group doesn't exist" <|
|
||||
data.match.span 5 . should_fail_with No_Such_Group
|
||||
data.match.span "nonexistent" . should_fail_with No_Such_Group
|
||||
|
||||
Test.specify "should fail with a No_Such_Group_Error if the group doesn't exist" <|
|
||||
match.utf_16_span 5 . should_fail_with No_Such_Group
|
||||
match.utf_16_span "nonexistent" . should_fail_with No_Such_Group
|
||||
|
||||
Test.group "Match.to_display_text" <|
|
||||
pattern = Regex.compile "(.. .. )(?<letters>.+)()??(?<empty>)??"
|
||||
input = "aa ab abc a bc bcd"
|
||||
suite_builder.group "Match.utf_16_span" group_builder->
|
||||
data = Data.setup
|
||||
|
||||
Test.specify "should not error" <|
|
||||
match = pattern.match input
|
||||
group_builder.specify "should get the UTF16 span of a group by index" <|
|
||||
data.match.utf_16_span 1 . should_equal (Utf_16_Span.Value (0.up_to 6) data.input)
|
||||
|
||||
group_builder.specify "should get the UTF16 span of a group by name" <|
|
||||
data.match.utf_16_span "letters" . should_equal (Utf_16_Span.Value (6.up_to 18) data.input)
|
||||
|
||||
group_builder.specify "should return Nothing if the group didn't match" <|
|
||||
data.match.utf_16_span 3 . should_equal Nothing
|
||||
data.match.utf_16_span "empty" . should_equal Nothing
|
||||
|
||||
group_builder.specify "should fail with a No_Such_Group_Error if the group doesn't exist" <|
|
||||
data.match.utf_16_span 5 . should_fail_with No_Such_Group
|
||||
data.match.utf_16_span "nonexistent" . should_fail_with No_Such_Group
|
||||
|
||||
suite_builder.group "Match.to_display_text" group_builder->
|
||||
data = Data.setup
|
||||
|
||||
group_builder.specify "should not error" <|
|
||||
match = data.pattern.match data.input
|
||||
match . should_be_a Match
|
||||
match.to_display_text . should_equal "Match {aa ab abc a bc bcd}"
|
||||
|
||||
Test.group "caching" <|
Test.specify "Replacer cache drops old values" <|
suite_builder.group "caching" group_builder->
group_builder.specify "Replacer cache drops old values" <|
pattern = Regex.compile('([a-c])')

# Add enough values to flush out the first values.
@ -533,4 +534,8 @@ spec =
replacer_cache_lookup "$1$1x0" . should_equal Nothing
replacer_cache_lookup "$1$1x1" . should_not_equal Nothing

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -2,12 +2,12 @@ from Standard.Base import all
import Standard.Base.Data.Text.Span.Span
import Standard.Base.Data.Text.Span.Utf_16_Span

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

spec = Test.group "Text.Span" <|

Test.specify "should be able to be created over a text" <|
add_specs suite_builder = suite_builder.group "Text.Span" group_builder->

group_builder.specify "should be able to be created over a text" <|
text = "Hello!"
span = Span.Value (0.up_to 3) text
span.start . should_equal 0
@ -15,13 +15,13 @@ spec = Test.group "Text.Span" <|
span.parent . should_equal text
span.text . should_equal "Hel"

Test.specify "should be able to be converted to code units" <|
group_builder.specify "should be able to be converted to code units" <|
text = 'ae\u{301}fz'
span = Span.Value (1.up_to 3) text
span.to_utf_16_span . should_equal (Utf_16_Span.Value (1.up_to 4) text)
span.text . should_equal 'e\u{301}f'

Test.specify "should expand to the associated grapheme clusters" <|
group_builder.specify "should expand to the associated grapheme clusters" <|
text = 'a\u{301}e\u{302}o\u{303}'
span = Utf_16_Span.Value (1.up_to 5) text
extended = span.to_grapheme_span
@ -38,4 +38,8 @@ spec = Test.group "Text.Span" <|
Utf_16_Span.Value (0.up_to 3) text . to_grapheme_span . should_equal (Span.Value (0.up_to 2) text)
Utf_16_Span.Value (0.up_to 2) text . to_grapheme_span . should_equal (Span.Value (0.up_to 1) text)

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -1,11 +1,11 @@
from Standard.Base import all
from Standard.Base.Data.Text.Text_Sub_Range import character_ranges

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

spec = Test.group "Text_Sub_Range_Data" <|
Test.specify "should correctly split a text into grapheme cluster ranges expressed in codepoint indices" <|

add_specs suite_builder = suite_builder.group "Text_Sub_Range_Data" group_builder->
group_builder.specify "should correctly split a text into grapheme cluster ranges expressed in codepoint indices" <|
character_ranges "" . should_equal []
character_ranges "A" . should_equal [0.up_to 1]
character_ranges "abc" . should_equal [0.up_to 1, 1.up_to 2, 2.up_to 3]
@ -21,7 +21,7 @@ spec = Test.group "Text_Sub_Range_Data" <|
character_ranges accent_2 . should_equal [0.up_to 2]
character_ranges kshi+facepalm+accent_1+accent_2 . should_equal [0.up_to 4, 4.up_to 11, 11.up_to 12, 12.up_to 14]

Test.specify "should correctly split a text into grapheme cluster ranges expressed in codepoint indices" <|
group_builder.specify "should correctly split a text into grapheme cluster ranges expressed in codepoint indices" <|
character_ranges "" . should_equal []
character_ranges "A" . should_equal [0.up_to 1]
character_ranges "abc" . should_equal [0.up_to 1, 1.up_to 2, 2.up_to 3]
@ -37,4 +37,8 @@ spec = Test.group "Text_Sub_Range_Data" <|
character_ranges accent_2 . should_equal [0.up_to 2]
character_ranges kshi+facepalm+accent_1+accent_2 . should_equal [0.up_to 4, 4.up_to 11, 11.up_to 12, 12.up_to 14]

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -4,13 +4,13 @@ polyglot java import org.enso.base.Text_Utils
|
||||
polyglot java import org.enso.base.text.CaseFoldedString
|
||||
polyglot java import com.ibm.icu.text.BreakIterator
|
||||
|
||||
from Standard.Test import Test, Test_Suite
|
||||
import Standard.Test.Extensions
|
||||
from Standard.Test_New import all
|
||||
|
||||
|
||||
polyglot java import java.lang.Exception as JException
|
||||
|
||||
spec =
|
||||
Test.group "Text_Utils" <|
|
||||
add_specs suite_builder =
|
||||
suite_builder.group "Text_Utils" group_builder->
|
||||
kshi = '\u0915\u094D\u0937\u093F'
|
||||
facepalm = '\u{1F926}\u{1F3FC}\u200D\u2642\uFE0F'
|
||||
text = "a"+kshi+facepalm+'e\u{301}Z'
|
||||
@ -18,7 +18,7 @@ spec =
|
||||
codepoints_count = grapheme.char_vector.length
|
||||
Vector.new codepoints_count _->ix
|
||||
|
||||
Test.specify "should correctly translate an codepoint index to a grapheme index" <|
|
||||
group_builder.specify "should correctly translate an codepoint index to a grapheme index" <|
|
||||
codepoints_to_graphemes . each_with_index codepoint_ix-> grapheme_ix->
|
||||
found_grapheme_ix = Text_Utils.utf16_index_to_grapheme_index text codepoint_ix
|
||||
found_grapheme_ix.should_equal grapheme_ix
|
||||
@ -37,7 +37,7 @@ spec =
|
||||
Text_Utils.utf16_index_to_grapheme_index 'a\u{301}' 1 . should_equal 0
|
||||
Text_Utils.utf16_index_to_grapheme_index 'a\u{301}' 2 . should_equal 1
|
||||
|
||||
Test.specify "should correctly translate a series of codepoint indices to a grapheme indices in a batch" <|
|
||||
group_builder.specify "should correctly translate a series of codepoint indices to a grapheme indices in a batch" <|
|
||||
translate_indices text ixes =
|
||||
Vector.from_polyglot_array <| Text_Utils.utf16_indices_to_grapheme_indices text ixes
|
||||
codepoint_indices = Vector.new text.char_vector.length ix->ix
|
||||
@ -48,7 +48,7 @@ spec =
|
||||
translate_indices "aB" [0, 1, 2] . should_equal [0, 1, 2]
|
||||
translate_indices 'a\u{301}' [0, 1, 2] . should_equal [0, 0, 1]
|
||||
|
||||
Test.specify "should correctly case-fold a string and translate codeunits to graphemes" <|
|
||||
group_builder.specify "should correctly case-fold a string and translate codeunits to graphemes" <|
|
||||
text = 'a\u{301}AZßffią'
|
||||
folded = CaseFoldedString.fold text Locale.default.java_locale
|
||||
folded.getFoldedString . should_equal 'a\u{301}azssffią'
|
||||
@ -61,7 +61,7 @@ spec =
|
||||
Test.expect_panic_with (folded.findGrapheme -1) JException
|
||||
Test.expect_panic_with (folded.findGrapheme folded.getFoldedString.char_vector.length+1) JException
|
||||
|
||||
Test.specify "should correctly take prefix and suffix of a string" <|
|
||||
group_builder.specify "should correctly take prefix and suffix of a string" <|
|
||||
txt = 's\u0301ąśc\u0301'
|
||||
Text_Utils.take_prefix txt 1 . should_equal 's\u0301'
|
||||
Text_Utils.take_prefix txt 2 . should_equal 's\u0301ą'
|
||||
@ -93,27 +93,31 @@ spec =
|
||||
Text_Utils.take_suffix (kshi+kshi+'a'+kshi) 2 . should_equal 'a'+kshi
|
||||
Text_Utils.take_suffix (kshi+kshi+'a'+kshi) 1 . should_equal kshi
|
||||
|
||||
Test.group "to_display_text" <|
|
||||
Test.specify "simple conversion" <|
|
||||
suite_builder.group "to_display_text" group_builder->
|
||||
group_builder.specify "simple conversion" <|
|
||||
"Hello".to_display_text . should_equal "Hello"
|
||||
|
||||
Test.specify "long text conversion" <|
|
||||
group_builder.specify "long text conversion" <|
|
||||
long = "Hello World! ".repeat 1024
|
||||
disp = long.to_display_text
|
||||
disp.length . should_equal 80
|
||||
disp.characters.take (First 5) . should_equal [ 'H', 'e', 'l', 'l', 'o' ]
|
||||
disp.characters.take (Last 6) . should_equal ['l', 'd', '!', ' ', ' ', '…']
|
||||
|
||||
Test.specify "grapheme 1 conversion" <|
|
||||
group_builder.specify "grapheme 1 conversion" <|
|
||||
txt = 'a\u0321\u0302'*100
|
||||
txt.to_display_text . should_equal ('a\u0321\u0302'*78 + ' …')
|
||||
|
||||
Test.specify "grapheme 2 conversion" <|
|
||||
group_builder.specify "grapheme 2 conversion" <|
|
||||
txt = '\u0915\u094D\u0937\u093F'*100
|
||||
txt.to_display_text . should_equal ('\u0915\u094D\u0937\u093F'*78 + ' …')
|
||||
|
||||
Test.specify "grapheme 3 conversion" <|
|
||||
group_builder.specify "grapheme 3 conversion" <|
|
||||
txt = '\u{1F926}\u{1F3FC}\u200D\u2642\uFE0F'*100
|
||||
txt.to_display_text . should_equal ('\u{1F926}\u{1F3FC}\u200D\u2642\uFE0F'*78 + ' …')
|
||||
|
||||
main = Test_Suite.run_main spec
|
||||
main =
|
||||
suite = Test.build suite_builder->
|
||||
add_specs suite_builder
|
||||
suite.run_with_filter
|
||||
|
||||
|
@ -11,8 +11,8 @@ import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
|
||||
from Standard.Base.Data.Text.Text_Sub_Range.Text_Sub_Range import all
|
||||
from Standard.Base.Data.Index_Sub_Range.Index_Sub_Range import all
|
||||
|
||||
from Standard.Test import Test, Test_Suite
|
||||
import Standard.Test.Extensions
|
||||
from Standard.Test_New import all
|
||||
|
||||
|
||||
type Auto
|
||||
Value a
|
||||
@ -58,11 +58,11 @@ type Manual
|
||||
from the back.
|
||||
- Note that currently the regex-based operations may not handle the edge
|
||||
cases described above too well.
|
||||
spec =
|
||||
add_specs suite_builder =
|
||||
accent_1 = '\u00E9'
|
||||
accent_2 = '\u0065\u{301}'
|
||||
|
||||
Test.group "Text" <|
|
||||
suite_builder.group "Text" group_builder->
|
||||
kshi = '\u0915\u094D\u0937\u093F'
|
||||
facepalm = '\u{1F926}\u{1F3FC}\u200D\u2642\uFE0F'
|
||||
utf_8_whitespace = 'foo\n bar baz \u202F quux'
|
||||
@ -73,17 +73,17 @@ spec =
|
||||
건반(Korean).
|
||||
sentence_words = ['I', 'have', 'a', 'very', 'long', 'block', 'of', 'text', ',', 'here', '.', 'It', 'goes', 'on', 'and', 'on', ',', 'containing', 'things', 'like', 'decimal', 'points', '(', '1.0314e3', ')', 'and', 'other', 'language', 'scripts', 'as', 'well', '건반', '(', 'Korean', ')', '.']
|
||||
|
||||
Test.specify "should allow naive length computation over grapheme clusters" <|
|
||||
group_builder.specify "should allow naive length computation over grapheme clusters" <|
|
||||
kshi.length . should_equal 1
|
||||
facepalm.length . should_equal 1
|
||||
|
||||
Test.specify "should be able to tell if Text is normalized" <|
|
||||
group_builder.specify "should be able to tell if Text is normalized" <|
|
||||
'a'.is_normalized . should_be_true
|
||||
"14.95€".is_normalized . should_be_true
|
||||
'é'.is_normalized . should_be_false
|
||||
'e\u{301}'.is_normalized . should_be_true
|
||||
|
||||
Test.specify "should compare strings using utf normalization" <|
|
||||
group_builder.specify "should compare strings using utf normalization" <|
|
||||
"abc"=="def" . should_be_false
|
||||
'a'=='b' . should_be_false
|
||||
'a'=='a' . should_be_true
|
||||
@ -134,13 +134,13 @@ spec =
|
||||
Ordering.compare common_prefix+complex_letter_3+later_suffix common_prefix+complex_letter_1+earlier_suffix . should_equal Ordering.Greater
|
||||
Ordering.compare common_prefix+complex_letter_1+later_suffix common_prefix+complex_letter_2+earlier_suffix . should_equal Ordering.Greater
|
||||
|
||||
Test.specify "normalizes correctly" <|
|
||||
group_builder.specify "normalizes correctly" <|
|
||||
'aśb'.codepoints . should_equal [97, 347, 98]
|
||||
'as\u0301b'.codepoints . should_equal [97, 115, 769, 98]
|
||||
'aśb'.normalize.codepoints . should_equal [97, 115, 769, 98]
|
||||
'as\u0301b'.normalize.codepoints . should_equal [97, 115, 769, 98]
|
||||
|
||||
Test.specify "normalizes correctly using different standard Unicode normalization modes" <|
|
||||
group_builder.specify "normalizes correctly using different standard Unicode normalization modes" <|
|
||||
## GREEK UPSILON WITH ACUTE AND HOOK SYMBOL behaves differently with each mode.
|
||||
See https://unicode.org/faq/normalization.html
|
||||
s = 'ϓ'
|
||||
@ -151,7 +151,7 @@ spec =
|
||||
s.normalize Normalization.NFKD . codepoints . should_equal [933, 769]
|
||||
s.normalize Normalization.NFKCCasefold . codepoints . should_equal [973]
|
||||
|
||||
Test.specify "should correctly handle case-insensitive equality" <|
|
||||
group_builder.specify "should correctly handle case-insensitive equality" <|
|
||||
"aBc" . equals_ignore_case "Abc" . should_be_true
|
||||
"abc" . equals_ignore_case "abd" . should_be_false
|
||||
"" . equals_ignore_case "" . should_be_true
|
||||
@ -172,11 +172,11 @@ spec =
|
||||
"Kongressstraße"=="Kongressstrasse" . should_be_false
|
||||
"Kongressstraße" . equals_ignore_case "Kongressstrasse" . should_be_true
|
||||
|
||||
Test.specify "should split the text into grapheme clusters" <|
|
||||
group_builder.specify "should split the text into grapheme clusters" <|
|
||||
str = kshi + facepalm + accent_1 + accent_2
|
||||
str.characters . should_equal [kshi, facepalm, accent_1, accent_2]
|
||||
|
||||
Test.specify "should allow access by index to a grapheme cluster" <|
|
||||
group_builder.specify "should allow access by index to a grapheme cluster" <|
|
||||
str = kshi + facepalm + accent_1 + accent_2
|
||||
str.at 0 . should_equal kshi
|
||||
str.at 1 . should_equal facepalm
|
||||
@ -190,7 +190,7 @@ spec =
|
||||
str.second . should_equal facepalm
|
||||
str.last . should_equal accent_2
|
||||
|
||||
Test.specify "should allow access by negative index to a grapheme cluster" <|
|
||||
group_builder.specify "should allow access by negative index to a grapheme cluster" <|
|
||||
str = kshi + facepalm + accent_1 + accent_2
|
||||
str.at -4 . should_equal kshi
|
||||
str.at -3 . should_equal facepalm
|
||||
@ -201,7 +201,7 @@ spec =
|
||||
str.get -2 . should_equal accent_1
|
||||
str.get -1 . should_equal accent_2
|
||||
|
||||
Test.specify "should return a dataflow error when accessing characters out of bounds" <|
|
||||
group_builder.specify "should return a dataflow error when accessing characters out of bounds" <|
|
||||
str = kshi + facepalm + accent_1 + accent_2
|
||||
str.at -5 . should_fail_with Index_Out_Of_Bounds
|
||||
str.at -5 . catch . should_equal (Index_Out_Of_Bounds.Error -5 4)
|
||||
@ -214,14 +214,14 @@ spec =
|
||||
"".second.should_fail_with Index_Out_Of_Bounds
|
||||
"".last.should_fail_with Index_Out_Of_Bounds
|
||||
|
||||
Test.specify "should be able to split the text into words" <|
|
||||
group_builder.specify "should be able to split the text into words" <|
|
||||
"I have not one, but two cats.".words . should_equal ['I', 'have', 'not', 'one', ',', 'but', 'two', 'cats', '.']
|
||||
"แมวมีสี่ขา".words . should_equal ['แมว', 'มี', 'สี่', 'ขา']
|
||||
sentences.words . should_equal sentence_words
|
||||
"I ❤️ Unicode! 🙂🙂".words . should_equal ['I', '❤️', 'Unicode', '!', '🙂', '🙂']
|
||||
'"แมวมีสี่ขา" means that a cat has four legs.'.words . should_equal ['"', 'แมว', 'มี', 'สี่', 'ขา', '"', 'means', 'that', 'a', 'cat', 'has', 'four', 'legs', '.']
|
||||
|
||||
Test.specify "should be able to split the text into lines" <|
|
||||
group_builder.specify "should be able to split the text into lines" <|
|
||||
utf_8_vertical = 'foo\n bar \r\n baz \r quux'
|
||||
utf_8_vertical_split = ["foo", " bar ", " baz ", " quux"]
|
||||
utf_8_vertical.lines . should_equal utf_8_vertical_split
|
||||
@ -247,7 +247,7 @@ spec =
|
||||
multiline.lines . should_equal ['Hello', 'world']
|
||||
'🚀🚧\n\u{301}a\u{301}\r건반'.lines . should_equal ['🚀🚧', '\u{301}a\u{301}', '건반']
|
||||
|
||||
Test.specify "should be able to split the text on arbitrary text sequence" <|
|
||||
group_builder.specify "should be able to split the text on arbitrary text sequence" <|
|
||||
"foo, bar, baz" . split ", " . should_equal ["foo", "bar", "baz"]
|
||||
text = "Namespace::package::package::Type"
|
||||
text.split "::" . should_equal ["Namespace", "package", "package", "Type"]
|
||||
@ -260,7 +260,7 @@ spec =
|
||||
'aśbs\u{301}c'.split 'ś' . should_equal ['a', 'b', 'c']
|
||||
'abc'.split '' . should_fail_with Illegal_Argument
|
||||
|
||||
Test.specify "should be able to split the text on arbitrary text sequence, case-insensitively" <|
|
||||
group_builder.specify "should be able to split the text on arbitrary text sequence, case-insensitively" <|
|
||||
"AbCdABCDabDCba" . split "ab" case_sensitivity=Case_Sensitivity.Insensitive . should_equal ["", "Cd", "CD", "DCba"]
|
||||
"abc".split "d" case_sensitivity=Case_Sensitivity.Insensitive . should_equal ["abc"]
|
||||
"AAA".split "a" case_sensitivity=Case_Sensitivity.Insensitive . should_equal ["", "", "", ""]
|
||||
@ -269,7 +269,7 @@ spec =
|
||||
'aŚbS\u{301}c'.split 'ś' case_sensitivity=Case_Sensitivity.Insensitive . should_equal ['a', 'b', 'c']
|
||||
'abc'.split '' case_sensitivity=Case_Sensitivity.Insensitive . should_fail_with Illegal_Argument
|
||||
|
||||
Test.specify "should be able to split the text on Regex patterns" <|
|
||||
group_builder.specify "should be able to split the text on Regex patterns" <|
|
||||
"cababdabe" . split "ab" use_regex=True . should_equal ["c", "", "d", "e"]
|
||||
"cababdabe" . split "(ab)+" use_regex=True . should_equal ["c", "d", "e"]
|
||||
"abc" . split "[a-z]" use_regex=True . should_equal ["", "", "", ""]
|
||||
@ -280,63 +280,63 @@ spec =
|
||||
"".split "a" use_regex=True . should_equal [""]
|
||||
'abc'.split '' use_regex=True . should_fail_with Illegal_Argument
|
||||
|
||||
Test.specify "should be able to split the text on Regex patterns, case-insensitively" <|
|
||||
group_builder.specify "should be able to split the text on Regex patterns, case-insensitively" <|
|
||||
"CAbaBDaBe" . split "ab" use_regex=True case_sensitivity=Case_Sensitivity.Insensitive . should_equal ["C", "", "D", "e"]
|
||||
"caBAbdAbe" . split "(ab)+" use_regex=True case_sensitivity=Case_Sensitivity.Insensitive . should_equal ["c", "d", "e"]
|
||||
"ABc" . split "[a-z]" use_regex=True case_sensitivity=Case_Sensitivity.Insensitive . should_equal ["", "", "", ""]
|
||||
|
||||
Test.specify "regex and non-regex `split` handle accented grapheme splitting differently" <|
|
||||
group_builder.specify "regex and non-regex `split` handle accented grapheme splitting differently" <|
|
||||
'aśbs\u{301}c'.split 'ś' use_regex=True . should_equal ['a', 'bs\u{301}c']
|
||||
'aśbs\u{301}c'.split 'ś' . should_equal ['a', 'b', 'c']
|
||||
|
||||
Test.specify "should be able to split the text on UTF-8 whitespace" <|
|
||||
group_builder.specify "should be able to split the text on UTF-8 whitespace" <|
|
||||
utf_8_whitespace.split "\s+" use_regex=True . should_equal utf_8_whitespace_split
|
||||
'abc def\tghi'.split '\\s+' use_regex=True . should_equal ["abc", "def", "ghi"]
|
||||
|
||||
Test.specify 'should be able to split with a vector of strings' <|
|
||||
group_builder.specify 'should be able to split with a vector of strings' <|
|
||||
'azbzczdzezfzg'.split ['b', 'zez'] . should_equal ['az', 'zczd', 'fzg']
|
||||
'a1b2c3d4e5f6g7h8'.split ['c', '5'] . should_equal ['a1b2', '3d4e', 'f6g7h8']
|
||||
|
||||
Test.specify 'should handle overlapping delimiters correctly' <|
|
||||
group_builder.specify 'should handle overlapping delimiters correctly' <|
|
||||
'blah x 123'.split [' ', ' x ' , 'x'] . should_equal ['blah', '', '', '123']
|
||||
'abcdef'.split ['bc', 'cd'] . should_equal ['a', 'def']
|
||||
'abcdef'.split ['cd', 'bc'] . should_equal ['a', 'def']
|
||||
'abcdef'.split ['bc', 'bcd'] . should_equal ['a', 'def']
|
||||
'abcdef'.split ['bcd', 'bc'] . should_equal ['a', 'ef']
|
||||
|
||||
Test.specify 'should be able to split with a vector of strings, case insensitively' <|
|
||||
group_builder.specify 'should be able to split with a vector of strings, case insensitively' <|
|
||||
'azBZczDZEZFzg'.split ['B', 'zez'] case_sensitivity=Case_Sensitivity.Insensitive . should_equal ['az', 'ZczD', 'Fzg']
|
||||
'blah X 123'.split [' ', ' x ' , 'x'] case_sensitivity=Case_Sensitivity.Insensitive . should_equal ['blah', '', '', '123']
|
||||
'A1B2C3D4E5F6G7H8'.split ['c', '5'] case_sensitivity=Case_Sensitivity.Insensitive . should_equal ['A1B2', '3D4E', 'F6G7H8']
|
||||
|
||||
Test.specify 'should be able to split with a vector of strings, using regexes' <|
|
||||
group_builder.specify 'should be able to split with a vector of strings, using regexes' <|
|
||||
'a1b2c3d4e5f6g7h8'.split ['[cde]', '[456]'] use_regex=True . should_equal ['a1b2', '3', '', '', '', 'f', 'g7h8']
|
||||
'abcde1fghij2klmnop'.split ["\d", '[hm]'] use_regex=True . should_equal ['abcde', 'fg', 'ij', 'kl', 'nop']
|
||||
|
||||
Test.specify "should handle unicode normalization the same for single and multiple delimiters" <|
|
||||
group_builder.specify "should handle unicode normalization the same for single and multiple delimiters" <|
|
||||
'aśbs\u0301c'.split 'ś' . should_equal ['a', 'b', 'c']
|
||||
'aśbs\u0301c'.split ['ś'] . should_equal ['a', 'b', 'c']
|
||||
'aśbs\u0301c'.split 's\u0301' . should_equal ['a', 'b', 'c']
|
||||
'aśbs\u0301c'.split ['s\u0301'] . should_equal ['a', 'b', 'c']
|
||||
'aśbs\u0301cdef'.split ['ś', 'de'] . should_equal ['a', 'b', 'c', 'f']
|
||||
|
||||
Test.specify "should handle unicode normalization the same for single and multiple delimiters, case-insensitively" <|
|
||||
group_builder.specify "should handle unicode normalization the same for single and multiple delimiters, case-insensitively" <|
|
||||
'aśbS\u0301c'.split 'ś' case_sensitivity=Case_Sensitivity.Insensitive . should_equal ['a', 'b', 'c']
|
||||
'aśbS\u0301c'.split ['ś'] case_sensitivity=Case_Sensitivity.Insensitive . should_equal ['a', 'b', 'c']
|
||||
'aŚbS\u0301c'.split 's\u0301' case_sensitivity=Case_Sensitivity.Insensitive . should_equal ['a', 'b', 'c']
|
||||
'aśbS\u0301c'.split ['s\u0301'] case_sensitivity=Case_Sensitivity.Insensitive . should_equal ['a', 'b', 'c']
|
||||
'aŚbS\u0301cdef'.split ['ś', 'de'] case_sensitivity=Case_Sensitivity.Insensitive . should_equal ['a', 'b', 'c', 'f']
|
||||
|
||||
Test.specify "should handle splitting the same for the special case of a 1-element vector" <|
|
||||
group_builder.specify "should handle splitting the same for the special case of a 1-element vector" <|
|
||||
'abcdefgh'.split 'c' . should_equal ['ab', 'defgh']
|
||||
'abcdefgh'.split ['c'] . should_equal ['ab', 'defgh']
|
||||
'abcdefgh'.split ['c', 'q'] . should_equal ['ab', 'defgh']
|
||||
|
||||
Test.specify "should split on the leftmost delimiter in the case of a tie" <|
|
||||
group_builder.specify "should split on the leftmost delimiter in the case of a tie" <|
|
||||
'abcdefgh'.split ['c', 'cd'] . should_equal ['ab', 'defgh']
|
||||
'abcdefgh'.split ['cd', 'c'] . should_equal ['ab', 'efgh']
|
||||
|
||||
Test.specify "should throw Illegal_Argument for a bad or empty delimiter" <|
|
||||
group_builder.specify "should throw Illegal_Argument for a bad or empty delimiter" <|
|
||||
'abc'.split '' . should_fail_with Illegal_Argument
|
||||
'abc'.split [] . should_fail_with Illegal_Argument
|
||||
'abc'.split ['a', ''] . should_fail_with Illegal_Argument
|
||||
@ -347,16 +347,16 @@ spec =
|
||||
'abc'.split ['a', ''] case_sensitivity=Case_Sensitivity.Insensitive . should_fail_with Illegal_Argument
|
||||
'abc'.split 3 case_sensitivity=Case_Sensitivity.Insensitive . should_fail_with Illegal_Argument
|
||||
|
||||
Test.specify "examples should be correct" <|
|
||||
group_builder.specify "examples should be correct" <|
|
||||
"Namespace::package::package::Type".split "::" . should_equal ["Namespace", "package", "package", "Type"]
|
||||
"abc--def==>ghi".split "[-=>]+" use_regex=True . should_equal ["abc", "def", "ghi"]
|
||||
'abc def\tghi'.split '\\s+' use_regex=True . should_equal ["abc", "def", "ghi"]
|
||||
|
||||
Test.specify "should convert any type to text automatically and using provided methods" <|
|
||||
group_builder.specify "should convert any type to text automatically and using provided methods" <|
|
||||
t = Auto.Value (Manual.Value 123) . to_text
|
||||
t.should_equal "(Auto.Value [[[MyREP 123]]])"
|
||||
|
||||
Test.specify "should escape special characters when debug-printing text" <|
|
||||
group_builder.specify "should escape special characters when debug-printing text" <|
|
||||
text_1 = '''
|
||||
foo
|
||||
bar\r\tbaz
|
||||
@ -364,7 +364,7 @@ spec =
|
||||
text_2 = '\n\0\t\a\b\f\r\v\e\'\\'
|
||||
text_2.pretty.should_equal "'\n\0\t\a\b\f\r\v\e\'\\'"
|
||||
|
||||
Test.specify "should return text as is when converting to text" <|
|
||||
group_builder.specify "should return text as is when converting to text" <|
|
||||
text_1 = '''
|
||||
foo
|
||||
bar\r\tbaz
|
||||
@ -372,7 +372,7 @@ spec =
|
||||
text_2 = '\n\t\a\b\f\r\v\e\''
|
||||
text_2.to_text.should_equal text_2
|
||||
|
||||
Test.specify "should allow taking or dropping every other character" <|
|
||||
group_builder.specify "should allow taking or dropping every other character" <|
|
||||
"ABCDE".take (Every 1) . should_equal "ABCDE"
|
||||
"ABCDE".take (Every 2) . should_equal "ACE"
|
||||
"ABCD".take (Every 2) . should_equal "AC"
|
||||
@ -395,7 +395,7 @@ spec =
|
||||
"ABCDEFGH".drop (Every 3 first=1) . should_equal "ACDFG"
|
||||
"ABCDEFGHI".drop (Every 3 first=1) . should_equal "ACDFGI"
|
||||
|
||||
Test.specify "should allow taking or dropping a random sample of a substring" <|
|
||||
group_builder.specify "should allow taking or dropping a random sample of a substring" <|
|
||||
"AAAAA".take (Sample 3) . should_equal "AAA"
|
||||
"AAAAA".drop (Sample 3) . should_equal "AA"
|
||||
|
||||
@ -423,7 +423,7 @@ spec =
|
||||
"ABCD".drop (Sample 2 seed)
|
||||
samples_2.should_contain_the_same_elements_as ["AB", "AC", "AD", "BC", "CD", "BD"]
|
||||
|
||||
Test.specify "should allow taking or dropping many indices or subranges (possibly overlapping)" <|
|
||||
group_builder.specify "should allow taking or dropping many indices or subranges (possibly overlapping)" <|
|
||||
"123"*1000 . take (By_Index (Vector.new 3000 ix-> 2999-ix)) . should_equal "321"*1000
|
||||
"123"*1000 . take (By_Index (Vector.new 3000 _-> 0)) . should_equal "1"*3000
|
||||
"123456"*1000 . take (By_Index (Vector.new 100 ix-> Range.Between 6*ix+1 6*ix+3)) . should_equal "23"*100
|
||||
@ -450,7 +450,7 @@ spec =
|
||||
"0123456789".drop (By_Index [4.up_to 6, 0.up_to 4, 0, 0]) . should_equal "6789"
|
||||
"0123456789".drop (By_Index [2.up_to 5, 0.up_to 3, 0, 0]) . should_equal "56789"
|
||||
|
||||
Test.specify "should allow selecting substrings by characters" <|
|
||||
group_builder.specify "should allow selecting substrings by characters" <|
|
||||
txt = kshi + facepalm + accent_1 + accent_2
|
||||
txt.take (First 2) . should_equal (kshi + facepalm)
|
||||
txt.drop (First 2) . should_equal (accent_1 + accent_2)
|
||||
@ -479,7 +479,7 @@ spec =
|
||||
txt.take (By_Index [0, 0, 0.up_to 2]) . should_equal (kshi + kshi + kshi + facepalm)
|
||||
txt.drop (By_Index [2.up_to 4, 0.up_to 2]) . should_equal ""
|
||||
|
||||
Test.specify "take should work as in the examples" <|
|
||||
group_builder.specify "take should work as in the examples" <|
|
||||
"Hello World!".take First . should_equal "H"
|
||||
"Hello World!".take (First 5) . should_equal "Hello"
|
||||
"Hello World!".take (First 100) . should_equal "Hello World!"
|
||||
@ -512,7 +512,7 @@ spec =
|
||||
"Hello World!".take (By_Index [0.up_to 3, 6, 6.up_to 12 . with_step 2]) . should_equal "HelWWrd"
|
||||
"Hello World!".take (Sample 3 seed=42) . should_equal "l d"
|
||||
|
||||
Test.specify "take should report errors for start indices out of bounds but just go till the end if the end index is OOB" <|
|
||||
group_builder.specify "take should report errors for start indices out of bounds but just go till the end if the end index is OOB" <|
|
||||
txt = "Hello World!"
|
||||
txt.take (0.up_to 14) . should_equal txt
|
||||
txt.take (6.up_to 100) . should_equal "World!"
|
||||
@ -543,7 +543,7 @@ spec =
|
||||
txt.take (By_Index [0.up_to 2 . with_step 2, txt.length.up_to 100 . with_step 2]) . should_fail_with Index_Out_Of_Bounds
|
||||
"".take (By_Index 0) . should_fail_with Index_Out_Of_Bounds
|
||||
|
||||
Test.specify "take should work on grapheme clusters" <|
|
||||
group_builder.specify "take should work on grapheme clusters" <|
|
||||
txt_1 = 'He\u0302llo\u0308 Wo\u0301rld!'
|
||||
txt_2 = 'He\u0302llo\u0308 Wo\u0308rld!'
|
||||
txt_1.take (Every 2) . should_equal 'Hlo\u0308Wrd'
|
||||
@ -572,7 +572,7 @@ spec =
|
||||
txt_2.take (3.up_to 5) . should_equal 'lo\u{308}'
|
||||
txt_2.take (5.up_to 12) . should_equal ' Wo\u{308}rld!'
|
||||
|
||||
Test.specify "take should work on emojis" <|
|
||||
group_builder.specify "take should work on emojis" <|
|
||||
'✨🚀🚧😍😃😎😙😉☺'.take First . should_equal '✨'
|
||||
'✨🚀🚧😍😃😎😙😉☺'.take (First 2) . should_equal '✨🚀'
|
||||
'✨🚀🚧😍😃😎😙😉☺'.take . should_equal '✨'
|
||||
@ -587,7 +587,7 @@ spec =
|
||||
'✨🚀🚧😍😃😍😎😙😉☺'.take (While c->c!="😃") . should_equal '✨🚀🚧😍'
|
||||
'✨🚀🚧😍😃😍😎😙😉☺'.take (3.up_to 6) . should_equal '😍😃😍'
|
||||
|
||||
Test.specify "take should correctly handle edge cases" <|
|
||||
group_builder.specify "take should correctly handle edge cases" <|
|
||||
"ABC".take . should_equal "A"
|
||||
|
||||
"".take First . should_equal ""
|
||||
@ -624,7 +624,7 @@ spec =
|
||||
"".take (Sample 0) . should_equal ""
|
||||
"".take (Sample 100) . should_equal ""
|
||||
|
||||
Test.specify "drop should work as in the examples" <|
|
||||
group_builder.specify "drop should work as in the examples" <|
|
||||
"Hello World!".drop First . should_equal "ello World!"
|
||||
"Hello World!".drop (First 5) . should_equal " World!"
|
||||
"Hello World!".drop (First 100) . should_equal ""
|
||||
@ -656,7 +656,7 @@ spec =
|
||||
"Hello World!".drop (By_Index [0.up_to 3, 6, 6.up_to 12 . with_step 2]) . should_equal "lo ol!"
|
||||
"Hello World!".drop (Sample 3 seed=42) . should_equal "HeloWorl!"
|
||||
|
||||
Test.specify "drop should report errors for start indices out of bounds but just go till the end if the end index is OOB" <|
|
||||
group_builder.specify "drop should report errors for start indices out of bounds but just go till the end if the end index is OOB" <|
|
||||
txt = "Hello World!"
|
||||
txt.drop (0.up_to 14) . should_equal ""
|
||||
txt.drop (First 100) . should_equal ""
|
||||
@ -678,7 +678,7 @@ spec =
|
||||
txt.drop (5.up_to 100 . with_step 2) . should_equal "HelloWrd"
|
||||
txt.drop (By_Index [0, 1, 0, 5.up_to 100 . with_step 2]) . should_equal "lloWrd"
|
||||
|
||||
Test.specify "drop should work on grapheme clusters" <|
|
||||
group_builder.specify "drop should work on grapheme clusters" <|
|
||||
txt_1 = 'He\u0302llo\u0308 Wo\u0301rld!'
|
||||
txt_2 = 'He\u0302llo\u0308 Wo\u0308rld!'
|
||||
txt_1.drop (Every 2) . should_equal 'e\u0302l o\u0301l!'
|
||||
@ -707,7 +707,7 @@ spec =
|
||||
txt_2.drop (3.up_to 5) . should_equal 'He\u{302}l Wo\u{308}rld!'
|
||||
txt_2.drop (5.up_to 12) . should_equal 'He\u{302}llo\u{308}'
|
||||
|
||||
Test.specify "drop should work on emojis" <|
|
||||
group_builder.specify "drop should work on emojis" <|
|
||||
'✨🚀🚧😍😃😎😙😉☺'.drop First . should_equal '🚀🚧😍😃😎😙😉☺'
|
||||
'✨🚀🚧😍😃😎😙😉☺'.drop (First 2) . should_equal '🚧😍😃😎😙😉☺'
|
||||
'✨🚀🚧😍😃😎😙😉☺'.drop . should_equal '🚀🚧😍😃😎😙😉☺'
|
||||
@ -721,7 +721,7 @@ spec =
|
||||
'✨🚀🚧😍😃😍😎😙😉☺'.drop (While c->c!="😃") . should_equal '😃😍😎😙😉☺'
|
||||
'✨🚀🚧😍😃😍😎😙😉☺'.drop (3.up_to 6) . should_equal '✨🚀🚧😎😙😉☺'
|
||||
|
||||
Test.specify "drop should correctly handle edge cases" <|
|
||||
group_builder.specify "drop should correctly handle edge cases" <|
|
||||
"ABC".drop . should_equal "BC"
|
||||
|
||||
"".drop First . should_equal ""
|
||||
@ -757,7 +757,7 @@ spec =
|
||||
"".drop (Sample 0) . should_equal ""
|
||||
"".drop (Sample 100) . should_equal ""
|
||||
|
||||
Test.specify "take and drop should gracefully handle missing constructor arguments" <|
|
||||
group_builder.specify "take and drop should gracefully handle missing constructor arguments" <|
|
||||
"".take "FOO" . should_fail_with Type_Error
|
||||
"".drop "FOO" . should_fail_with Type_Error
|
||||
|
||||
@ -776,7 +776,7 @@ spec =
|
||||
# Double-check that constructors of _unexpected_ types are still yielding a type error.
|
||||
"".take (Case_Sensitivity.Insensitive ...) . should_fail_with Type_Error
|
||||
|
||||
Test.specify "should correctly convert character case" <|
|
||||
group_builder.specify "should correctly convert character case" <|
|
||||
"FooBar Baz".to_case Case.Lower . should_equal "foobar baz"
|
||||
"FooBar Baz".to_case Case.Upper . should_equal "FOOBAR BAZ"
|
||||
|
||||
@ -806,15 +806,15 @@ spec =
|
||||
"123".to_case Case.Upper . should_equal "123"
|
||||
"abc123".to_case Case.Upper . should_equal "ABC123"
|
||||
|
||||
Test.specify "should dump characters to a vector" <|
|
||||
group_builder.specify "should dump characters to a vector" <|
|
||||
kshi_chars = kshi.char_vector
|
||||
kshi_chars . should_equal [2325, 2381, 2359, 2367]
|
||||
|
||||
Test.specify "should convert a vector of characters to text" <|
|
||||
group_builder.specify "should convert a vector of characters to text" <|
|
||||
kshi_chars = [2325, 2381, 2359, 2367]
|
||||
Text.from_char_vector kshi_chars . should_equal kshi
|
||||
|
||||
Test.specify "should insert text at a non-negative index position" <|
|
||||
group_builder.specify "should insert text at a non-negative index position" <|
|
||||
"Hello World!".insert 0 " Cruel" . should_equal " CruelHello World!"
|
||||
"Hello World!".insert 5 " Cruel" . should_equal "Hello Cruel World!"
|
||||
"Hello World!".insert ("Hello World!".length - 1) " Cruel" . should_equal "Hello World Cruel!"
|
||||
@ -825,11 +825,11 @@ spec =
|
||||
txt.insert 2 " Cruel" . should_equal (kshi + facepalm + " Cruel" + accent_1)
|
||||
txt.insert 3 " Cruel" . should_equal (kshi + facepalm + accent_1 + " Cruel")
|
||||
|
||||
Test.specify "should report Index_Out_Of_Bounds.Error when inserting text at an invalid non-negative index position" <|
|
||||
group_builder.specify "should report Index_Out_Of_Bounds.Error when inserting text at an invalid non-negative index position" <|
|
||||
"Hello World!".insert ("Hello World!".length + 1) "foo" . should_fail_with Index_Out_Of_Bounds
|
||||
(kshi + facepalm + accent_1).insert 4 "foo" . should_fail_with Index_Out_Of_Bounds
|
||||
|
||||
Test.specify "should insert text at a negative index position" <|
|
||||
group_builder.specify "should insert text at a negative index position" <|
|
||||
"Hello World!".insert -1 " Cruel" . should_equal "Hello World! Cruel"
|
||||
"Hello World!".insert -5 " Cruel" . should_equal "Hello Wo Cruelrld!"
|
||||
"Hello World!".insert -("Hello World!".length) " Cruel" . should_equal "H Cruelello World!"
|
||||
@ -838,12 +838,12 @@ spec =
|
||||
txt.insert -1 " Cruel" . should_equal (txt + " Cruel")
|
||||
txt.insert -(txt.length) " Cruel" . should_equal (kshi + " Cruel" + facepalm + accent_1)
|
||||
|
||||
Test.specify "should report Index_Out_Of_Bounds.Error when inserting text at an invalid negative index position" <|
|
||||
group_builder.specify "should report Index_Out_Of_Bounds.Error when inserting text at an invalid negative index position" <|
|
||||
"Hello World!".insert -("Hello World!".length + 2) " Cruel" . should_fail_with Index_Out_Of_Bounds
|
||||
txt = kshi + facepalm + accent_1
|
||||
txt.insert -(txt.length + 2) " Cruel" . should_fail_with Index_Out_Of_Bounds
|
||||
|
||||
Test.specify "should be able to check by index if is a digit" <|
|
||||
group_builder.specify "should be able to check by index if is a digit" <|
|
||||
str = kshi + "A12" + accent_2
|
||||
str.is_digit . should_be_false
|
||||
str.is_digit 1 . should_be_false
|
||||
@ -852,7 +852,7 @@ spec =
|
||||
str.is_digit 4 . should_be_false
|
||||
str.is_digit 5 . should_fail_with Index_Out_Of_Bounds
|
||||
|
||||
Test.specify "should be able to check by negative index if is a digit" <|
|
||||
group_builder.specify "should be able to check by negative index if is a digit" <|
|
||||
str = kshi + "A12" + accent_2
|
||||
str.is_digit -1 . should_be_false
|
||||
str.is_digit -2 . should_be_true
|
||||
@ -861,7 +861,7 @@ spec =
|
||||
str.is_digit -5 . should_be_false
|
||||
str.is_digit -100 . should_fail_with Index_Out_Of_Bounds
|
||||
|
||||
Test.specify "should be able to check if a text consists only of whitespace" <|
|
||||
group_builder.specify "should be able to check if a text consists only of whitespace" <|
|
||||
' \t\n'.is_whitespace . should_be_true
|
||||
'AB'.is_whitespace . should_be_false
|
||||
' A '.is_whitespace . should_be_false
|
||||
@ -870,12 +870,12 @@ spec =
|
||||
# The Unicode Zero Width Space is not considered whitespace
|
||||
'\u{200b}'.is_whitespace . should_be_false
|
||||
|
||||
Test.specify "should return a dataflow error when checking is digit for out of bounds" <|
|
||||
group_builder.specify "should return a dataflow error when checking is digit for out of bounds" <|
|
||||
str = kshi + "A12" + accent_2
|
||||
str.at -6 . should_fail_with Index_Out_Of_Bounds
|
||||
str.at 5 . should_fail_with Index_Out_Of_Bounds
|
||||
|
||||
Test.specify "should be able to reverse characters" <|
|
||||
group_builder.specify "should be able to reverse characters" <|
|
||||
"Hello World!".reverse . should_equal "!dlroW olleH"
|
||||
|
||||
"".reverse . should_equal ""
|
||||
@ -886,7 +886,7 @@ spec =
|
||||
'ほげほげ'.reverse . should_equal 'げほげほ'
|
||||
'\u{10000}'.reverse . should_equal '\u{10000}'
|
||||
|
||||
Test.specify "should allow to iterate over characters" <|
|
||||
group_builder.specify "should allow to iterate over characters" <|
|
||||
str = kshi + accent_1 + accent_2 + 'abc'
|
||||
builder = Vector.new_builder
|
||||
str.each builder.append
|
||||
@ -896,7 +896,7 @@ spec =
|
||||
'a'.each builder2.append
|
||||
builder2.to_vector . should_equal ['a']
|
||||
|
||||
Test.specify "should check for contains using Unicode normalization" <|
|
||||
group_builder.specify "should check for contains using Unicode normalization" <|
|
||||
"Hello".contains "ell" . should_be_true
|
||||
"Hello".contains "eLl" . should_be_false
|
||||
"Hello".contains "ell" Case_Sensitivity.Default . should_be_true
|
||||
@ -934,7 +934,7 @@ spec =
|
||||
's\u{301}' . contains 'ś' . should_be_true
|
||||
'ś' . contains 's\u{301}' . should_be_true
|
||||
|
||||
Test.specify "should allow for case-insensitive contains checks" <|
|
||||
group_builder.specify "should allow for case-insensitive contains checks" <|
|
||||
"Hello!".contains 'LO' Case_Sensitivity.Insensitive . should_be_true
|
||||
"FoObar" . contains "foo" Case_Sensitivity.Insensitive . should_be_true
|
||||
"aaaIAAA" . contains "i" Case_Sensitivity.Insensitive . should_be_true
|
||||
@ -947,7 +947,7 @@ spec =
|
||||
"Straße" . contains "ss" Case_Sensitivity.Insensitive . should_be_true
|
||||
"Strasse" . contains "ß" Case_Sensitivity.Insensitive . should_be_true
|
||||
|
||||
Test.specify "should check for starts_with using Unicode normalization" <|
|
||||
group_builder.specify "should check for starts_with using Unicode normalization" <|
|
||||
"Hello".starts_with "He" . should_be_true
|
||||
"Hello".starts_with "he" . should_be_false
|
||||
"Hello".starts_with "He" Case_Sensitivity.Default . should_be_true
|
||||
@ -971,12 +971,12 @@ spec =
|
||||
|
||||
"Hello!".starts_with "he" . should_be_false
|
||||
|
||||
Test.specify "starts_with should work as shown in the examples" <|
|
||||
group_builder.specify "starts_with should work as shown in the examples" <|
|
||||
"Hello!".starts_with "Hello" . should_be_true
|
||||
"Hello!".starts_with "hello" . should_be_false
|
||||
"Hello!".starts_with "hello" Case_Sensitivity.Insensitive . should_be_true
|
||||
|
||||
Test.specify "should allow for case-insensitive starts_with checks" <|
|
||||
group_builder.specify "should allow for case-insensitive starts_with checks" <|
|
||||
"Hello".starts_with "he" Case_Sensitivity.Insensitive . should_be_true
|
||||
|
||||
"Ściana".starts_with 's\u{301}' Case_Sensitivity.Insensitive . should_be_true
|
||||
@ -995,7 +995,7 @@ spec =
|
||||
|
||||
"Hello!".starts_with "he" Case_Sensitivity.Insensitive . should_be_true
|
||||
|
||||
Test.specify "should check for ends_with using Unicode normalization" <|
|
||||
group_builder.specify "should check for ends_with using Unicode normalization" <|
|
||||
"Hello".ends_with "lo" . should_be_true
|
||||
"Hello".ends_with "LO" . should_be_false
|
||||
"Hello".ends_with "lo" Case_Sensitivity.Default . should_be_true
|
||||
@ -1015,12 +1015,12 @@ spec =
|
||||
"" . ends_with "" . should_be_true
|
||||
"foo foo foo" . ends_with "foo" . should_be_true
|
||||
|
||||
Test.specify "ends_with should work as shown in the examples" <|
|
||||
group_builder.specify "ends_with should work as shown in the examples" <|
|
||||
"Hello World".ends_with "World" . should_be_true
|
||||
"Hello World".ends_with "world" . should_be_false
|
||||
"Hello World".ends_with "world" Case_Sensitivity.Insensitive . should_be_true
|
||||
|
||||
Test.specify "should allow for case-insensitive ends_with checks" <|
|
||||
group_builder.specify "should allow for case-insensitive ends_with checks" <|
|
||||
"Hello".ends_with "LO" Case_Sensitivity.Insensitive . should_be_true
|
||||
|
||||
"rzeczywistość".ends_with 'C\u{301}' Case_Sensitivity.Insensitive . should_be_true
|
||||
@ -1037,7 +1037,7 @@ spec =
|
||||
"" . ends_with "" Case_Sensitivity.Insensitive . should_be_true
|
||||
"fOo FOO fOo" . ends_with "FoO" Case_Sensitivity.Insensitive . should_be_true
|
||||
|
||||
Test.specify "should allow to pad a text" <|
|
||||
group_builder.specify "should allow to pad a text" <|
|
||||
"Hello World!".pad 15 . should_equal "Hello World! "
|
||||
"HELLO".pad 9 "AB" . should_equal "HELLOABAB"
|
||||
"HELLO".pad 8 "AB" . should_equal "HELLOABA"
|
||||
@ -1067,7 +1067,7 @@ spec =
|
||||
cluster.
|
||||
'e'.pad 7 '\u{301}' . length . should_equal 1
|
||||
|
||||
Test.specify "should allow to trim a text" <|
|
||||
group_builder.specify "should allow to trim a text" <|
|
||||
" Hello! ".trim . should_equal "Hello!"
|
||||
" Hello! ".trim Location.Start . should_equal "Hello! "
|
||||
" Hello! ".trim Location.End . should_equal " Hello!"
|
||||
@ -1105,12 +1105,12 @@ spec =
|
||||
' \u{301} '.trim . should_equal ' \u{301}'
|
||||
' \u{301}'.trim . should_equal ' \u{301}'
|
||||
|
||||
Test.specify "should allow repeating as in the examples" <|
|
||||
group_builder.specify "should allow repeating as in the examples" <|
|
||||
"ABBA".repeat 5 . should_equal "ABBAABBAABBAABBAABBA"
|
||||
"A".repeat 5 . should_equal "AAAAA"
|
||||
"Hello ".repeat 2 . should_equal "Hello Hello "
|
||||
|
||||
Test.specify "should allow more general repeating" <|
|
||||
group_builder.specify "should allow more general repeating" <|
|
||||
'He\u{302}llo\u{308}'.repeat 1 . should_equal 'He\u{302}llo\u{308}'
|
||||
'He\u{302}llo\u{308}'.repeat 3 . should_equal 'He\u{302}llo\u{308}He\u{302}llo\u{308}He\u{302}llo\u{308}'
|
||||
'He\u{302}llo\u{308}'.repeat 0 . should_equal ''
|
||||
@ -1120,12 +1120,12 @@ spec =
|
||||
|
||||
'✨🚀🚧'.repeat 2 . should_equal '✨🚀🚧✨🚀🚧'
|
||||
|
||||
Test.specify "should allow repeating using * as in the examples" <|
|
||||
group_builder.specify "should allow repeating using * as in the examples" <|
|
||||
"ABBA"*5 . should_equal "ABBAABBAABBAABBAABBA"
|
||||
"A"*5 . should_equal "AAAAA"
|
||||
"Hello "*2 . should_equal "Hello Hello "
|
||||
|
||||
Test.specify "should allow more general repeating using *" <|
|
||||
group_builder.specify "should allow more general repeating using *" <|
|
||||
'He\u{302}llo\u{308}'*1 . should_equal 'He\u{302}llo\u{308}'
|
||||
'He\u{302}llo\u{308}'*3 . should_equal 'He\u{302}llo\u{308}He\u{302}llo\u{308}He\u{302}llo\u{308}'
|
||||
'He\u{302}llo\u{308}'*0 . should_equal ''
|
||||
@ -1135,7 +1135,7 @@ spec =
|
||||
|
||||
'✨🚀🚧'*2 . should_equal '✨🚀🚧✨🚀🚧'
|
||||
|
||||
Test.specify "locate should work as shown in examples" <|
|
||||
group_builder.specify "locate should work as shown in examples" <|
|
||||
example_1 =
|
||||
"Hello World!".locate "J" . should_equal Nothing
|
||||
"Hello World!".locate "o" . should_equal (Span.Value (4.up_to 5) "Hello World!")
|
||||
@ -1196,7 +1196,7 @@ spec =
|
||||
example_6
|
||||
example_7
|
||||
|
||||
Test.specify "should allow to locate occurrences within a text" <|
|
||||
group_builder.specify "should allow to locate occurrences within a text" <|
|
||||
"Hello World!".locate_all "J" . should_equal []
|
||||
"Hello World!".locate_all "o" . map .start . should_equal [4, 7]
|
||||
|
||||
@ -1215,7 +1215,7 @@ spec =
|
||||
abc.locate "" mode=Matching_Mode.Last . should_equal (Span.Value (3.up_to 3) abc)
|
||||
abc.locate_all "" . should_equal [Span.Value (0.up_to 0) abc, Span.Value (1.up_to 1) abc, Span.Value (2.up_to 2) abc, Span.Value (3.up_to 3) abc]
|
||||
|
||||
Test.specify "should allow to get indexes of values within a text" <|
|
||||
group_builder.specify "should allow to get indexes of values within a text" <|
|
||||
"Hello World!".index_of "o" . should_equal 4
|
||||
"Hello World!".index_of "o" start=5 . should_equal 7
|
||||
"Hello World!".index_of "o" start=-5 . should_equal 7
|
||||
@ -1233,7 +1233,7 @@ spec =
|
||||
abc.index_of "" start=3 . should_equal 3
|
||||
abc.last_index_of "" . should_equal 3
|
||||
|
||||
Test.specify "should allow case-insensitive matching in locate" <|
|
||||
group_builder.specify "should allow case-insensitive matching in locate" <|
|
||||
hello = "Hello WORLD!"
|
||||
case_insensitive = Case_Sensitivity.Insensitive
|
||||
hello.locate "world" . should_equal Nothing
|
||||
@ -1284,7 +1284,7 @@ spec =
|
||||
abc.locate "" case_sensitivity=case_insensitive mode=Matching_Mode.Last . should_equal (Span.Value (3.up_to 3) abc)
|
||||
abc.locate_all "" case_sensitivity=case_insensitive . should_equal [Span.Value (0.up_to 0) abc, Span.Value (1.up_to 1) abc, Span.Value (2.up_to 2) abc, Span.Value (3.up_to 3) abc]
|
||||
|
||||
Test.specify "find should match regexes" <|
|
||||
group_builder.specify "find should match regexes" <|
|
||||
hello = "Hello World!"
|
||||
|
||||
hello.find ".o" Case_Sensitivity.Insensitive . text 0 . should_equal "lo"
|
||||
@ -1295,18 +1295,18 @@ spec =
|
||||
## Regex matching does not do case folding
|
||||
"Strasse".find "ß" Case_Sensitivity.Insensitive . should_equal Nothing
|
||||
|
||||
Test.specify "find should produce correct spans" <|
|
||||
group_builder.specify "find should produce correct spans" <|
|
||||
"Hello World!".find ".o" Case_Sensitivity.Insensitive . span 0 . should_equal (Span.Value (3.up_to 5) "Hello World!")
|
||||
"Hello World!".find_all ".o" . map (match-> match.span 0) . should_equal [Span.Value (3.up_to 5) "Hello World!", Span.Value (6.up_to 8) "Hello World!"]
|
||||
"foobar".find "BAR" Case_Sensitivity.Insensitive . span 0 . should_equal (Span.Value (3.up_to 6) "foobar")
|
||||
|
||||
Test.specify "find_all should handle 0 length matches" <|
|
||||
group_builder.specify "find_all should handle 0 length matches" <|
|
||||
"Hello World".find_all ".*" . map (_.text) . should_equal ["Hello World", ""]
|
||||
"".find_all ".*" . map (_.text) . should_equal [""]
|
||||
"Hello World".find_all ".*(?=.)" . map (_.text) . should_equal ["Hello Worl", ""]
|
||||
"".find_all ".*(?=.)" . map (_.text) . should_equal []
|
||||
|
||||
Test.specify "should handle accents and other multi-point graphemes" <|
|
||||
group_builder.specify "should handle accents and other multi-point graphemes" <|
|
||||
accents = 'a\u{301}e\u{301}o\u{301}he\u{301}h'
|
||||
|
||||
accents.find 'h' . text 0 . should_equal 'h'
|
||||
@ -1324,16 +1324,16 @@ spec =
|
||||
accents.find 'h' . text 0 . should_equal 'h'
|
||||
accents.find 'e\u{301}' . text 0 . should_equal 'e\u{301}'
|
||||
|
||||
Test.specify "should correctly handle regex edge cases in `find`" <|
|
||||
group_builder.specify "should correctly handle regex edge cases in `find`" <|
|
||||
"".find "foo" . should_equal Nothing
|
||||
"".find_all "foo" . should_equal []
|
||||
|
||||
Test.specify "should handle overlapping matches as shown in the examples" <|
|
||||
group_builder.specify "should handle overlapping matches as shown in the examples" <|
|
||||
"aaa".locate "aa" mode=Matching_Mode.Last case_sensitivity=Case_Sensitivity.Sensitive . should_equal (Span.Value (1.up_to 3) "aaa")
|
||||
|
||||
"aaa aaa".locate "aa" mode=Matching_Mode.Last case_sensitivity=Case_Sensitivity.Sensitive . should_equal (Span.Value (5.up_to 7) "aaa aaa")
|
||||
|
||||
Test.specify "should default to exact matching for locate but regex for match" <|
|
||||
group_builder.specify "should default to exact matching for locate but regex for match" <|
|
||||
txt = "aba[bc]adacae"
|
||||
"ab".locate "ab" . should_equal (Span.Value (0.up_to 2) "ab")
|
||||
"ab".locate "a[bc]" . should_equal Nothing
|
||||
@ -1349,41 +1349,41 @@ spec =
|
||||
txt.find "a[bc]" . text 0 . should_equal "ab"
|
||||
txt.find_all "a[bc]" . map (match-> match.text 0) . should_equal ["ab", "ac"]
|
||||
|
||||
Test.group "Regex: find and find_all" <|
|
||||
Test.specify "should be possible on text" <|
|
||||
suite_builder.group "Regex: find and find_all" group_builder->
|
||||
group_builder.specify "should be possible on text" <|
|
||||
"My Text: Goes Here".find "^My Text: (.+)$" . text 0 . should_equal "My Text: Goes Here"
|
||||
|
||||
Test.specify "should be possible on unicode text" <|
|
||||
group_builder.specify "should be possible on unicode text" <|
|
||||
txt = "maza건반zaa"
|
||||
txt.find "^a..z$" . should_equal Nothing
|
||||
txt.find "^m..a..z.a$" . text 0 . should_equal "maza건반zaa"
|
||||
txt.find "a..z" . text 0 . should_equal "a건반z"
|
||||
|
||||
Test.specify "`find` with an empty pattern should be an error" <|
|
||||
group_builder.specify "`find` with an empty pattern should be an error" <|
|
||||
'b'.find '' . should_fail_with Illegal_Argument
|
||||
|
||||
Test.specify "`find_all` with an empty pattern should be an error" <|
|
||||
group_builder.specify "`find_all` with an empty pattern should be an error" <|
|
||||
'b'.find_all '' . should_fail_with Illegal_Argument
|
||||
|
||||
Test.specify "should be possible in case-insensitive mode" <|
|
||||
group_builder.specify "should be possible in case-insensitive mode" <|
|
||||
"MY".find "my" Case_Sensitivity.Insensitive . text 0 . should_equal "MY"
|
||||
|
||||
Test.specify "should allow access to the entire match text" <|
|
||||
group_builder.specify "should allow access to the entire match text" <|
|
||||
"abcddd".find "ab(c(d+))" . text . should_equal "abcddd"
|
||||
|
||||
Test.specify "should allow access to groups via .get" <|
|
||||
group_builder.specify "should allow access to groups via .get" <|
|
||||
"abcddd".find "ab(c(d+))" . get 0 . should_equal "abcddd"
|
||||
"abcddd".find "ab(c(d+))" . get 1 . should_equal "cddd"
|
||||
"abcddd".find "ab(c(d+))" . get 2 . should_equal "ddd"
|
||||
"abcddd".find "ab(c(d+))" . get 3 if_missing="MISSING" . should_equal "MISSING"
|
||||
|
||||
Test.specify "should allow access to groups via .at" <|
|
||||
group_builder.specify "should allow access to groups via .at" <|
|
||||
"abcddd".find "ab(c(d+))" . at 0 . should_equal "abcddd"
|
||||
"abcddd".find "ab(c(d+))" . at 1 . should_equal "cddd"
|
||||
"abcddd".find "ab(c(d+))" . at 2 . should_equal "ddd"
|
||||
"abcddd".find "ab(c(d+))" . at 3 . should_fail_with Index_Out_Of_Bounds
|
||||
|
||||
Test.specify "should handle the Unicode normalization" pending="Use this to test exposed normalization methods" <|
|
||||
group_builder.specify "should handle the Unicode normalization" pending="Use this to test exposed normalization methods" <|
|
||||
## This test passed for the builtin Java regex library, using
|
||||
Pattern.CANON_EQ, but since that option is buggy and rarely used,
|
||||
we won't attempt to recreate it with Truffle regex. Instead,
|
||||
@ -1392,42 +1392,42 @@ spec =
|
||||
accents = 'a\u{301}e\u{301}o\u{301}'
|
||||
accents.find accent_1 . span 0 . should_equal (Span.Value (1.up_to 2) 'a\u{301}e\u{301}o\u{301}')
|
||||
|
||||
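The `pending` argument used in the spec above recurs in other refactored specs; a minimal, hypothetical sketch of that pattern follows (not part of this commit; the spec name and reason are placeholders).

# Passing `pending` presumably marks the spec as ignored and records the given reason.
group_builder.specify "a temporarily disabled check" pending="Tracked in a follow-up issue" <|
    1 . should_equal 2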
Test.specify "can return a vector of all match groups" <|
|
||||
group_builder.specify "can return a vector of all match groups" <|
|
||||
"abc".find "ab((c)|(d))" . groups . should_equal ['abc', 'c', 'c', Nothing]
|
||||
|
||||
Test.specify "should default to group 0 in .span and .span" <|
|
||||
group_builder.specify "should default to group 0 in .span and .span" <|
|
||||
"abacadae".find "a[bc]" . utf_16_span . should_equal (Utf_16_Span.Value (0.up_to 2) "abacadae")
|
||||
'a\u{301}e\u{301}o\u{301}'.find 'e\u{301}' . span . should_equal (Span.Value (1.up_to 2) 'a\u{301}e\u{301}o\u{301}')
|
||||
|
||||
Test.specify "should allow to match one or more occurrences of a pattern in the text" <|
|
||||
group_builder.specify "should allow to match one or more occurrences of a pattern in the text" <|
|
||||
"abacadae".find_all "a[bc]" . map (match-> match.span 0) . should_equal [Span.Value (0.up_to 2) "abacadae", Span.Value (2.up_to 4) "abacadae"]
|
||||
"abacadae".find_all "a." . map (match-> match.span 0) . should_equal [Span.Value (0.up_to 2) "abacadae", Span.Value (2.up_to 4) "abacadae", Span.Value (4.up_to 6) "abacadae", Span.Value (6.up_to 8) "abacadae"]
|
||||
"abacadae".find_all "a.*" . map (match-> match.span 0) . should_equal [Span.Value (0.up_to 8) "abacadae"]
|
||||
"abacadae".find_all "a.+?" . map (match-> match.span 0) . should_equal [Span.Value (0.up_to 2) "abacadae", Span.Value (2.up_to 4) "abacadae", Span.Value (4.up_to 6) "abacadae", Span.Value (6.up_to 8) "abacadae"]
|
||||
|
||||
Test.specify "should allow access to match groups by number" <|
|
||||
group_builder.specify "should allow access to match groups by number" <|
|
||||
"abcddd".find "ab(c(d+))" . text 0 . should_equal "abcddd"
|
||||
"abcddd".find "ab(c(d+))" . text 1 . should_equal "cddd"
|
||||
"abcddd".find "ab(c(d+))" . text 2 . should_equal "ddd"
|
||||
|
||||
Test.specify "should allow access to match groups by name" <|
|
||||
group_builder.specify "should allow access to match groups by name" <|
|
||||
"abcddd".find "ab(?<cee>c(d+))" . text "cee" . should_equal "cddd"
|
||||
|
||||
Test.specify "should throw No_Such_Group for an out-of-range group number" <|
|
||||
group_builder.specify "should throw No_Such_Group for an out-of-range group number" <|
|
||||
"abcddd".find "ab(c(d+))" . text 3 . should_fail_with No_Such_Group
|
||||
"abcddd".find "ab(c(d+))" . text 12 . should_fail_with No_Such_Group
|
||||
"abcddd".find "ab(c(d+))" . text (-1) . should_fail_with No_Such_Group
|
||||
|
||||
Test.specify "should throw No_Such_Group for an invalid group name" <|
|
||||
group_builder.specify "should throw No_Such_Group for an invalid group name" <|
|
||||
"abcddd".find "ab(?<cee>c(d+))" . text "dee" . should_fail_with No_Such_Group
|
||||
|
||||
Test.specify "should throw No_Such_Group for an invalid group name (when there are no named groups at all)" <|
|
||||
group_builder.specify "should throw No_Such_Group for an invalid group name (when there are no named groups at all)" <|
|
||||
"abcddd".find "ab(c(d+))" . text "dee" . should_fail_with No_Such_Group
|
||||
|
||||
Test.specify "should throw Regex_Syntax_Error for a regex with incorrect syntax" <|
|
||||
group_builder.specify "should throw Regex_Syntax_Error for a regex with incorrect syntax" <|
|
||||
"abcddd".find "ab(c(((((((" . text 0 . should_fail_with Regex_Syntax_Error
|
||||
|
||||
Test.specify ".text should return Nothing if the group did not participate in the match" <|
|
||||
group_builder.specify ".text should return Nothing if the group did not participate in the match" <|
|
||||
match_c = "abc".find "ab((c)|(d))"
|
||||
match_c.text 1 . should_equal "c"
|
||||
match_c.text 2 . should_equal "c"
|
||||
@ -1437,85 +1437,85 @@ spec =
|
||||
match_d.text 2 . should_equal Nothing
|
||||
match_d.text 3 . should_equal "d"
|
||||
|
||||
Test.specify "should expand a partial-grapheme match to the whole grapheme" <|
|
||||
group_builder.specify "should expand a partial-grapheme match to the whole grapheme" <|
|
||||
'e\u{301}'.find '\u{301}' . text 0 . should_equal 'e\u{301}'
|
||||
|
||||
Test.specify "should not allow non-default locale" <|
|
||||
group_builder.specify "should not allow non-default locale" <|
|
||||
locale = Locale.new "en" "GB" "UTF-8"
|
||||
'a'.find 'a' case_sensitivity=(Case_Sensitivity.Insensitive locale) . should_fail_with Illegal_Argument
|
||||
'a'.find_all 'a' case_sensitivity=(Case_Sensitivity.Insensitive locale) . should_fail_with Illegal_Argument
|
||||
|
||||
Test.group "Text.match" <|
|
||||
Test.specify "should work correctly" <|
|
||||
suite_builder.group "Text.match" group_builder->
|
||||
group_builder.specify "should work correctly" <|
|
||||
"My Text: Goes Here".match "^My Text: (.+)$" . should_be_true
|
||||
"555-801-1923".match "^\d{3}-\d{3}-\d{4}$" . should_be_true
|
||||
"Hello".match "^[a-z]+$" . should_be_false
|
||||
"Hello".match "^[a-z]+$" Case_Sensitivity.Insensitive . should_be_true
|
||||
|
||||
Test.specify "should only match whole input" <|
|
||||
group_builder.specify "should only match whole input" <|
|
||||
"Hello".match "[a-z]" . should_be_false
|
||||
"abcd".match "bcd" . should_be_false
|
||||
"abcd".match "abc" . should_be_false
|
||||
"x".match "[a-z]" . should_be_true
|
||||
|
||||
Test.specify "`match` with an empty pattern should be an error" <|
|
||||
group_builder.specify "`match` with an empty pattern should be an error" <|
|
||||
'b'.match '' . should_fail_with Illegal_Argument
|
||||
|
||||
Test.specify "should be possible on unicode text" <|
|
||||
group_builder.specify "should be possible on unicode text" <|
|
||||
"Korean: 건반".match "^Korean: (.+)$" . should_be_true
|
||||
|
||||
Test.specify "should be possible in case-insensitive mode" <|
|
||||
group_builder.specify "should be possible in case-insensitive mode" <|
|
||||
"MY".match "my" Case_Sensitivity.Insensitive . should_be_true
|
||||
|
||||
Test.specify "should not allow non-default locale" <|
|
||||
group_builder.specify "should not allow non-default locale" <|
|
||||
locale = Locale.new "en" "GB" "UTF-8"
|
||||
'a'.match 'a' case_sensitivity=(Case_Sensitivity.Insensitive locale) . should_fail_with Illegal_Argument
|
||||
|
||||
Test.group "Regex splitting" <|
|
||||
Test.specify "should be possible on text" <|
|
||||
suite_builder.group "Regex splitting" group_builder->
|
||||
group_builder.specify "should be possible on text" <|
|
||||
splits = "abcde".split "[bd]" use_regex=True
|
||||
splits.length . should_equal 3
|
||||
splits.at 0 . should_equal "a"
|
||||
splits.at 1 . should_equal "c"
|
||||
splits.at 2 . should_equal "e"
|
||||
|
||||
Test.specify "should be possible on unicode text" <|
|
||||
group_builder.specify "should be possible on unicode text" <|
|
||||
match = "Korean: 건반 (hangul)".split " " use_regex=True
|
||||
match.length . should_equal 3
|
||||
match.at 0 . should_equal "Korean:"
|
||||
match.at 1 . should_equal "건반"
|
||||
match.at 2 . should_equal "(hangul)"
|
||||
|
||||
Test.specify "should be possible in case-insensitive mode" <|
|
||||
group_builder.specify "should be possible in case-insensitive mode" <|
|
||||
splits = "abaBa".split "b" use_regex=True case_sensitivity=Case_Sensitivity.Insensitive
|
||||
splits.length . should_equal 3
|
||||
splits.at 0 . should_equal "a"
|
||||
splits.at 1 . should_equal "a"
|
||||
splits.at 2 . should_equal "a"
|
||||
|
||||
Test.group "Regex tokenizing" <|
|
||||
Test.specify "can tokenize with simple regexes without capturing groups"
|
||||
suite_builder.group "Regex tokenizing" group_builder->
|
||||
group_builder.specify "can tokenize with simple regexes without capturing groups"
|
||||
"1-800-regex-yes" . tokenize "[a-z]+" . should_equal ["regex", "yes"]
|
||||
"1-800-REGEX-YES" . tokenize "[a-z]+" case_sensitivity=Case_Sensitivity.Insensitive . should_equal ["REGEX", "YES"]
|
||||
"12 hi345 67r890r" . tokenize "\d\d" . should_equal ["12", "34", "67", "89"]
|
||||
|
||||
Test.specify "examples are correct" <|
|
||||
group_builder.specify "examples are correct" <|
|
||||
"ABCDEF" . tokenize "..." . should_equal ["ABC","DEF"]
|
||||
"ABCDEF" . tokenize "(.).(.)" . should_equal ["AC","DF"]
|
||||
'Hello Big\r\nWide\tWorld\nGoodbye!' . tokenize "(\S+)(?:\s+|$)" . should_equal ["Hello","Big","Wide","World","Goodbye!"]
|
||||
|
||||
Test.group "Text.replace" <|
|
||||
Test.specify "should work as in examples" <|
|
||||
suite_builder.group "Text.replace" group_builder->
|
||||
group_builder.specify "should work as in examples" <|
|
||||
'aaa'.replace 'aa' 'b' . should_equal 'ba'
|
||||
"Hello World!".replace "[lo]".to_regex "#" . should_equal "He### W#r#d!"
|
||||
"Hello World!".replace "l" "#" only_first=True . should_equal "He#lo World!"
|
||||
'"abc" foo "bar" baz'.replace '"(.*?)"'.to_regex '($1)' . should_equal '(abc) foo (bar) baz'
|
||||
|
||||
Test.specify "works when mapped over a vector of inputs" <|
|
||||
group_builder.specify "works when mapped over a vector of inputs" <|
|
||||
inputs = ["axyz", "bxyz", "xabcz", "zazaz"]
|
||||
inputs.map (s-> s.replace "[abc]".to_regex "q") . should_equal ["qxyz", "qxyz", "xqqqz", "zqzqz"]
|
||||
|
||||
Test.specify "should correctly handle empty-string edge cases" <|
|
||||
group_builder.specify "should correctly handle empty-string edge cases" <|
|
||||
[True, False] . each only_first->
|
||||
'aaa'.replace '' 'foo' only_first=only_first . should_equal 'aaa'
|
||||
'a'.replace 'a' '' only_first=only_first . should_equal ''
|
||||
@ -1525,14 +1525,14 @@ spec =
|
||||
'aba' . replace 'a' '' . should_equal 'b'
|
||||
'aba' . replace 'c' '' . should_equal 'aba'
|
||||
|
||||
Test.specify "should correctly handle first, all and last matching with overlapping occurrences" <|
|
||||
group_builder.specify "should correctly handle first, all and last matching with overlapping occurrences" <|
|
||||
"aaa aaa".replace "aa" "c" . should_equal "ca ca"
|
||||
"aaa aaa".replace "aa" "c" only_first=True . should_equal "ca aaa"
|
||||
|
||||
Test.specify "Regex `replace` with an empty pattern should be an error" <|
|
||||
group_builder.specify "Regex `replace` with an empty pattern should be an error" <|
|
||||
'b'.replace ''.to_regex 'c' . should_fail_with Illegal_Argument
|
||||
|
||||
Test.specify "should correctly handle case-insensitive matches" <|
|
||||
group_builder.specify "should correctly handle case-insensitive matches" <|
|
||||
'AaąĄ' . replace "A" "-" case_sensitivity=Case_Sensitivity.Insensitive . should_equal '--ąĄ'
|
||||
'AaąĄ' . replace "A" "-" . should_equal '-aąĄ'
|
||||
'HeLlO wOrLd' . replace 'hElLo' 'Hey,' case_sensitivity=Case_Sensitivity.Sensitive . should_equal 'HeLlO wOrLd'
|
||||
@ -1548,7 +1548,7 @@ spec =
|
||||
"Iiİı" . replace "İ" "-" case_sensitivity=Case_Sensitivity.Insensitive . should_equal "Ii-ı"
|
||||
"Iiİı" . replace "ı" "-" case_sensitivity=Case_Sensitivity.Insensitive . should_equal "Iiİ-"
|
||||
|
||||
Test.specify "should correctly handle Unicode" <|
|
||||
group_builder.specify "should correctly handle Unicode" <|
|
||||
'ß'.replace 'S' 'A' case_sensitivity=Case_Sensitivity.Insensitive . should_equal 'AA'
|
||||
'ß'.replace 'ß' 'A' case_sensitivity=Case_Sensitivity.Insensitive . should_equal 'A'
|
||||
'affib'.replace 'i' 'X' case_sensitivity=Case_Sensitivity.Insensitive . should_equal 'aXb'
|
||||
@ -1580,15 +1580,15 @@ spec =
|
||||
|
||||
"Korean: 건반".replace "건반" "keyboard" . should_equal "Korean: keyboard"
|
||||
|
||||
Test.specify "Text.to_regex" <|
|
||||
group_builder.specify "Text.to_regex" <|
|
||||
"^\d{3}-\d{3}-\d{4}$".to_regex . matches "555-801-1923" . should_be_true
|
||||
"aa".to_regex case_insensitive=True . matches "Aa" . should_be_true
|
||||
|
||||
Test.specify "regex and non-regex `replace` handle accented grapheme splitting differently" <|
|
||||
group_builder.specify "regex and non-regex `replace` handle accented grapheme splitting differently" <|
|
||||
'sśs\u{301}' . replace 's' 'O' . should_equal 'Ośs\u{301}'
|
||||
'sśs\u{301}' . replace 's'.to_regex 'O' . should_equal 'OśO\u{301}'
|
||||
|
||||
Test.specify "should perform simple replacement in Regex mode" <|
|
||||
group_builder.specify "should perform simple replacement in Regex mode" <|
|
||||
"ababab".replace "b".to_regex "a" . should_equal "aaaaaa"
|
||||
"ababab".replace "b".to_regex "a" only_first=True . should_equal "aaabab"
|
||||
|
||||
@ -1603,30 +1603,30 @@ spec =
|
||||
"aaa aaa".replace "aa".to_regex "c" . should_equal "ca ca"
|
||||
"aaa aaa".replace "aa".to_regex "c" only_first=True . should_equal "ca aaa"
|
||||
|
||||
Test.specify "in Regex mode should work with Unicode" <|
|
||||
group_builder.specify "in Regex mode should work with Unicode" <|
|
||||
"Korean: 건반".replace "건반".to_regex "keyboard" . should_equal "Korean: keyboard"
|
||||
'sśs\u{301}'.replace 'ś'.to_regex '-' . should_equal 's-s\u{301}'
|
||||
'sśs\u{301}'.replace 's\u{301}'.to_regex '-' . should_equal 'sś-'
|
||||
|
||||
Test.specify "in Regex mode should allow referring to capture groups in substitutions" <|
|
||||
group_builder.specify "in Regex mode should allow referring to capture groups in substitutions" <|
|
||||
'<a href="url">content</a>'.replace '<a href="(.*?)">(.*?)</a>'.to_regex '$2 is at $1' . should_equal 'content is at url'
|
||||
'<a href="url">content</a>'.replace '<a href="(?<address>.*?)">(?<text>.*?)</a>'.to_regex '$<text> is at $<address>' . should_equal 'content is at url'
|
||||
|
||||
Test.specify "should not allow non-default locale in regex replace" <|
|
||||
group_builder.specify "should not allow non-default locale in regex replace" <|
|
||||
locale = Locale.new "en" "GB" "UTF-8"
|
||||
'a'.replace 'a'.to_regex 'b' case_sensitivity=(Case_Sensitivity.Insensitive locale) . should_fail_with Illegal_Argument
|
||||
|
||||
Test.specify "should allow non-default locale in text replace" <|
|
||||
group_builder.specify "should allow non-default locale in text replace" <|
|
||||
locale = Locale.new "en" "GB" "UTF-8"
|
||||
'a'.replace 'a' 'b' case_sensitivity=(Case_Sensitivity.Insensitive locale) . should_equal 'b'
|
||||
|
||||
Test.specify "should work with a regex param" <|
|
||||
group_builder.specify "should work with a regex param" <|
|
||||
'aaa'.replace 'aa'.to_regex 'b' . should_equal 'ba'
|
||||
"Hello World!".replace "[lo]".to_regex "#" . should_equal "He### W#r#d!"
|
||||
"Hello World!".replace "l".to_regex "#" only_first=True . should_equal "He#lo World!"
|
||||
'"abc" foo "bar" baz'.replace '"(.*?)"'.to_regex '($1)' . should_equal '(abc) foo (bar) baz'
|
||||
|
||||
Test.specify "should respect the regex's case sensitivity setting, and override the regex's case sensitivity setting with an explicit case_sensitivity param" <|
|
||||
group_builder.specify "should respect the regex's case sensitivity setting, and override the regex's case sensitivity setting with an explicit case_sensitivity param" <|
|
||||
r_cs = "aa".to_regex
|
||||
r_ci = "aa".to_regex True
|
||||
|
||||
@ -1647,19 +1647,23 @@ spec =
|
||||
"Aaa".replace r_ci "b" case_sensitivity=Case_Sensitivity.Sensitive . should_equal "Ab"
|
||||
"aaa".replace r_ci "b" case_sensitivity=Case_Sensitivity.Insensitive . should_equal "ba"
|
||||
"Aaa".replace r_ci "b" case_sensitivity=Case_Sensitivity.Insensitive . should_equal "ba"
|
||||
Test.group "Text.substring" <|
|
||||
Test.specify "should work with all examples" <|
|
||||
suite_builder.group "Text.substring" group_builder->
|
||||
group_builder.specify "should work with all examples" <|
|
||||
"Hello World!".substring 3 2 . should_equal "lo"
|
||||
"Hello World!".substring 5 . should_equal " World!"
|
||||
"Hello World!".substring 5 7 . should_equal " World!"
|
||||
"Hello World!".substring -7 3 . should_equal " Wo"
|
||||
|
||||
Test.specify "should error on length less than 0" <|
|
||||
group_builder.specify "should error on length less than 0" <|
|
||||
"Hello World!".substring 5 -2 . should_fail_with Illegal_Argument
|
||||
|
||||
Test.specify "should error if negative start index is too large"
|
||||
group_builder.specify "should error if negative start index is too large"
|
||||
"Hello World!".substring -20 5 . should_fail_with Index_Out_Of_Bounds
|
||||
Test.specify "should error if start index is larger than string"
|
||||
group_builder.specify "should error if start index is larger than string"
|
||||
"Hello World!".substring 20 5 . should_fail_with Index_Out_Of_Bounds

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

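A minimal sketch of the mechanical pattern applied throughout this commit (the group and spec names below are illustrative, not taken from any particular file): the old `Test`/`Test_Suite` entry point becomes an `add_specs` function that registers groups on a `suite_builder`, and `main` builds the suite and runs it.

    # Old style (Standard.Test):
    spec =
        Test.group "My_Group" <|
            Test.specify "does something" <|
                1+1 . should_equal 2
    main = Test_Suite.run_main spec

    # New style (Standard.Test_New):
    add_specs suite_builder =
        suite_builder.group "My_Group" group_builder->
            group_builder.specify "does something" <|
                1+1 . should_equal 2
    main =
        suite = Test.build suite_builder->
            add_specs suite_builder
        suite.run_with_filter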
@ -1,31 +1,31 @@

from Standard.Base import all

from Standard.Test import Test
import Standard.Test.Extensions
from Standard.Test_New import all

spec name create_new_date =
Test.group (name + " date part tests") <|
Test.specify "should return if a leap year" <|

add_specs suite_builder name create_new_date =
suite_builder.group (name + " date part tests") group_builder->
group_builder.specify "should return if a leap year" <|
create_new_date 2022 8 25 . is_leap_year . should_equal False
|
||||
create_new_date 1999 12 31 . is_leap_year . should_equal False
|
||||
create_new_date 1996 6 19 . is_leap_year . should_equal True
|
||||
create_new_date 2000 1 1 . is_leap_year . should_equal True
|
||||
create_new_date 1900 1 1 . is_leap_year . should_equal False
|
||||
|
||||
Test.specify "should return the number of days in the year" <|
|
||||
group_builder.specify "should return the number of days in the year" <|
|
||||
create_new_date 2022 8 25 . length_of_year . should_equal 365
|
||||
create_new_date 1999 12 31 . length_of_year . should_equal 365
|
||||
create_new_date 1996 6 19 . length_of_year . should_equal 366
|
||||
create_new_date 2000 1 1 . length_of_year . should_equal 366
|
||||
create_new_date 1900 1 1 . length_of_year . should_equal 365
|
||||
|
||||
Test.specify "should return the century" <|
|
||||
group_builder.specify "should return the century" <|
|
||||
create_new_date 2022 8 25 . century . should_equal 21
|
||||
create_new_date 1999 12 31 . century . should_equal 20
|
||||
create_new_date 2000 1 1 . century . should_equal 20
|
||||
create_new_date 2001 1 1 . century . should_equal 21
|
||||
|
||||
Test.specify "should return the quarter" <|
|
||||
group_builder.specify "should return the quarter" <|
|
||||
create_new_date 2022 2 1 . quarter . should_equal 1
|
||||
create_new_date 1987 1 1 . quarter . should_equal 1
|
||||
create_new_date 1996 2 29 . quarter . should_equal 1
|
||||
@ -37,7 +37,7 @@ spec name create_new_date =
|
||||
create_new_date 2019 10 1 . quarter . should_equal 4
|
||||
create_new_date 2019 12 31 . quarter . should_equal 4
|
||||
|
||||
Test.specify "should return the number of days in the year" <|
|
||||
group_builder.specify "should return the number of days in the year" <|
|
||||
create_new_date 1987 1 1 . length_of_month . should_equal 31
|
||||
create_new_date 2022 2 1 . length_of_month . should_equal 28
|
||||
create_new_date 1996 2 2 . length_of_month . should_equal 29
|
||||
@ -52,7 +52,7 @@ spec name create_new_date =
|
||||
create_new_date 2019 11 7 . length_of_month . should_equal 30
|
||||
create_new_date 2019 12 31 . length_of_month . should_equal 31
|
||||
|
||||
Test.specify "should return the day_of_year" <|
|
||||
group_builder.specify "should return the day_of_year" <|
|
||||
create_new_date 1990 9 18 . day_of_year . should_equal 261
|
||||
create_new_date 1990 10 13 . day_of_year . should_equal 286
|
||||
create_new_date 1992 9 28 . day_of_year . should_equal 272
|
||||
@ -66,7 +66,7 @@ spec name create_new_date =
|
||||
create_new_date 2023 3 21 . day_of_year . should_equal 80
|
||||
create_new_date 2024 1 13 . day_of_year . should_equal 13
|
||||
|
||||
Test.specify "should return the day_of_week" <|
|
||||
group_builder.specify "should return the day_of_week" <|
|
||||
create_new_date 1990 9 18 . day_of_week . should_equal Day_Of_Week.Tuesday
|
||||
create_new_date 1990 10 13 . day_of_week . should_equal Day_Of_Week.Saturday
|
||||
create_new_date 1992 9 28 . day_of_week . should_equal Day_Of_Week.Monday
|
||||
@ -80,7 +80,7 @@ spec name create_new_date =
|
||||
create_new_date 2023 3 21 . day_of_week . should_equal Day_Of_Week.Tuesday
|
||||
create_new_date 2024 1 13 . day_of_week . should_equal Day_Of_Week.Saturday
|
||||
|
||||
Test.specify "should return the correct week of year" <|
|
||||
group_builder.specify "should return the correct week of year" <|
|
||||
create_new_date 2021 8 1 . week_of_year Locale.mexico . should_equal 32
|
||||
create_new_date 2021 1 1 . week_of_year Locale.us . should_equal 1
|
||||
create_new_date 2021 1 1 . week_of_year Locale.uk . should_equal 0
|
||||
|
@ -2,14 +2,18 @@ from Standard.Base import all

import Standard.Base.Errors.Common.Type_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument

from Standard.Test import Problems, Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

main = Test_Suite.run_main spec

spec =
Test.group "Date_Range" <|
Test.specify "should be created with up_to and down_to extension methods" <|
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter


add_specs suite_builder =
suite_builder.group "Date_Range" group_builder->
group_builder.specify "should be created with up_to and down_to extension methods" <|
(Date.new 2020 02 28).up_to (Date.new 2020 03 02) . to_vector . should_equal [Date.new 2020 02 28, Date.new 2020 02 29, Date.new 2020 03 01]
|
||||
(Date.new 2020 02 28).up_to (Date.new 2020 03 02) include_end=True . to_vector . should_equal [Date.new 2020 02 28, Date.new 2020 02 29, Date.new 2020 03 01, Date.new 2020 03 02]
|
||||
|
||||
@ -24,18 +28,18 @@ spec =
|
||||
|
||||
(Date.new 2023 12 31).down_to (Date.new 2023 12 31) . with_step Date_Period.Month . to_vector . should_equal []
|
||||
|
||||
Test.specify ".new should infer if the range should be increasing or not" <|
|
||||
group_builder.specify ".new should infer if the range should be increasing or not" <|
|
||||
Date_Range.new (Date.new 2023 10 01) (Date.new 2023 10 04) . to_vector . should_equal [Date.new 2023 10 01, Date.new 2023 10 02, Date.new 2023 10 03]
|
||||
Date_Range.new (Date.new 2023 10 04) (Date.new 2023 10 01) . to_vector . should_equal [Date.new 2023 10 04, Date.new 2023 10 03, Date.new 2023 10 02]
|
||||
|
||||
Test.specify "will be empty if the start and end are swapped with up_to or down_to" <|
|
||||
group_builder.specify "will be empty if the start and end are swapped with up_to or down_to" <|
|
||||
(Date.new 2023 10 01).down_to (Date.new 2023 10 04) . to_vector . should_equal []
|
||||
(Date.new 2023 10 04).up_to (Date.new 2023 10 01) . to_vector . should_equal []
|
||||
|
||||
(Date.new 2023 10 01).down_to (Date.new 2023 10 04) . with_step Date_Period.Month . to_vector . should_equal []
|
||||
(Date.new 2023 10 04).up_to (Date.new 2023 10 01) . with_step Date_Period.Month . to_vector . should_equal []
|
||||
|
||||
Test.specify "should allow setting a custom step" <|
|
||||
group_builder.specify "should allow setting a custom step" <|
|
||||
(Date.new 2020 01 10).up_to (Date.new 2020 01 31) step=(Period.new days=5) . to_vector . should_equal [Date.new 2020 01 10, Date.new 2020 01 15, Date.new 2020 01 20, Date.new 2020 01 25, Date.new 2020 01 30]
|
||||
(Date.new 2020 01 10).up_to (Date.new 2020 01 31) . with_step (Period.new days=5) . to_vector . should_equal [Date.new 2020 01 10, Date.new 2020 01 15, Date.new 2020 01 20, Date.new 2020 01 25, Date.new 2020 01 30]
|
||||
(Date.new 2020 01 10).up_to (Date.new 2020 01 30) . with_step (Period.new days=5) . to_vector . should_equal [Date.new 2020 01 10, Date.new 2020 01 15, Date.new 2020 01 20, Date.new 2020 01 25]
|
||||
@ -52,14 +56,14 @@ spec =
|
||||
|
||||
(Date.new 2020).up_to (Date.new 2023) . with_step (Period.new years=1 months=2 days=3) . to_vector . should_equal [Date.new 2020 01 01, Date.new 2021 03 04, Date.new 2022 05 07]
|
||||
|
||||
Test.specify "should handle end of month edge cases" <|
|
||||
group_builder.specify "should handle end of month edge cases" <|
|
||||
(Date.new 2020 01 31).up_to (Date.new 2020 12 31) include_end=True . with_step Date_Period.Month . to_vector . should_equal [Date.new 2020 01 31, Date.new 2020 02 29, Date.new 2020 03 31, Date.new 2020 04 30, Date.new 2020 05 31, Date.new 2020 06 30, Date.new 2020 07 31, Date.new 2020 08 31, Date.new 2020 09 30, Date.new 2020 10 31, Date.new 2020 11 30, Date.new 2020 12 31]
|
||||
(Date.new 2021 01 28).up_to (Date.new 2021 05 10) . with_step Date_Period.Month . to_vector . should_equal [Date.new 2021 01 28, Date.new 2021 02 28, Date.new 2021 03 28, Date.new 2021 04 28]
|
||||
(Date.new 2023 01 30).up_to (Date.new 2023 06 10) . with_step Date_Period.Month . to_vector . should_equal [Date.new 2023 01 30, Date.new 2023 02 28, Date.new 2023 03 30, Date.new 2023 04 30, Date.new 2023 05 30]
|
||||
(Date.new 2023 01 30).up_to (Date.new 2023 06 10) . with_step (Period.new months=2) . to_vector . should_equal [Date.new 2023 01 30, Date.new 2023 03 30, Date.new 2023 05 30]
|
||||
(Date.new 2020 02 29).up_to (Date.new 2023) . with_step Date_Period.Year . to_vector . should_equal [Date.new 2020 02 29, Date.new 2021 02 28, Date.new 2022 02 28]
|
||||
|
||||
Test.specify "should handle edge cases" <|
|
||||
group_builder.specify "should handle edge cases" <|
|
||||
(Date.new 2020 02 27).up_to (Date.new 2020 03 02) include_end=True . with_step (Period.new days=2) . to_vector . should_equal [Date.new 2020 02 27, Date.new 2020 02 29, Date.new 2020 03 02]
|
||||
|
||||
(Date.new 2020 02 27).up_to (Date.new 2020 02 28) . with_step Date_Period.Month . to_vector . should_equal [Date.new 2020 02 27]
|
||||
@ -96,7 +100,7 @@ spec =
|
||||
(Date.new 2021 01 01).up_to (Date.new 2023 12 31) . with_step (Period.new years=2) . to_vector . should_equal [Date.new 2021 01 01, Date.new 2023 01 01]
|
||||
(Date.new 2021 01 01).up_to (Date.new 2023 12 31) include_end=True . with_step (Period.new years=2) . to_vector . should_equal [Date.new 2021 01 01, Date.new 2023 01 01]
|
||||
|
||||
Test.specify "should not allow a non-positive step" <|
|
||||
group_builder.specify "should not allow a non-positive step" <|
|
||||
(Date.new 2010).up_to (Date.new 2050) . with_step (Period.new years=0 months=0 days=0) . should_fail_with Illegal_Argument
|
||||
(Date.new 2010).up_to (Date.new 2050) . with_step (Period.new years=0 months=-1 days=0) . should_fail_with Illegal_Argument
|
||||
(Date.new 2010).up_to (Date.new 2050) . with_step (Period.new years=0 months=0 days=-1) . should_fail_with Illegal_Argument
|
||||
@ -112,7 +116,7 @@ spec =
|
||||
(Date.new 2021 05 05).up_to (Date.new 2021 06 08) . with_step (Period.new months=1 days=(-30)) . should_fail_with Illegal_Argument
|
||||
(Date.new 2021 02 28).up_to (Date.new 2021 03 31) . with_step ((Period.new years=1 months=(-11) days=(-28))) . should_fail_with Illegal_Argument
|
||||
|
||||
Test.specify "should allow to reverse a range, returning a vector" <|
|
||||
group_builder.specify "should allow to reverse a range, returning a vector" <|
|
||||
(Date.new 2020 01 02).up_to (Date.new 2020 01 02) . reverse . should_equal []
|
||||
(Date.new 2020 01 02).up_to (Date.new 2020 01 02) include_end=True . reverse . should_equal [Date.new 2020 01 02]
|
||||
|
||||
@ -120,7 +124,7 @@ spec =
|
||||
|
||||
(Date.new 2020 02 29).up_to (Date.new 2023) . with_step Date_Period.Year . reverse . should_equal [Date.new 2022 02 28, Date.new 2021 02 28, Date.new 2020 02 29]
|
||||
|
||||
Test.specify "should be consistent with its to_vector representation" <|
|
||||
group_builder.specify "should be consistent with its to_vector representation" <|
|
||||
r1 = (Date.new 2020 02 28).up_to (Date.new 2020 03 02)
|
||||
r2 = (Date.new 2020 02 28).up_to (Date.new 2020 03 02) include_end=True
|
||||
r3 = (Date.new 2021 03 01).down_to (Date.new 2021 02 28)
|
||||
@ -180,7 +184,7 @@ spec =
|
||||
# Catch+to_text to fix Empty_Error equality.
|
||||
r.reduce reducer . catch . to_text . should_equal (r.to_vector.reduce reducer . catch . to_text)
|
||||
|
||||
Test.specify "should define friendly text representations" <|
|
||||
group_builder.specify "should define friendly text representations" <|
|
||||
r1 = (Date.new 2020 02 28).up_to (Date.new 2020 03 02)
|
||||
r2 = (Date.new 2020 03 20).down_to (Date.new 2020 03 01) include_end=True . with_step Date_Period.Week
|
||||
|
||||
@ -193,6 +197,6 @@ spec =
|
||||
r1.to_display_text . should_equal '[2020-02-28 .. 2020-03-02]'
|
||||
r2.to_display_text . should_equal '[2020-03-20 .. 2020-02-29 by -7D]'
|
||||
|
||||
Test.specify "should be serializable to JSON" <|
|
||||
group_builder.specify "should be serializable to JSON" <|
|
||||
r = (Date.new 2020 01 01).up_to (Date.new 2020 01 03)
|
||||
r.to_json . should_equal '{"type":"Date_Range","start":{"type":"Date","constructor":"new","day":1,"month":1,"year":2020},"end":{"type":"Date","constructor":"new","day":3,"month":1,"year":2020},"step":{"type":"Period","constructor":"new","days":1},"increasing":true}'
|
||||
|
@ -3,78 +3,80 @@ import Standard.Base.Errors.Common.Incomparable_Values

import Standard.Base.Errors.Common.Type_Error
import Standard.Base.Errors.Time_Error.Time_Error

from Standard.Test import Problems, Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


import project.Data.Time.Date_Part_Spec

polyglot java import java.time.LocalDate
polyglot java import java.time.format.DateTimeFormatter

spec =
spec_with "Date" Date.new Date.parse
spec_with "JavaScriptDate" js_date js_parse
add_specs suite_builder =
spec_with suite_builder "Date" Date.new Date.parse
spec_with suite_builder "JavaScriptDate" js_date js_parse
if Polyglot.is_language_installed "python" then
spec_with "PythonDate" python_date python_parse
spec_with "JavaDate" java_date java_parse
spec_with "JavaScriptArrayWithADate" js_array_date js_parse
spec_with suite_builder "PythonDate" python_date python_parse pending="https://github.com/enso-org/enso/issues/8913"
spec_with suite_builder "JavaDate" java_date java_parse
spec_with suite_builder "JavaScriptArrayWithADate" js_array_date js_parse

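The `pending` argument introduced here is forwarded by `spec_with` below to `suite_builder.group name pending=pending`, so a backend blocked by an open issue is reported as pending instead of being run. A hypothetical, minimal usage sketch (not part of this commit):

    add_specs suite_builder =
        suite_builder.group "PythonDate" pending="https://github.com/enso-org/enso/issues/8913" group_builder->
            group_builder.specify "is skipped while the group is pending" <|
                Test.fail "not expected to run"

Passing `pending=Nothing` keeps the group active, which is why `spec_with` defaults to `Nothing`.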
spec_with name create_new_date parse_date =
Test.group name <|
spec_with suite_builder name create_new_date parse_date pending=Nothing =
if pending.is_nothing then Date_Part_Spec.add_specs suite_builder name create_new_date

Test.specify "should create local date" <|
suite_builder.group name pending=pending group_builder->

group_builder.specify "should create local date" <|
date = create_new_date 2020 1 1
|
||||
date . year . should_equal 2020
|
||||
date . month . should_equal 1
|
||||
date . day . should_equal 1
|
||||
|
||||
Test.specify "should handle errors when creating local date" <|
|
||||
group_builder.specify "should handle errors when creating local date" <|
|
||||
case create_new_date 2020 30 30 . catch of
|
||||
Time_Error.Error msg _ ->
|
||||
msg . should_equal "Invalid value for MonthOfYear (valid values 1 - 12): 30"
|
||||
result ->
|
||||
Test.fail ("Unexpected result: " + result.to_text)
|
||||
|
||||
Test.specify "should format local date using provided pattern" <|
|
||||
group_builder.specify "should format local date using provided pattern" <|
|
||||
text = create_new_date 2020 12 21 . format "yyyyMMdd"
|
||||
text . should_equal "20201221"
|
||||
|
||||
Test.specify "should support to_display_text" <|
|
||||
group_builder.specify "should support to_display_text" <|
|
||||
text = create_new_date 2020 12 21 . to_display_text
|
||||
text . should_equal "2020-12-21"
|
||||
|
||||
Test.specify "should format local date using provided pattern and locale" <|
|
||||
group_builder.specify "should format local date using provided pattern and locale" <|
|
||||
d = create_new_date 2020 6 21
|
||||
d.format "d. MMM yyyy" . should_equal "21. Jun 2020"
|
||||
d.format "d. MMMM yyyy" . should_equal "21. June 2020"
|
||||
d.format (Date_Time_Formatter.from "d. MMMM yyyy" locale=(Locale.uk)) . should_equal "21. June 2020"
|
||||
d.format (Date_Time_Formatter.from "d. MMMM yyyy" locale=(Locale.france)) . should_equal "21. juin 2020"
|
||||
|
||||
Test.specify "should format local date using default pattern" <|
|
||||
group_builder.specify "should format local date using default pattern" <|
|
||||
text = create_new_date 2020 12 21 . to_text
|
||||
text . should_equal "2020-12-21"
|
||||
|
||||
Test.specify "should parse default time format" <|
|
||||
group_builder.specify "should parse default time format" <|
|
||||
text = create_new_date 2020 12 21 . to_text
|
||||
date = parse_date text
|
||||
date . year . should_equal 2020
|
||||
date . month . should_equal 12
|
||||
date . day . should_equal 21
|
||||
|
||||
Test.specify "should throw error when parsing invalid date" <|
|
||||
group_builder.specify "should throw error when parsing invalid date" <|
|
||||
case parse_date "birthday" . catch of
|
||||
Time_Error.Error msg _ ->
|
||||
msg . should_contain "Text 'birthday' could not be parsed"
|
||||
result ->
|
||||
Test.fail ("Unexpected result: " + result.to_text)
|
||||
|
||||
Test.specify "should parse local date" <|
|
||||
group_builder.specify "should parse local date" <|
|
||||
date = parse_date "1999-01-01"
|
||||
date . year . should_equal 1999
|
||||
date . month . should_equal 1
|
||||
date . day . should_equal 1
|
||||
|
||||
Test.specify "should convert to time" <|
|
||||
group_builder.specify "should convert to time" <|
|
||||
time = create_new_date 2000 12 21 . to_date_time (Time_Of_Day.new 12 30 45) Time_Zone.utc
|
||||
time . year . should_equal 2000
|
||||
time . month . should_equal 12
|
||||
@ -85,45 +87,45 @@ spec_with name create_new_date parse_date =
|
||||
time . nanosecond . should_equal 0
|
||||
time . zone . zone_id . should_equal Time_Zone.utc.zone_id
|
||||
|
||||
Test.specify "date-time conversion should work with interop values" <|
|
||||
group_builder.specify "date-time conversion should work with interop values" <|
|
||||
date = create_new_date 2000 12 21
|
||||
time = Time_Of_Day.new 12 30 45
|
||||
datetime = time.to_date_time date
|
||||
datetime.date . should_equal date
|
||||
datetime.time_of_day . should_equal time
|
||||
|
||||
Test.specify "should convert to Json" <|
|
||||
group_builder.specify "should convert to Json" <|
|
||||
date = create_new_date 2001 12 21
|
||||
date.to_json.should_equal <|
|
||||
JS_Object.from_pairs [["type", "Date"], ["constructor", "new"], ["day", date.day], ["month", date.month], ["year", date.year]] . to_text
|
||||
|
||||
Test.specify "should add date-based interval" <|
|
||||
group_builder.specify "should add date-based interval" <|
|
||||
date = create_new_date 1970 + (Period.new days=1)
|
||||
date . year . should_equal 1970
|
||||
date . month . should_equal 1
|
||||
date . day . should_equal 2
|
||||
|
||||
Test.specify "should subtract date-based interval" <|
|
||||
group_builder.specify "should subtract date-based interval" <|
|
||||
date = create_new_date 1970 - (Period.new years=1)
|
||||
date . year . should_equal 1969
|
||||
date . month . should_equal 1
|
||||
date . day . should_equal 1
|
||||
|
||||
Test.specify "should support mixed interval operators" <|
|
||||
group_builder.specify "should support mixed interval operators" <|
|
||||
date = create_new_date 1970 + (Period.new months=1) - (Period.new years=1)
|
||||
date . year . should_equal 1969
|
||||
date . month . should_equal 2
|
||||
date . day . should_equal 1
|
||||
|
||||
Test.specify "should throw error when adding time-based Duration" <|
|
||||
group_builder.specify "should throw error when adding time-based Duration" <|
|
||||
Test.expect_panic_with matcher=Type_Error <|
|
||||
create_new_date 1970 + (Duration.new hours=1)
|
||||
|
||||
Test.specify "should throw error when subtracting time-based Duration" <|
|
||||
group_builder.specify "should throw error when subtracting time-based Duration" <|
|
||||
Test.expect_panic_with matcher=Type_Error <|
|
||||
create_new_date 1970 - (Duration.new minutes=1)
|
||||
|
||||
Test.specify "should support addition of Date_Period" <|
|
||||
group_builder.specify "should support addition of Date_Period" <|
|
||||
time = create_new_date 1970
|
||||
time+Date_Period.Year . should_equal <| create_new_date 1971
|
||||
time+Date_Period.Quarter . should_equal <| create_new_date 1970 4
|
||||
@ -131,7 +133,7 @@ spec_with name create_new_date parse_date =
|
||||
time+Date_Period.Week . should_equal <| create_new_date 1970 1 8
|
||||
time+Date_Period.Day . should_equal <| create_new_date 1970 1 2
|
||||
|
||||
Test.specify "should support subtraction of Date_Period" <|
|
||||
group_builder.specify "should support subtraction of Date_Period" <|
|
||||
time = create_new_date 1970
|
||||
time-Date_Period.Year . should_equal <| create_new_date 1969
|
||||
time-Date_Period.Quarter . should_equal <| create_new_date 1969 10
|
||||
@ -139,12 +141,12 @@ spec_with name create_new_date parse_date =
|
||||
time-Date_Period.Week . should_equal <| create_new_date 1969 12 25
|
||||
time-Date_Period.Day . should_equal <| create_new_date 1969 12 31
|
||||
|
||||
Test.specify "should support mixed addition and subtraction of Date_Period" <|
|
||||
group_builder.specify "should support mixed addition and subtraction of Date_Period" <|
|
||||
time = create_new_date 1970
|
||||
time+Date_Period.Quarter-Date_Period.Month . should_equal <| create_new_date 1970 3 1
|
||||
time-Date_Period.Month+Date_Period.Year . should_equal <| create_new_date 1970 12 1
|
||||
|
||||
Test.specify "should be comparable" <|
|
||||
group_builder.specify "should be comparable" <|
|
||||
date_1 = parse_date "2021-01-02"
|
||||
date_2 = parse_date "2021-01-01"
|
||||
(date_1 == date_2) . should_be_false
|
||||
@ -159,12 +161,12 @@ spec_with name create_new_date parse_date =
|
||||
date_1>=datetime . should_fail_with Incomparable_Values
|
||||
date_1==datetime . should_be_false
|
||||
|
||||
Test.specify "should create date before epoch start" <|
|
||||
group_builder.specify "should create date before epoch start" <|
|
||||
(create_new_date 100 1 2).year . should_equal 100
|
||||
(create_new_date 100 1 2).month . should_equal 1
|
||||
(create_new_date 100 1 2).day . should_equal 2
|
||||
|
||||
Test.specify "Gregorian calendar related functionality should produce warning before epoch start" <|
|
||||
group_builder.specify "Gregorian calendar related functionality should produce warning before epoch start" <|
|
||||
is_time_error v = case v of
|
||||
_ : Time_Error -> True
|
||||
_ -> False
|
||||
@ -178,7 +180,7 @@ spec_with name create_new_date parse_date =
|
||||
expect_warning date.length_of_year
|
||||
expect_warning date.week_of_year
|
||||
|
||||
Test.specify "Gregorian calendar related functionality should work after epoch start" <|
|
||||
group_builder.specify "Gregorian calendar related functionality should work after epoch start" <|
|
||||
expect_no_warning value =
|
||||
((Warning.get_all value).length == 0) . should_be_true
|
||||
dates_after_epoch = [(create_new_date 1583), (create_new_date 1582 10 16), (create_new_date 2020)]
|
||||
@ -190,14 +192,14 @@ spec_with name create_new_date parse_date =
|
||||
expect_no_warning date.length_of_year
|
||||
expect_no_warning date.week_of_year
|
||||
|
||||
Test.specify "should correctly determine the type of date" <|
|
||||
group_builder.specify "should correctly determine the type of date" <|
|
||||
new_date = create_new_date 2020 6 1
|
||||
parsed_date = parse_date "2021-01-02"
|
||||
|
||||
Meta.type_of new_date . should_equal_type Date
|
||||
Meta.type_of parsed_date . should_equal_type Date
|
||||
|
||||
Test.specify "should allow to find start and end of a Date_Period containing the current date" <|
|
||||
group_builder.specify "should allow to find start and end of a Date_Period containing the current date" <|
|
||||
d1 = create_new_date 2022 9 12
|
||||
d1.start_of Date_Period.Year . should_equal (Date.new 2022 1 1)
|
||||
d1.end_of Date_Period.Year . should_equal (Date.new 2022 12 31)
|
||||
@ -250,17 +252,17 @@ spec_with name create_new_date parse_date =
|
||||
(create_new_date 2000 7 1).end_of Date_Period.Quarter . should_equal (Date.new 2000 9 30)
|
||||
(create_new_date 2000 6 30).end_of Date_Period.Quarter . should_equal (Date.new 2000 6 30)
|
||||
|
||||
Test.specify "should allow to compute the next Date_Period after a date" <|
|
||||
group_builder.specify "should allow to compute the next Date_Period after a date" <|
|
||||
create_new_date 2000 2 1 . next Date_Period.Day . should_equal <| create_new_date 2000 2 2
|
||||
create_new_date 2000 2 1 . next Date_Period.Month . should_equal <| create_new_date 2000 3 1
|
||||
create_new_date 2000 2 1 . next Date_Period.Year . should_equal <| create_new_date 2001 2 1
|
||||
|
||||
Test.specify "should allow to compute the previous Date_Period after a date" <|
|
||||
group_builder.specify "should allow to compute the previous Date_Period after a date" <|
|
||||
create_new_date 2000 2 1 . previous Date_Period.Day . should_equal <| create_new_date 2000 1 31
|
||||
create_new_date 2000 2 1 . previous Date_Period.Month . should_equal <| create_new_date 2000 1 1
|
||||
create_new_date 2000 2 1 . previous Date_Period.Year . should_equal <| create_new_date 1999 2 1
|
||||
|
||||
Test.specify "should allow to compute the Period between two dates." <|
|
||||
group_builder.specify "should allow to compute the Period between two dates." <|
|
||||
create_new_date 2000 2 1 . until (create_new_date 2000 2 12) . should_equal <| Period.new 0 0 11
|
||||
create_new_date 2000 2 1 . until (create_new_date 2000 12 12) . should_equal <| Period.new 0 10 11
|
||||
create_new_date 2000 2 1 . until (create_new_date 2010 2 12) . should_equal <| Period.new 10 0 11
|
||||
@ -269,7 +271,7 @@ spec_with name create_new_date parse_date =
|
||||
create_new_date 2000 12 12 . until (create_new_date 2000 2 1) . should_equal <| Period.new 0 -10 -11
|
||||
create_new_date 2010 2 12 . until (create_new_date 2000 2 1) . should_equal <| Period.new -10 0 -11
|
||||
|
||||
Test.specify "should allow to compute the number of days until a date" <|
|
||||
group_builder.specify "should allow to compute the number of days until a date" <|
|
||||
create_new_date 2000 2 1 . days_until (create_new_date 2000 2 1) . should_equal 0
|
||||
create_new_date 2000 2 1 . days_until (create_new_date 2000 2 2) . should_equal 1
|
||||
create_new_date 2000 2 2 . days_until (create_new_date 2000 2 1) . should_equal -1
|
||||
@ -277,7 +279,7 @@ spec_with name create_new_date parse_date =
|
||||
create_new_date 2000 3 1 . days_until (create_new_date 2001 3 1) . should_equal 365
|
||||
create_new_date 2001 3 1 . days_until (create_new_date 2000 3 1) . should_equal -365
|
||||
|
||||
Test.specify "should allow to compute the number of days until a date including the end date" <|
|
||||
group_builder.specify "should allow to compute the number of days until a date including the end date" <|
|
||||
create_new_date 2000 2 1 . days_until (create_new_date 2000 2 1) include_end_date=True . should_equal 1
|
||||
create_new_date 2000 2 1 . days_until (create_new_date 2000 2 2) include_end_date=True . should_equal 2
|
||||
create_new_date 2000 2 2 . days_until (create_new_date 2000 2 1) include_end_date=True . should_equal -2
|
||||
@ -285,7 +287,7 @@ spec_with name create_new_date parse_date =
|
||||
create_new_date 2000 3 1 . days_until (create_new_date 2001 3 1) include_end_date=True . should_equal 366
|
||||
create_new_date 2001 3 1 . days_until (create_new_date 2000 3 1) include_end_date=True . should_equal -366
|
||||
|
||||
Test.specify "should allow to compute the number of working days until a later date" <|
|
||||
group_builder.specify "should allow to compute the number of working days until a later date" <|
|
||||
# 2000-2-1 is a Tuesday
|
||||
create_new_date 2000 2 1 . work_days_until (create_new_date 2000 2 1) . should_equal 0
|
||||
create_new_date 2000 2 1 . work_days_until (create_new_date 2000 2 2) . should_equal 1
|
||||
@ -336,7 +338,7 @@ spec_with name create_new_date parse_date =
|
||||
# We duplicate the holiday entries to check that the functions are resilient to such input data.
|
||||
duplicated_holiday_november year =
|
||||
holiday_november year + holiday_november year + holiday_november year
|
||||
Test.specify "should allow to compute the number of working days until a date, skipping custom set holidays" <|
|
||||
group_builder.specify "should allow to compute the number of working days until a date, skipping custom set holidays" <|
|
||||
holiday_february = Vector.new 29 (i -> create_new_date 2000 2 i+1)
|
||||
create_new_date 2000 2 1 . work_days_until (create_new_date 2000 3 1) holiday_february . should_equal 0
|
||||
create_new_date 2000 2 10 . work_days_until (create_new_date 2000 2 12) holiday_february . should_equal 0
|
||||
@ -351,7 +353,7 @@ spec_with name create_new_date parse_date =
|
||||
create_new_date 2000 11 1 . work_days_until (create_new_date 2000 12 1) (duplicated_holiday_november 2020) . should_equal 22
|
||||
create_new_date 1999 11 1 . work_days_until (create_new_date 1999 12 1) (duplicated_holiday_november 1999) . should_equal 19
|
||||
|
||||
Test.specify "should allow to compute the number of working days including the end, in a manner consistent with NETWORKDAYS" <|
|
||||
group_builder.specify "should allow to compute the number of working days including the end, in a manner consistent with NETWORKDAYS" <|
|
||||
create_new_date 2000 2 1 . work_days_until (create_new_date 2000 2 1) include_end_date=True . should_equal 1
|
||||
create_new_date 2000 2 1 . work_days_until (create_new_date 2000 2 2) include_end_date=True . should_equal 2
|
||||
create_new_date 2000 2 3 . work_days_until (create_new_date 2000 2 16) include_end_date=True . should_equal 10
|
||||
@ -368,7 +370,7 @@ spec_with name create_new_date parse_date =
|
||||
create_new_date 2000 2 6 . work_days_until (create_new_date 2000 2 8) include_end_date=True . should_equal 2
|
||||
create_new_date 2000 2 6 . work_days_until (create_new_date 2000 2 5) include_end_date=True . should_equal 0
|
||||
|
||||
Test.specify "should allow to shift the date by N working days" <|
|
||||
group_builder.specify "should allow to shift the date by N working days" <|
|
||||
# 2000-2-1 is a Tuesday
|
||||
create_new_date 2000 2 1 . add_work_days 0 . should_equal (Date.new 2000 2 1)
|
||||
create_new_date 2000 2 1 . add_work_days . should_equal (Date.new 2000 2 2)
|
||||
@ -400,7 +402,7 @@ spec_with name create_new_date parse_date =
|
||||
create_new_date 2022 3 27 . add_work_days 0 . should_equal (Date.new 2022 3 28)
|
||||
create_new_date 2022 3 27 . add_work_days 1 . should_equal (Date.new 2022 3 29)
|
||||
|
||||
Test.specify "should allow to shift the date by negative amount of working days" <|
|
||||
group_builder.specify "should allow to shift the date by negative amount of working days" <|
|
||||
# 2000-2-1 is a Tuesday
|
||||
create_new_date 2000 2 1 . add_work_days -1 . should_equal (Date.new 2000 1 31)
|
||||
create_new_date 2000 2 1 . add_work_days -2 . should_equal (Date.new 2000 1 28)
|
||||
@ -429,7 +431,7 @@ spec_with name create_new_date parse_date =
|
||||
create_new_date 2000 2 6 . add_work_days -5 . should_equal (Date.new 2000 1 31)
|
||||
create_new_date 2000 2 6 . add_work_days -6 . should_equal (Date.new 2000 1 28)
|
||||
|
||||
Test.specify "should allow to shift the date by N working days, skipping custom holidays" <|
|
||||
group_builder.specify "should allow to shift the date by N working days, skipping custom holidays" <|
|
||||
all_year_holiday year =
|
||||
first_day = create_new_date year 1 1
|
||||
Vector.new first_day.length_of_year (n -> first_day + (Period.new days=n))
|
||||
@ -450,7 +452,7 @@ spec_with name create_new_date parse_date =
|
||||
create_new_date 1999 10 30 . add_work_days 0 (duplicated_holiday_november 1999) . should_equal (Date.new 1999 11 3)
|
||||
create_new_date 1999 10 30 . add_work_days 1 (duplicated_holiday_november 1999) . should_equal (Date.new 1999 11 4)
|
||||
|
||||
Test.specify "add_work_days and work_days_until should be consistent with each other" <|
|
||||
group_builder.specify "add_work_days and work_days_until should be consistent with each other" <|
|
||||
first_day = create_new_date 2020 1 1
|
||||
dates = Vector.new 100 (n -> first_day + (Period.new days=n))
|
||||
holidays = [1, 2, 10, 11, 12, 13, 14, 15, 30, 40, 41, 42, 50, 60].map (n -> first_day + (Period.new days=n))
|
||||
@ -466,7 +468,7 @@ spec_with name create_new_date parse_date =
|
||||
(date.add_work_days -n).work_days_until date . should_equal n
|
||||
(date.add_work_days -n holidays).work_days_until date holidays . should_equal n
|
||||
|
||||
Test.specify "should allow extracting a date_part" <|
|
||||
group_builder.specify "should allow extracting a date_part" <|
|
||||
d1 = create_new_date 2023 12 30
|
||||
d1.date_part Date_Period.Year . should_equal 2023
|
||||
d1.date_part Date_Period.Quarter . should_equal 4
|
||||
@ -476,7 +478,7 @@ spec_with name create_new_date parse_date =
|
||||
|
||||
Test.expect_panic_with (d1.date_part Time_Period.Day) Type_Error
|
||||
|
||||
Test.specify "should allow computing date_diff" <|
|
||||
group_builder.specify "should allow computing date_diff" <|
|
||||
d1 = create_new_date 2021 11 3
|
||||
d2 = create_new_date 2021 12 5
|
||||
|
||||
@ -506,7 +508,7 @@ spec_with name create_new_date parse_date =
|
||||
Test.expect_panic_with (d1.date_diff d2 Time_Period.Day) Type_Error
|
||||
Test.expect_panic_with (d1.date_diff d2 Time_Period.Hour) Type_Error
|
||||
|
||||
Test.specify "should allow shifting with date_add" <|
|
||||
group_builder.specify "should allow shifting with date_add" <|
|
||||
d1 = create_new_date 2021 01 31
|
||||
d1.date_add 5 Date_Period.Day . should_equal (Date.new 2021 02 05)
|
||||
d1.date_add -1 Date_Period.Day . should_equal (Date.new 2021 01 30)
|
||||
@ -523,10 +525,13 @@ spec_with name create_new_date parse_date =
|
||||
Test.expect_panic_with (d1.date_add 1.5 Date_Period.Day) Type_Error
|
||||
Test.expect_panic_with (d1.date_add 1.0 Date_Period.Day) Type_Error
|
||||
|
||||
Date_Part_Spec.spec name create_new_date

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

parseNormally x y = (Date.parse x y) . to_text

@ -3,14 +3,14 @@ import Standard.Base.Errors.Illegal_Argument.Illegal_Argument

import Standard.Base.Errors.Time_Error.Time_Error
from Standard.Base.Data.Time.Errors import Date_Time_Format_Parse_Error, Suspicious_Date_Time_Format

from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all


polyglot java import java.time.format.DateTimeFormatter

spec =
Test.group "Parsing formats" <|
Test.specify "should throw informative error for replacements of Java patterns in Simple format" <|
add_specs suite_builder =
suite_builder.group "Parsing formats" group_builder->
group_builder.specify "should throw informative error for replacements of Java patterns in Simple format" <|
r1 = Date_Time_Formatter.from "d LLL yyyy"
|
||||
r1.should_fail_with Date_Time_Format_Parse_Error
|
||||
r1.catch.to_display_text . should_contain "use `MMM`"
|
||||
@ -27,7 +27,7 @@ spec =
|
||||
r4.should_fail_with Date_Time_Format_Parse_Error
|
||||
r4.catch.to_display_text . should_contain "at most 4"
|
||||
|
||||
Test.specify "should report format parse failures" <|
|
||||
group_builder.specify "should report format parse failures" <|
|
||||
Date_Time_Formatter.from "yyyy[" . should_fail_with Date_Time_Format_Parse_Error
|
||||
Date_Time_Formatter.from "yyyy{12}" . should_fail_with Date_Time_Format_Parse_Error
|
||||
Date_Time_Formatter.from "yy{baz}" . should_fail_with Date_Time_Format_Parse_Error
|
||||
@ -37,10 +37,10 @@ spec =
|
||||
Date_Time_Formatter.from "[]]" . should_fail_with Date_Time_Format_Parse_Error
|
||||
Date_Time_Formatter.from "'" . should_fail_with Date_Time_Format_Parse_Error
|
||||
|
||||
Test.specify "should gracefully handle Java pattern errors" <|
|
||||
group_builder.specify "should gracefully handle Java pattern errors" <|
|
||||
Date_Time_Formatter.from_java "}}{{,.,..} INVALID PATTERN FORMAT" . should_fail_with Illegal_Argument
|
||||
|
||||
Test.specify "should warn about likely M/m mistakes" <|
|
||||
group_builder.specify "should warn about likely M/m mistakes" <|
|
||||
f1 = Date_Time_Formatter.from "yyyy-mm-dd"
|
||||
w1 = Problems.expect_only_warning Suspicious_Date_Time_Format f1
|
||||
w1.to_display_text . should_contain "Did you mean 'M'"
|
||||
@ -69,23 +69,23 @@ spec =
|
||||
w4 = Problems.expect_only_warning Suspicious_Date_Time_Format f4
|
||||
w4.to_display_text . should_contain "Did you mean 'h'"
|
||||
|
||||
Test.group "Formatting date/time values" <|
|
||||
Test.specify "should allow printing month names" <|
|
||||
suite_builder.group "Formatting date/time values" group_builder->
|
||||
group_builder.specify "should allow printing month names" <|
|
||||
d = Date.new 2020 6 30
|
||||
d.format "d. MMM yyyy" . should_equal "30. Jun 2020"
|
||||
d.format "d. MMMM yyyy" . should_equal "30. June 2020"
|
||||
d.format (Date_Time_Formatter.from "d. MMMM yyyy" Locale.us) . should_equal "30. June 2020"
|
||||
d.format (Date_Time_Formatter.from "d. MMMM yyyy" Locale.default) . should_equal "30. June 2020"
|
||||
|
||||
Test.specify "should allow using a Java formatter" <|
|
||||
group_builder.specify "should allow using a Java formatter" <|
|
||||
jformatter = Date_Time_Formatter.from_java DateTimeFormatter.ISO_ORDINAL_DATE
|
||||
Date.new 2020 2 1 . format jformatter . should_equal "2020-032"
|
||||
|
||||
Test.specify "should allow parsing Java patterns" <|
|
||||
group_builder.specify "should allow parsing Java patterns" <|
|
||||
Date.new 2020 2 1 . format (Date_Time_Formatter.from_java "E, d LLL yyyy") . should_equal "Sat, 1 Feb 2020"
|
||||
Date.new 2020 3 1 . format (Date_Time_Formatter.from_java "E, d LLLL yyyy") . should_equal "Sun, 1 March 2020"
|
||||
|
||||
Test.specify "should handle various formats" <|
|
||||
group_builder.specify "should handle various formats" <|
|
||||
Date.new 2023 09 21 . format "E, dd.MM.yy" . should_equal "Thu, 21.09.23"
|
||||
Date.new 2023 09 21 . format (Date_Time_Formatter.from "DDDD" Locale.poland) . should_equal "czwartek"
|
||||
Date.new 2023 09 21 . format (Date_Time_Formatter.from_iso_week_date_pattern "eee, 'W'WW ''yy" Locale.uk) . should_equal "Thursday, W38 '23"
|
||||
@ -94,7 +94,7 @@ spec =
|
||||
tz = Time_Zone.parse "US/Hawaii"
|
||||
Date_Time.new 2023 09 21 12 zone=tz . format "yyyy/MM/dd HH:mm:ss VV" . should_equal "2023/09/21 12:00:00 US/Hawaii"
|
||||
|
||||
Test.specify "should allow to customize the 'zero' of a zone offset" <|
|
||||
group_builder.specify "should allow to customize the 'zero' of a zone offset" <|
|
||||
dt = Date_Time.new 2020 01 02 12 zone=(Time_Zone.utc)
|
||||
dt.format "yyyy/MM/dd HH:mm:ss ZZ" . should_equal "2020/01/02 12:00:00 +0000"
|
||||
dt.format "yyyy/MM/dd HH:mm:ss ZZ{Z}" . should_equal "2020/01/02 12:00:00 Z"
|
||||
@ -105,25 +105,25 @@ spec =
|
||||
dt2.format "yyyy/MM/dd HH:mm:ss ZZ{Z}" . should_equal "2020/01/02 12:00:00 -1000"
|
||||
dt2.format "yyyy/MM/dd HH:mm:ss ZZZZZ{<no offset>}" . should_equal "2020/01/02 12:00:00 -10:00"
|
||||
|
||||
Test.specify "should work with optional parts" <|
|
||||
group_builder.specify "should work with optional parts" <|
|
||||
f = Date_Time_Formatter.from "[('Date:' yyyy-MM-dd)][('Time:' HH:mm)]"
|
||||
Date_Time.new 2020 01 02 12 30 . format f . should_equal "(Date: 2020-01-02)(Time: 12:30)"
|
||||
Date.new 2020 01 02 . format f . should_equal "(Date: 2020-01-02)"
|
||||
Time_Of_Day.new 12 30 . format f . should_equal "(Time: 12:30)"
|
||||
|
||||
Test.group "Parsing date/time values" <|
|
||||
Test.specify "should allow short month names" <|
|
||||
suite_builder.group "Parsing date/time values" group_builder->
|
||||
group_builder.specify "should allow short month names" <|
|
||||
Date.parse "30. Jun 2020" "d. MMM yyyy" . should_equal (Date.new 2020 6 30)
|
||||
|
||||
Test.specify "should allow long month names" <|
|
||||
group_builder.specify "should allow long month names" <|
|
||||
Date.parse "30. June 2020" (Date_Time_Formatter.from "d. MMMM yyyy") . should_equal (Date.new 2020 6 30)
|
||||
Date.parse "30. June 2020" (Date_Time_Formatter.from "d. MMMM yyyy" Locale.uk) . should_equal (Date.new 2020 6 30)
|
||||
|
||||
Test.specify "should allow long month names from Java" <|
|
||||
group_builder.specify "should allow long month names from Java" <|
|
||||
Date.parse "30. June 2020" (Date_Time_Formatter.from_java "d. LLLL yyyy") . should_equal (Date.new 2020 6 30)
|
||||
Date.parse "30. June 2020" (Date_Time_Formatter.from_java "d. LLLL yyyy" Locale.uk) . should_equal (Date.new 2020 6 30)
|
||||
|
||||
Test.specify "should parse default time format" <|
|
||||
group_builder.specify "should parse default time format" <|
|
||||
text = Date_Time.new 1970 (zone = Time_Zone.utc) . to_text
|
||||
time = Date_Time.parse text
|
||||
time . year . should_equal 1970
|
||||
@ -135,7 +135,7 @@ spec =
|
||||
time . nanosecond . should_equal 0
|
||||
time . zone . zone_id . should_equal Time_Zone.utc.zone_id
|
||||
|
||||
Test.specify "should parse local time adding system zone" <|
|
||||
group_builder.specify "should parse local time adding system zone" <|
|
||||
time = Date_Time.parse "1970-01-01T00:00:01"
|
||||
time . year . should_equal 1970
|
||||
time . month . should_equal 1
|
||||
@ -146,17 +146,17 @@ spec =
|
||||
time . nanosecond . should_equal 0
|
||||
(time.zone.offset time) . should_equal (Time_Zone.system.offset time)
|
||||
|
||||
Test.specify "should parse time Z" <|
|
||||
group_builder.specify "should parse time Z" <|
|
||||
time = Date_Time.parse "1582-10-15T00:00:01Z"
|
||||
time . to_enso_epoch_seconds . should_equal 1
|
||||
time.zone.zone_id . should_equal "Z"
|
||||
|
||||
Test.specify "should parse time UTC" <|
|
||||
group_builder.specify "should parse time UTC" <|
|
||||
time = Date_Time.parse "1582-10-15T00:00:01Z[UTC]"
|
||||
time . to_enso_epoch_seconds . should_equal 1
|
||||
time . zone . zone_id . should_equal "UTC"
|
||||
|
||||
Test.specify "should parse time with nanoseconds" <|
|
||||
group_builder.specify "should parse time with nanoseconds" <|
|
||||
time = Date_Time.parse "1970-01-01T00:00:01.123456789Z"
|
||||
time . year . should_equal 1970
|
||||
time . month . should_equal 1
|
||||
@ -170,7 +170,7 @@ spec =
|
||||
time . nanosecond . should_equal 789
|
||||
time.zone.zone_id . should_equal "Z"
|
||||
|
||||
Test.specify "should parse time with offset-based zone" <|
|
||||
group_builder.specify "should parse time with offset-based zone" <|
|
||||
time = Date_Time.parse "1970-01-01T00:00:01+01:00"
|
||||
time . year . should_equal 1970
|
||||
time . month . should_equal 1
|
||||
@ -183,7 +183,7 @@ spec =
|
||||
time . nanosecond . should_equal 0
|
||||
time.zone.zone_id . take (Last 6) . should_equal "+01:00"
|
||||
|
||||
Test.specify "should parse time with id-based zone" <|
|
||||
group_builder.specify "should parse time with id-based zone" <|
|
||||
time = Date_Time.parse "1970-01-01T00:00:01+01:00[Europe/Paris]"
|
||||
time . year . should_equal 1970
|
||||
time . month . should_equal 1
|
||||
@ -199,14 +199,14 @@ spec =
|
||||
zone.zone_id . should_equal "Europe/Paris"
|
||||
time.to_display_text . should_equal "1970-01-01 00:00:01[Europe/Paris]"
|
||||
|
||||
Test.specify "should throw error when parsing invalid time" <|
|
||||
group_builder.specify "should throw error when parsing invalid time" <|
|
||||
case Date_Time.parse "2008-1-1" . catch of
|
||||
Time_Error.Error msg _ ->
|
||||
msg . should_contain "Text '2008-1-1' could not be parsed"
|
||||
result ->
|
||||
Test.fail ("Unexpected result: " + result.to_text)
|
||||
|
||||
Test.specify "should parse custom format of zoned time" <|
|
||||
group_builder.specify "should parse custom format of zoned time" <|
|
||||
time = Date_Time.parse "2020-05-06 04:30:20 UTC" "yyyy-MM-dd HH:mm:ss VV"
|
||||
time . year . should_equal 2020
|
||||
time . month . should_equal 5
|
||||
@ -219,7 +219,7 @@ spec =
|
||||
time . nanosecond . should_equal 0
|
||||
(time.zone.zone_id . take (Last 3) . to_case Case.Upper) . should_equal "UTC"
|
||||
|
||||
Test.specify "should parse custom format of local time" <|
|
||||
group_builder.specify "should parse custom format of local time" <|
|
||||
time = Date_Time.parse "06 of May 2020 at 04:30AM" "dd 'of' MMMM yyyy 'at' hh:mma"
|
||||
time . year . should_equal 2020
|
||||
time . month . should_equal 5
|
||||
@ -231,7 +231,7 @@ spec =
|
||||
time . microsecond . should_equal 0
|
||||
time . nanosecond . should_equal 0
|
||||
|
||||
Test.specify "should throw error when parsing custom format" <|
|
||||
group_builder.specify "should throw error when parsing custom format" <|
|
||||
time = Date_Time.parse "2008-01-01" "yyyy-MM-dd'T'HH:mm:ss'['tt']'"
|
||||
case time.catch of
|
||||
Time_Error.Error msg _ ->
|
||||
@ -239,29 +239,29 @@ spec =
|
||||
result ->
|
||||
Test.fail ("Unexpected result: " + result.to_text)
|
||||
|
||||
Test.specify "should be able to parse YYYY as well as yyyy" <|
|
||||
group_builder.specify "should be able to parse YYYY as well as yyyy" <|
|
||||
Date.parse "2020-01-02" "YYYY-MM-dd" . should_equal (Date.new 2020 1 2)
|
||||
Date.parse "2020-01-02" "yyyy-MM-dd" . should_equal (Date.new 2020 1 2)
|
||||
|
||||
Test.specify "should be able to parse year-month without day" <|
|
||||
group_builder.specify "should be able to parse year-month without day" <|
|
||||
Date.parse "2022-05" "yyyy-MM" . should_equal (Date.new 2022 5 1)
|
||||
|
||||
Test.specify "should be able to parse a quarter without day" <|
|
||||
group_builder.specify "should be able to parse a quarter without day" <|
|
||||
Date.parse "Q2 of 2022" "'Q'Q 'of' yyyy" . should_equal (Date.new 2022 4 1)
|
||||
|
||||
Test.specify "should be able to parse 2-digit year" <|
|
||||
group_builder.specify "should be able to parse 2-digit year" <|
|
||||
Date.parse "22-05-06" "yy-MM-dd" . should_equal (Date.new 2022 5 6)
|
||||
Date.parse "99-01-02" "yy-MM-dd" . should_equal (Date.new 1999 1 2)
|
||||
Date.parse "49-03-04" "yy-MM-dd" . should_equal (Date.new 2049 3 4)
|
||||
Date.parse "50-03-04" "yy-MM-dd" . should_equal (Date.new 1950 3 4)
|
||||
|
||||
Test.specify "should be able to parse 2-digit year with custom base-year" <|
|
||||
group_builder.specify "should be able to parse 2-digit year with custom base-year" <|
|
||||
Date.parse "22-05-06" "yy{1999}-MM-dd" . should_equal (Date.new 1922 5 6)
|
||||
Date.parse "99-01-02" "yy{1949}-MM-dd" . should_equal (Date.new 1899 1 2)
|
||||
Date.parse "49-03-04" "yy{3099}-MM-dd" . should_equal (Date.new 3049 3 4)
|
||||
Date.parse "50-03-04" "yy{2099}-MM-dd" . should_equal (Date.new 2050 3 4)
|
||||
|
||||
Test.specify "should work like in examples" <|
|
||||
group_builder.specify "should work like in examples" <|
|
||||
Date.parse "Tue, 12 Oct 2021" "ddd, d MMM yyyy" . should_equal (Date.new 2021 10 12)
|
||||
Date.parse "Thursday, 1 October '98" (Date_Time_Formatter.from "dddd, d MMMM ''yy" Locale.uk) . should_equal (Date.new 1998 10 01)
|
||||
Date_Time.parse "12/10/2021 5:34 PM" "d/M/Y h:mm a" . should_equal (Date_Time.new 2021 10 12 17 34 00)
|
||||
@ -270,7 +270,7 @@ spec =
|
||||
Date.parse "1 Nov '95" "d MMM ''yy{2099}" . should_equal (Date.new 2095 11 01)
|
||||
Date_Time.parse "2021-10-12T12:34:56.789+0200" "yyyy-MM-dd'T'HH:mm:ss.fZ" . should_equal (Date_Time.new 2021 10 12 12 34 56 millisecond=789 zone=(Time_Zone.new hours=2))
|
||||
|
||||
Test.specify "should be able to parse a week-based year format" <|
|
||||
group_builder.specify "should be able to parse a week-based year format" <|
|
||||
Date.parse "1976-W53-6" (Date_Time_Formatter.from_iso_week_date_pattern "YYYY-'W'WW-d") . should_equal (Date.new 1977 01 01)
|
||||
Date_Time.parse "1978-W01-4 12:34:56" (Date_Time_Formatter.from_iso_week_date_pattern "YYYY-'W'WW-d HH:mm:ss") . should_equal (Date_Time.new 1978 01 05 12 34 56)
|
||||
|
||||
@ -280,7 +280,7 @@ spec =
|
||||
# Just week will parse to first day of the week:
|
||||
Date.parse "1978-W01" (Date_Time_Formatter.from_iso_week_date_pattern "YYYY-'W'WW") . should_equal (Date.new 1978 01 02)
|
||||
|
||||
Test.specify "should include the pattern in the parse failure message" <|
|
||||
group_builder.specify "should include the pattern in the parse failure message" <|
|
||||
r1 = "1999.01.02".parse_date
|
||||
r1.should_fail_with Time_Error
|
||||
r1.to_display_text . should_contain "Expected date/time format: (ISO date) yyyy-MM-dd"
|
||||
@ -301,7 +301,7 @@ spec =
|
||||
r5.should_fail_with Time_Error
|
||||
r5.to_display_text . should_contain "Expected date/time format: (Java DateTimeFormatter) ParseCaseSensitive(false)Value(Year,4,10,EXCEEDS_PAD)'-'Value(DayOfYear,3)[Offset(+HH:MM:ss,'Z')]"
|
||||
|
||||
Test.specify "should allow to use 12h hours without am/pm and default to am, but issue a warning (only in parsing)" <|
|
||||
group_builder.specify "should allow to use 12h hours without am/pm and default to am, but issue a warning (only in parsing)" <|
|
||||
f1 = Date_Time_Formatter.from "hh:mm"
|
||||
# No warning yet.
|
||||
Problems.assume_no_problems f1
|
||||
@ -319,7 +319,7 @@ spec =
|
||||
w1.to_display_text . should_contain "default to AM"
|
||||
w1.to_display_text . should_contain "Did you mean 'H'"
|
||||
|
||||
Test.specify "the warning should be removable as indicated in the message" pending="TODO: bug https://github.com/enso-org/enso/issues/7892" <|
|
||||
group_builder.specify "the warning should be removable as indicated in the message" pending="TODO: bug https://github.com/enso-org/enso/issues/7892" <|
|
||||
f1 = Date_Time_Formatter.from "hh:mm"
|
||||
r1 = Time_Of_Day.parse "04:24" f1
|
||||
w1 = Problems.expect_only_warning Suspicious_Date_Time_Format r1
|
||||
@ -329,7 +329,7 @@ spec =
|
||||
r2.should_equal (Time_Of_Day.new 4 24)
|
||||
Problems.assume_no_problems r2
|
||||
|
||||
Test.specify "should allow to parse MM-dd without a year, defaulting to current year but adding a warning (only in parsing)" <|
|
||||
group_builder.specify "should allow to parse MM-dd without a year, defaulting to current year but adding a warning (only in parsing)" <|
|
||||
f1 = Date_Time_Formatter.from "dd.MM"
|
||||
|
||||
s1 = Date.new 2020 12 31 . format f1
|
||||
@ -347,7 +347,7 @@ spec =
|
||||
Date.parse "07/23" "MM/dd" . should_equal (Date.new current_year 7 23)
|
||||
Date.parse "14. of May" "d. 'of' MMMM" . should_equal (Date.new current_year 5 14)
|
||||
|
||||
Test.specify "should allow nested patterns" <|
|
||||
group_builder.specify "should allow nested patterns" <|
|
||||
# Difference between a nested pattern and two optional patterns next to each other.
|
||||
Date.parse "2023-01-02 XY" "yyyy-MM-dd ['X']['Y']" . should_equal (Date.new 2023 1 2)
|
||||
Date.parse "2023-01-02 X" "yyyy-MM-dd ['X']['Y']" . should_equal (Date.new 2023 1 2)
|
||||
@ -367,7 +367,11 @@ spec =
|
||||
Date.parse "2023-01-07 XY" very_nested . should_equal (Date.new 2023 1 7)
|
||||
Date.parse "2023-01-08 XZ" very_nested . should_fail_with Time_Error
|
||||
|
||||
Test.specify "should allow to parse even when some patterns are unused" <|
|
||||
group_builder.specify "should allow to parse even when some patterns are unused" <|
|
||||
"2020-01-02 14:55".parse_date "yyyy-MM-dd HH:mm" . should_equal (Date.new 2020 1 2)
|
||||
|
||||
main = Test_Suite.run_main spec
|
||||
main =
|
||||
suite = Test.build suite_builder->
|
||||
add_specs suite_builder
|
||||
suite.run_with_filter
|
||||
|
||||
|
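The hunks above all apply the same mechanical migration: `Test.group`/`Test.specify` become `suite_builder.group`/`group_builder.specify`, the module-level `spec` becomes `add_specs suite_builder`, and `Test_Suite.run_main spec` becomes `Test.build` plus `suite.run_with_filter`. A minimal sketch of the target shape (hypothetical group and spec names, using only the Test_New API already visible in this diff, including the optional `pending` argument):

from Standard.Base import all
from Standard.Test_New import all

add_specs suite_builder =
    suite_builder.group "Example group" group_builder->
        group_builder.specify "should pass a basic assertion" <|
            (1 + 1) . should_equal 2
        # A pending spec only records the reason; its block is not evaluated.
        group_builder.specify "is skipped for now" pending="TODO: reason for skipping" <|
            Test.fail "not evaluated while the spec is pending"

main =
    suite = Test.build suite_builder->
        add_specs suite_builder
    suite.run_with_filter
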
@ -3,8 +3,8 @@ import Standard.Base.Errors.Common.Incomparable_Values
import Standard.Base.Errors.Common.Type_Error
import Standard.Base.Errors.Time_Error.Time_Error

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


import project.Data.Time.Date_Part_Spec

@ -17,16 +17,16 @@ polyglot java import java.time.format.DateTimeFormatter
polyglot java import java.lang.Exception as JException
polyglot java import java.util.Objects

spec =
spec_with "Date_Time" enso_datetime Date_Time.parse
spec_with "JavascriptDate" js_datetime js_parse nanoseconds_loss_in_precision=True
add_specs suite_builder =
spec_with suite_builder "Date_Time" enso_datetime Date_Time.parse
spec_with suite_builder "JavascriptDate" js_datetime js_parse nanoseconds_loss_in_precision=True
if Polyglot.is_language_installed "python" then
spec_with "PythonDate" python_datetime python_parse nanoseconds_loss_in_precision=True
spec_with "JavaZonedDateTime" java_datetime java_parse
spec_with "JavascriptDataInArray" js_array_datetime js_parse nanoseconds_loss_in_precision=True
spec_with suite_builder "PythonDate" python_datetime python_parse nanoseconds_loss_in_precision=True
spec_with suite_builder "JavaZonedDateTime" java_datetime java_parse
spec_with suite_builder "JavascriptDataInArray" js_array_datetime js_parse nanoseconds_loss_in_precision=True

Test.group "Date_Time equality" <|
Test.specify "should work with values coming from various sources" <|
suite_builder.group "Date_Time equality" group_builder->
group_builder.specify "should work with values coming from various sources" <|
d1 = Date_Time.new 2022 3 4 5 6
d2 = java_datetime 2022 3 4 5 6
d1 . should_equal d2
@ -38,16 +38,16 @@ spec =
d3 . should_equal d4
d4 . should_equal d5

Test.specify "should provide equality operator" <|
group_builder.specify "should provide equality operator" <|
(Date_Time.new 2022 zone=(Time_Zone.parse "CET")).should_not_equal (Date_Time.new 2022 zone=(Time_Zone.parse "UTC"))
(Date_Time.new 2022 zone=(Time_Zone.parse "CET")).should_equal (Date_Time.new 2022 zone=(Time_Zone.parse "CET"))
(Date_Time.new 2022 12 12).should_equal (Date_Time.new 2022 12 12)
(Date_Time.new 2022 12 12).should_not_equal (Date_Time.new 1996)

spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=False =
Test.group name <|
spec_with suite_builder name create_new_datetime parse_datetime nanoseconds_loss_in_precision=False =
suite_builder.group name group_builder->

Test.specify "should create time" <|
group_builder.specify "should create time" <|
time = create_new_datetime 1970 (zone = Time_Zone.utc)
time . year . should_equal 1970
time . month . should_equal 1
@ -58,7 +58,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
time . nanosecond . should_equal 0
time . zone . zone_id . should_equal Time_Zone.utc.zone_id

Test.specify "should handle errors when creating time" <|
group_builder.specify "should handle errors when creating time" <|
case create_new_datetime 1970 0 0 . catch of
Time_Error.Error msg _ ->
msg.to_text . contains "0" . should_be_true
@ -67,43 +67,43 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
result ->
Test.fail ("Unexpected result: " + result.to_text)

Test.specify "should format using provided pattern" <|
group_builder.specify "should format using provided pattern" <|
text = create_new_datetime 1970 (zone = Time_Zone.utc) . format "yyyy-MM-dd'T'HH:mm:ss"
text . should_equal "1970-01-01T00:00:00"

Test.specify "should support to_display_text" <|
group_builder.specify "should support to_display_text" <|
text = create_new_datetime 2020 12 21 11 23 45 nanosecond=123456789 . to_display_text
text . should_equal "2020-12-21 11:23:45.123"

text_2 = create_new_datetime 2020 12 21 11 23 45 . to_display_text
text_2 . should_equal "2020-12-21 11:23:45"

Test.specify "should format using provided pattern and locale" <|
group_builder.specify "should format using provided pattern and locale" <|
d = create_new_datetime 2020 6 21
d.format (Date_Time_Formatter.from "d. MMMM yyyy" (Locale.new "gb")) . should_equal "21. Jun 2020"
d.format (Date_Time_Formatter.from "d. MMMM yyyy" (Locale.new "fr")) . should_equal "21. juin 2020"

Test.specify "should format using default pattern" <|
group_builder.specify "should format using default pattern" <|
text = create_new_datetime 1970 (zone = Time_Zone.utc) . to_text
text . should_equal "1970-01-01 00:00:00Z[UTC]"

Test.specify "should convert to Json" <|
group_builder.specify "should convert to Json" <|
time = create_new_datetime 1970 12 21 (zone = Time_Zone.utc)
time.to_json.should_equal <|
zone_pairs = [["zone", Time_Zone.utc]]
time_pairs = [["year", 1970], ["month", 12], ["day", 21], ["hour", 0], ["minute", 0], ["second", 0], ["nanosecond", 0]]
JS_Object.from_pairs ([["type", "Date_Time"], ["constructor", "new"]] + time_pairs + zone_pairs) . to_text

Test.specify "should get Enso epoch seconds" <|
group_builder.specify "should get Enso epoch seconds" <|
(create_new_datetime 1582 10 15 0 0 8 (zone = Time_Zone.utc)).to_enso_epoch_seconds . should_equal 8
(Date_Time.enso_epoch_start + (Duration.new minutes=2)).to_enso_epoch_seconds . should_equal (2 * 60)
(Date_Time.enso_epoch_start - (Duration.new minutes=2)).to_enso_epoch_seconds . should_equal -(2 * 60)

Test.specify "should get Enso epoch millis" <|
group_builder.specify "should get Enso epoch millis" <|
(create_new_datetime 1582 10 15 0 0 8 (zone = Time_Zone.utc)).to_enso_epoch_milliseconds . should_equal 8000
(Date_Time.enso_epoch_start + (Duration.new seconds=2)).to_enso_epoch_milliseconds . should_equal (2 * 1000)

Test.specify "should set offset-based timezone" <|
group_builder.specify "should set offset-based timezone" <|
tz = Time_Zone.new 1 1 1
time = create_new_datetime 1970 (zone = Time_Zone.utc) . at_zone tz
time . year . should_equal 1970
@ -118,7 +118,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
time . zone . zone_id . should_equal tz.zone_id
time.to_display_text . should_equal "1970-01-01 01:01:01[+01:01:01]"

Test.specify "should set id-based timezone" <|
group_builder.specify "should set id-based timezone" <|
tz = Time_Zone.parse "Europe/Moscow"
time = create_new_datetime 1970 (zone = Time_Zone.utc) . at_zone tz
time . year . should_equal 1970
@ -133,7 +133,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
time . zone . zone_id . should_equal tz.zone_id
time.to_display_text . should_equal "1970-01-01 03:00:00[Europe/Moscow]"

Test.specify "should allow to set an interop timezone" <|
group_builder.specify "should allow to set an interop timezone" <|
interop_tz = ZoneOffset.ofTotalSeconds 3600
dt0 = create_new_datetime 2020 (zone = Time_Zone.utc)
dt1 = dt0.at_zone interop_tz
@ -141,7 +141,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
dt1.zone . zone_id . should_equal interop_tz.zone_id
dt1.to_display_text . should_equal "2020-01-01 01:00:00[+01:00]"

Test.specify "should get time of day from offsed-based time" <|
|
||||
group_builder.specify "should get time of day from offsed-based time" <|
|
||||
time = parse_datetime "1970-01-01T00:00:01+01:00" . time_of_day
time . hour . should_equal 0
time . minute . should_equal 0
@ -150,7 +150,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
time . microsecond . should_equal 0
time . nanosecond . should_equal 0

Test.specify "should get time of day from id-based time" <|
group_builder.specify "should get time of day from id-based time" <|
time = parse_datetime "1970-01-01T00:00:01+01:00[Europe/Paris]" . time_of_day
time . hour . should_equal 0
time . minute . should_equal 0
@ -159,19 +159,19 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
time . microsecond . should_equal 0
time . nanosecond . should_equal 0

Test.specify "should get date from offsed-based time" <|
|
||||
group_builder.specify "should get date from offsed-based time" <|
|
||||
time = parse_datetime "1970-01-01T00:00:01+01:00" . date
time . year . should_equal 1970
time . month . should_equal 1
time . day . should_equal 1

Test.specify "should get date from id-based time" <|
group_builder.specify "should get date from id-based time" <|
time = parse_datetime "1970-01-01T00:00:01+01:00[Europe/Paris]" . date
time . year . should_equal 1970
time . month . should_equal 1
time . day . should_equal 1

Test.specify "should add Duration" <|
group_builder.specify "should add Duration" <|
time = create_new_datetime 1970 (zone = Time_Zone.utc) + (Duration.new nanoseconds=1)
time . year . should_equal 1970
time . month . should_equal 1
@ -184,7 +184,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
time . nanosecond . should_equal 1
time . zone . should_equal Time_Zone.utc

Test.specify "should add Period" <|
group_builder.specify "should add Period" <|
time = (create_new_datetime 1970 (zone = Time_Zone.utc)) + (Period.new months=1)
time . year . should_equal 1970
time . month . should_equal 2
@ -197,7 +197,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
time . nanosecond . should_equal 0
time . zone . zone_id . should_equal Time_Zone.utc.zone_id

Test.specify "should add mixed Period and Duration" <|
group_builder.specify "should add mixed Period and Duration" <|
time = create_new_datetime 1970 (zone = Time_Zone.utc) + (Period.new months=1) + (Duration.new hours=3)
time . year . should_equal 1970
time . month . should_equal 2
@ -210,7 +210,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
time . nanosecond . should_equal 0
time . zone . zone_id . should_equal Time_Zone.utc.zone_id

Test.specify "should subtract Duration" <|
group_builder.specify "should subtract Duration" <|
time = create_new_datetime 1970 (zone = Time_Zone.utc) - (Duration.new hours=1)
time . year . should_equal 1969
time . month . should_equal 12
@ -223,7 +223,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
time . nanosecond . should_equal 0
time . zone . zone_id . should_equal Time_Zone.utc.zone_id

Test.specify "should subtract Period" <|
group_builder.specify "should subtract Period" <|
time = create_new_datetime 1970 (zone = Time_Zone.utc) - (Period.new months=1)
time . year . should_equal 1969
time . month . should_equal 12
@ -236,7 +236,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
time . nanosecond . should_equal 0
time . zone . zone_id . should_equal Time_Zone.utc.zone_id

Test.specify "should subtract mixed Period and Duration" <|
group_builder.specify "should subtract mixed Period and Duration" <|
time = create_new_datetime 1970 (zone = Time_Zone.utc) - (Period.new months=1) - (Duration.new hours=3)
time . year . should_equal 1969
time . month . should_equal 11
@ -249,7 +249,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
time . nanosecond . should_equal 0
time . zone . zone_id . should_equal Time_Zone.utc.zone_id

Test.specify "should support mixed interval operators" <|
group_builder.specify "should support mixed interval operators" <|
time = create_new_datetime 1970 (zone = Time_Zone.utc) - (Period.new months=1) + (Duration.new hours=12)
time . year . should_equal 1969
time . month . should_equal 12
@ -262,7 +262,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
time . nanosecond . should_equal 0
time . zone . zone_id . should_equal Time_Zone.utc.zone_id

Test.specify "should support addition of Date_Period" <|
group_builder.specify "should support addition of Date_Period" <|
time = create_new_datetime 1970 (zone = Time_Zone.utc)
time+Date_Period.Year . should_equal <| create_new_datetime 1971 (zone = Time_Zone.utc)
time+Date_Period.Quarter . should_equal <| create_new_datetime 1970 4 (zone = Time_Zone.utc)
@ -270,7 +270,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
time+Date_Period.Week . should_equal <| create_new_datetime 1970 1 8 (zone = Time_Zone.utc)
time+Date_Period.Day . should_equal <| create_new_datetime 1970 1 2 (zone = Time_Zone.utc)

Test.specify "should support subtraction of Date_Period" <|
group_builder.specify "should support subtraction of Date_Period" <|
time = create_new_datetime 1970 (zone = Time_Zone.utc)
time-Date_Period.Year . should_equal <| create_new_datetime 1969 (zone = Time_Zone.utc)
time-Date_Period.Quarter . should_equal <| create_new_datetime 1969 10 (zone = Time_Zone.utc)
@ -278,7 +278,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
time-Date_Period.Week . should_equal <| create_new_datetime 1969 12 25 (zone = Time_Zone.utc)
time-Date_Period.Day . should_equal <| create_new_datetime 1969 12 31 (zone = Time_Zone.utc)

Test.specify "should support addition of Time_Period" <|
group_builder.specify "should support addition of Time_Period" <|
time = create_new_datetime 1970 (zone = Time_Zone.utc)
time+Time_Period.Day . should_equal <| create_new_datetime 1970 1 2 (zone = Time_Zone.utc)
time+Time_Period.Hour . should_equal <| create_new_datetime 1970 1 1 1 (zone = Time_Zone.utc)
@ -289,7 +289,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
time+Time_Period.Microsecond . should_equal <| create_new_datetime 1970 1 1 0 0 0 (10^3) (zone = Time_Zone.utc)
time+Time_Period.Nanosecond . should_equal <| create_new_datetime 1970 1 1 0 0 0 1 (zone = Time_Zone.utc)

Test.specify "should support subtraction of Time_Period" <|
group_builder.specify "should support subtraction of Time_Period" <|
time = create_new_datetime 1970 (zone = Time_Zone.utc)
time-Time_Period.Day . should_equal <| create_new_datetime 1969 12 31 (zone = Time_Zone.utc)
time-Time_Period.Hour . should_equal <| create_new_datetime 1969 12 31 23 (zone = Time_Zone.utc)
@ -301,7 +301,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
time-Time_Period.Microsecond . should_equal <| create_new_datetime 1969 12 31 23 59 59 (second_in_nanos - 10^3) (zone = Time_Zone.utc)
time-Time_Period.Nanosecond . should_equal <| create_new_datetime 1969 12 31 23 59 59 (second_in_nanos - 1) (zone = Time_Zone.utc)

Test.specify "should support mixed addition and subtraction of Date_Period and Time_Period" <|
group_builder.specify "should support mixed addition and subtraction of Date_Period and Time_Period" <|
time = create_new_datetime 1970 (zone = Time_Zone.utc)
time+Date_Period.Quarter+Time_Period.Hour . should_equal <| create_new_datetime 1970 4 1 1 (zone = Time_Zone.utc)
time+Time_Period.Hour+Date_Period.Quarter . should_equal <| create_new_datetime 1970 4 1 1 (zone = Time_Zone.utc)
@ -310,7 +310,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
time+Date_Period.Quarter-Date_Period.Month . should_equal <| create_new_datetime 1970 3 1 (zone = Time_Zone.utc)
time+Date_Period.Day-Time_Period.Day . should_equal <| create_new_datetime 1970 (zone = Time_Zone.utc)

Test.specify "will reflect that Time_Period.Day does not reflect daylight saving" <|
group_builder.specify "will reflect that Time_Period.Day does not reflect daylight saving" <|
tz = Time_Zone.parse "Europe/Warsaw"
dt = Date_Time.new 2023 03 26 01 20 zone=tz

@ -323,12 +323,12 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
# Date Period shifts by 1 calendar day.
dt2 . should_equal (Date_Time.new 2023 03 27 01 20 zone=tz)

Test.specify "should get the default comparator for datetimes" <|
group_builder.specify "should get the default comparator for datetimes" <|
Comparable.from (create_new_datetime 2023 2 3 23 59) . should_equal Default_Comparator
Comparable.from (parse_datetime "2021-01-01T00:30:12.7102[UTC]") . should_equal Default_Comparator
Comparable.from (create_new_datetime 2022 10 31 2 30 55 1234) . should_equal Default_Comparator

Test.specify "should be comparable" <|
group_builder.specify "should be comparable" <|
time_1 = parse_datetime "2021-01-01T00:30:12.7102[UTC]"
time_2 = parse_datetime "2021-01-01T04:00:10.0+04:00"
(time_1 == time_2) . should_be_false
@ -351,7 +351,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
Ordering.compare time_1 date . should_fail_with Incomparable_Values
time_1==date . should_be_false

Test.specify "simple computations before Enso epoch should produce a warning" <|
group_builder.specify "simple computations before Enso epoch should produce a warning" <|
expect_value_with_warning actual_value expected_value=Nothing =
if expected_value != Nothing then actual_value . should_equal expected_value
(((Warning.get_all actual_value).length) > 0) . should_be_true
@ -365,14 +365,14 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
expect_value_with_warning (create_new_datetime 100 4).length_of_month
expect_value_with_warning (create_new_datetime 100 4 15).day_of_week

Test.specify "should create datetime before epoch start" <|
group_builder.specify "should create datetime before epoch start" <|
(create_new_datetime 100 1 2 3 4).year . should_equal 100
(create_new_datetime 100 1 2 3 4).month . should_equal 1
(create_new_datetime 100 1 2 3 4).day . should_equal 2
(create_new_datetime 100 1 2 3 4).hour . should_equal 3
(create_new_datetime 100 1 2 3 4).minute . should_equal 4

Test.specify "should correctly determine the type of datetime" <|
group_builder.specify "should correctly determine the type of datetime" <|
new_datetime = create_new_datetime 2020 6 1 10 0 0
parsed_datetime = parse_datetime "2021-02-01T00:30:12.7102[UTC]"

@ -380,7 +380,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
Meta.type_of parsed_datetime . should_equal_type Date_Time

max_nanos = 999999999
Test.specify "should allow to find start/end of a Date_Period containing the current datetime" <|
group_builder.specify "should allow to find start/end of a Date_Period containing the current datetime" <|
d1 = create_new_datetime 2022 9 12 15 37 58
d1.start_of Date_Period.Year . should_equal (Date_Time.new 2022 1 1)
d1.end_of Date_Period.Year . should_equal (Date_Time.new 2022 12 31 23 59 59 nanosecond=max_nanos)
@ -437,7 +437,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
(create_new_datetime 2000 7 1 16 50).end_of Date_Period.Quarter . should_equal (Date_Time.new 2000 9 30 23 59 59 nanosecond=max_nanos)
(create_new_datetime 2000 6 30 17 40).end_of Date_Period.Quarter . should_equal (Date_Time.new 2000 6 30 23 59 59 nanosecond=max_nanos)

Test.specify "should allow to find start/end of a Time_Period containing the current datetime" <|
group_builder.specify "should allow to find start/end of a Time_Period containing the current datetime" <|
d1 = create_new_datetime 2022 9 12 15 37 58 123456789
d1.start_of Time_Period.Day . should_equal (Date_Time.new 2022 9 12)
d1.end_of Time_Period.Day . should_equal (Date_Time.new 2022 9 12 23 59 59 nanosecond=max_nanos)
@ -484,7 +484,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
if name.contains "Python" then "Python doesn't support time zones correctly" else
Nothing

Test.specify "should find start/end of a Date_Period or Time_Period containing the current datetime correctly near the spring DST switch" pending=dst_pending <|
group_builder.specify "should find start/end of a Date_Period or Time_Period containing the current datetime correctly near the spring DST switch" pending=dst_pending <|
d1 = create_new_datetime 2022 3 27 1 34 15 0 tz
d2 = create_new_datetime 2022 3 27 3 34 15 0 tz
d1_plus = d1 + (Duration.new hours=1)
@ -519,7 +519,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
d2_end . should_equal (Date_Time.new 2022 3 27 3 59 59 nanosecond=max_nanos zone=tz)

dst_overlap_message = "We cannot correctly migrate the datetime inside of the timeline overlap through the polyglot boundary - as due to polyglot conversion limitation, always the earlier one is chosen. See the bug report: https://github.com/oracle/graal/issues/4918"
Test.specify "should find start/end of a Date_Period or Time_Period containing the current datetime correctly near the autumn DST switch" pending=dst_overlap_message <|
group_builder.specify "should find start/end of a Date_Period or Time_Period containing the current datetime correctly near the autumn DST switch" pending=dst_overlap_message <|
d3 = create_new_datetime 2022 10 30 2 30 15 0 tz
d4 = d3 + (Duration.new hours=1)

@ -565,7 +565,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
d4_end.nanosecond . should_equal 999
Time_Utils.get_datetime_offset d4_end . should_equal offset_1_h

Test.specify "should allow to shift the date by N working days" <|
group_builder.specify "should allow to shift the date by N working days" <|
# 2000-2-1 is a Tuesday
create_new_datetime 2000 2 1 12 30 . add_work_days 0 . should_equal (Date_Time.new 2000 2 1 12 30)
create_new_datetime 2000 2 1 12 15 55 . add_work_days . should_equal (Date_Time.new 2000 2 2 12 15 55)
@ -586,7 +586,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
create_new_datetime 2000 2 27 12 10 . add_work_days 3 . should_equal (Date_Time.new 2000 3 2 12 10)
create_new_datetime 1999 2 27 12 10 . add_work_days 3 . should_equal (Date_Time.new 1999 3 4 12 10)

Test.specify "should handle shifting dates around spring DST edge cases" pending=dst_pending <|
group_builder.specify "should handle shifting dates around spring DST edge cases" pending=dst_pending <|
# 2022-10-30 and 2022-03-27 are DST switch days, Sundays.
(create_new_datetime 2022 10 30 2 30 55 1234 . add_work_days 0) . should_equal (create_new_datetime 2022 10 31 2 30 55 1234)
(create_new_datetime 2022 10 30 1 30 . add_work_days 1) . should_equal (Date_Time.new 2022 11 1 1 30)
@ -596,14 +596,14 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
(create_new_datetime 2022 3 27 1 30 zone=tz . add_work_days 0) . should_equal (Date_Time.new 2022 3 28 1 30 zone=tz)
(create_new_datetime 2022 3 27 3 30 zone=tz . add_work_days 1) . should_equal (Date_Time.new 2022 3 29 3 30 zone=tz)

Test.specify "should handle shifting dates around autumn DST edge cases" pending=dst_overlap_message <|
group_builder.specify "should handle shifting dates around autumn DST edge cases" pending=dst_overlap_message <|
d3 = create_new_datetime 2022 10 30 2 30 15 0 tz
d4 = d3 + (Duration.new hours=1)

# TODO we need to check and document the actual behaviour once it is expressible, it may be equally acceptable to shift to 3:30 instead of 2:30.
d4 . add_work_days 0 . should_equal (Date_Time.new 2022 10 31 2 30 15 0 tz)

Test.specify "should allow to shift the date by negative amount of working days" <|
group_builder.specify "should allow to shift the date by negative amount of working days" <|
# 2000-2-1 is a Tuesday
create_new_datetime 2000 2 1 12 30 . add_work_days -1 . should_equal (Date_Time.new 2000 1 31 12 30)
create_new_datetime 2000 2 1 13 30 . add_work_days -2 . should_equal (Date_Time.new 2000 1 28 13 30)
@ -613,7 +613,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
create_new_datetime 2000 2 6 0 1 . add_work_days -2 . should_equal (Date_Time.new 2000 2 3 0 1)
create_new_datetime 2000 2 6 23 59 . add_work_days -6 . should_equal (Date_Time.new 2000 1 28 23 59)

Test.specify "should allow to shift the date by N working days, skipping custom holidays" <|
group_builder.specify "should allow to shift the date by N working days, skipping custom holidays" <|
all_year_holiday year =
first_day = Date.new year 1 1
Vector.new first_day.length_of_year (n -> first_day + (Period.new days=n))
@ -636,7 +636,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
create_new_datetime 1999 10 30 14 40 . add_work_days 0 (duplicated_holiday_november 1999) . should_equal (Date_Time.new 1999 11 3 14 40)
create_new_datetime 1999 10 30 15 50 . add_work_days 1 (duplicated_holiday_november 1999) . should_equal (Date_Time.new 1999 11 4 15 50)

Test.specify "should allow extracting a date_part" <|
group_builder.specify "should allow extracting a date_part" <|
d1 = create_new_datetime 2023 12 30 15 37 58 123456789
d1.date_part Date_Period.Year . should_equal 2023
d1.date_part Date_Period.Quarter . should_equal 4
@ -654,7 +654,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=

pending_date_diff_test = if name.contains "Python" then "Loose Zone conversions are on, skipping the test"

Test.specify "should allow computing date_diff" pending=pending_date_diff_test <|
group_builder.specify "should allow computing date_diff" pending=pending_date_diff_test <|
t1 = create_new_datetime 2021 11 3 10 15 0 zone=Time_Zone.utc
t2 = create_new_datetime 2021 12 5 12 30 20 zone=Time_Zone.utc

@ -688,7 +688,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
t1.date_diff t2 Time_Period.Nanosecond . should_equal 2772920000000000
t1.date_diff (Date_Time.new 2021 11 3 10 15 30 123 456 789 zone=Time_Zone.utc) Time_Period.Nanosecond . should_equal 30123456789

Test.specify "should allow shifting with date_add" <|
group_builder.specify "should allow shifting with date_add" <|
t1 = Date_Time.new 2021 01 01 12 30 0
t1.date_add 5 Date_Period.Day . should_equal (Date_Time.new 2021 01 06 12 30 0)
t1.date_add -1 Time_Period.Day . should_equal (Date_Time.new 2020 12 31 12 30 0)
@ -707,7 +707,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
Test.expect_panic_with (t1.date_add 1.5 Date_Period.Day) Type_Error
Test.expect_panic_with (t1.date_add 1.0 Date_Period.Day) Type_Error

Test.specify "date_diff and date_add should behave well around DST" <|
group_builder.specify "date_diff and date_add should behave well around DST" <|
zone = Time_Zone.parse "Europe/Warsaw"
dt1 = Date_Time.new 2023 03 26 00 30 00 zone=zone

@ -733,7 +733,7 @@ spec_with name create_new_datetime parse_datetime nanoseconds_loss_in_precision=
dt3.date_diff dt4 Time_Period.Day . should_equal 0
dt3.date_diff dt4 Time_Period.Hour . should_equal 23

Date_Part_Spec.spec name create_new_datetime
Date_Part_Spec.add_specs suite_builder name create_new_datetime

js_datetime year month=1 day=1 hour=0 minute=0 second=0 nanosecond=0 zone=Time_Zone.system =
Panic.catch Any (js_datetime_with_zone year month day hour minute second nanosecond zone) (err -> Error.throw (Time_Error.Error err.payload))
@ -812,4 +812,8 @@ java_parse date_text_raw pattern=Nothing =
formatter = DateTimeFormatter.ofPattern(pattern)
Panic.catch Any (maybe_parse_java_zoned date_text formatter) (cause -> parse_java_local cause.payload date_text pattern)

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

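The same pattern extends to parameterized specs like `spec_with` above: the helper now takes the `suite_builder` as its first argument and registers its group on it, so the same group body can be instantiated for several constructors. A minimal sketch with hypothetical names (the toy constructors stand in for `enso_datetime`, `js_datetime`, etc.):

from Standard.Base import all
from Standard.Test_New import all

# Hypothetical constructors used only for illustration.
make_pair a b = [a, b]
make_pair_via_vector a b = Vector.new 2 (i -> if i == 0 then a else b)

spec_with suite_builder name create_pair =
    suite_builder.group name group_builder->
        group_builder.specify "should preserve the argument order" <|
            create_pair 1 2 . should_equal [1, 2]

add_specs suite_builder =
    spec_with suite_builder "Literal pair" make_pair
    spec_with suite_builder "Vector.new pair" make_pair_via_vector

main =
    suite = Test.build suite_builder->
        add_specs suite_builder
    suite.run_with_filter
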
@ -1,56 +1,60 @@
from Standard.Base import all

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

spec =
Test.group "Day_Of_Week conversions" <|
Test.specify "should be able to convert to an Integer" <|

add_specs suite_builder =
suite_builder.group "Day_Of_Week conversions" group_builder->
group_builder.specify "should be able to convert to an Integer" <|
Day_Of_Week.Sunday.to_integer . should_equal 1
Day_Of_Week.Wednesday.to_integer . should_equal 4
Day_Of_Week.Friday.to_integer . should_equal 6
Day_Of_Week.Saturday.to_integer . should_equal 7

Test.specify "should be able to convert from an Integer" <|
group_builder.specify "should be able to convert from an Integer" <|
Day_Of_Week.from 1 . should_equal Day_Of_Week.Sunday
Day_Of_Week.from 4 . should_equal Day_Of_Week.Wednesday
Day_Of_Week.from 6 . should_equal Day_Of_Week.Friday
Day_Of_Week.from 7 . should_equal Day_Of_Week.Saturday

Test.specify "should be able to convert to an Integer starting at 0" <|
group_builder.specify "should be able to convert to an Integer starting at 0" <|
Day_Of_Week.Sunday.to_integer start_at_zero=True . should_equal 0
Day_Of_Week.Wednesday.to_integer start_at_zero=True . should_equal 3
Day_Of_Week.Friday.to_integer start_at_zero=True . should_equal 5
Day_Of_Week.Saturday.to_integer start_at_zero=True . should_equal 6

Test.specify "should be able to convert from an Integer starting at 0" <|
group_builder.specify "should be able to convert from an Integer starting at 0" <|
Day_Of_Week.from 0 start_at_zero=True . should_equal Day_Of_Week.Sunday
Day_Of_Week.from 3 start_at_zero=True . should_equal Day_Of_Week.Wednesday
Day_Of_Week.from 5 start_at_zero=True . should_equal Day_Of_Week.Friday
Day_Of_Week.from 6 start_at_zero=True . should_equal Day_Of_Week.Saturday

Test.specify "should be able to convert to an Integer starting on Monday" <|
group_builder.specify "should be able to convert to an Integer starting on Monday" <|
Day_Of_Week.Sunday.to_integer Day_Of_Week.Monday . should_equal 7
Day_Of_Week.Wednesday.to_integer Day_Of_Week.Monday . should_equal 3
Day_Of_Week.Friday.to_integer Day_Of_Week.Monday . should_equal 5
Day_Of_Week.Saturday.to_integer Day_Of_Week.Monday . should_equal 6

Test.specify "should be able to convert from an Integer starting on Monday" <|
group_builder.specify "should be able to convert from an Integer starting on Monday" <|
Day_Of_Week.from 7 Day_Of_Week.Monday . should_equal Day_Of_Week.Sunday
Day_Of_Week.from 3 Day_Of_Week.Monday . should_equal Day_Of_Week.Wednesday
Day_Of_Week.from 5 Day_Of_Week.Monday . should_equal Day_Of_Week.Friday
Day_Of_Week.from 6 Day_Of_Week.Monday . should_equal Day_Of_Week.Saturday

Test.specify "should be able to convert to an Integer starting on Monday at 0" <|
group_builder.specify "should be able to convert to an Integer starting on Monday at 0" <|
Day_Of_Week.Sunday.to_integer Day_Of_Week.Monday True . should_equal 6
Day_Of_Week.Wednesday.to_integer Day_Of_Week.Monday True . should_equal 2
Day_Of_Week.Friday.to_integer Day_Of_Week.Monday True . should_equal 4
Day_Of_Week.Saturday.to_integer Day_Of_Week.Monday True . should_equal 5

Test.specify "should be able to convert from an Integer starting on Monday at 0" <|
group_builder.specify "should be able to convert from an Integer starting on Monday at 0" <|
Day_Of_Week.from 6 Day_Of_Week.Monday True . should_equal Day_Of_Week.Sunday
Day_Of_Week.from 2 Day_Of_Week.Monday True . should_equal Day_Of_Week.Wednesday
Day_Of_Week.from 4 Day_Of_Week.Monday True . should_equal Day_Of_Week.Friday
Day_Of_Week.from 5 Day_Of_Week.Monday True . should_equal Day_Of_Week.Saturday

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -2,39 +2,39 @@ from Standard.Base import all
import Standard.Base.Errors.Common.Incomparable_Values
import Standard.Base.Errors.Time_Error.Time_Error

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


polyglot java import java.time.Duration as Java_Duration
polyglot java import java.time.LocalDate
polyglot java import java.time.LocalDateTime as Java_DateTime

spec =
Test.group "Duration" <|
add_specs suite_builder =
suite_builder.group "Duration" group_builder->

Test.specify "should create interval seconds" <|
group_builder.specify "should create interval seconds" <|
duration = (Duration.new seconds=5)
duration.seconds . should_equal 5
duration.milliseconds . should_equal 0

Test.specify "should create interval between two points in time" <|
group_builder.specify "should create interval between two points in time" <|
(Duration.between (Date_Time.new 2001 1 1 3) (Date_Time.new 2001 1 1 10)).total_hours . should_equal 7
(Duration.between (Date_Time.new 2001 1 1) (Date_Time.new 2001 1 7)).total_hours . should_equal (6 * 24)
(Duration.between (Date_Time.new 2001 1 1 13) (Date_Time.new 2001 1 7 16)).total_hours . should_equal (3 + 6 * 24)

Test.specify "should normalize periods" <|
group_builder.specify "should normalize periods" <|
(Duration.new seconds=60).total_minutes . should_equal 1
(Duration.new milliseconds=1000).total_seconds . should_equal 1

Test.specify "should normalize addition" <|
group_builder.specify "should normalize addition" <|
duration = (Duration.new hours=11) + (Duration.new hours=1)
duration.hours . should_equal 12

Test.specify "should normalize subtraction" <|
group_builder.specify "should normalize subtraction" <|
duration = (Duration.new hours=13) - (Duration.new hours=1)
duration.hours . should_equal 12

Test.specify "should render a friendly to display text" <|
group_builder.specify "should render a friendly to display text" <|
Duration.new . to_display_text . should_equal "0s"
Duration.new seconds=30 . to_display_text . should_equal "30s"
Duration.new seconds=30 milliseconds=500 . to_display_text . should_equal "30.5s"
@ -43,13 +43,13 @@ spec =
Duration.new hours=1 seconds=30 . to_display_text . should_equal "1h 0m 30s"
Duration.new hours=1 minutes=30 seconds=30 . to_display_text . should_equal "1h 30m 30s"

Test.specify "should convert to Json" <|
group_builder.specify "should convert to Json" <|
interval = (Duration.new nanoseconds=120) + (Duration.new seconds=30) + (Duration.new hours=14)
interval.to_json.should_equal <|
duration_pairs = [["hours", interval.hours], ["seconds", interval.seconds], ["nanoseconds", interval.nanoseconds]]
JS_Object.from_pairs ([["type", "Duration"], ["constructor", "new"]] + duration_pairs) . to_text

Test.specify "should be comparable" <|
group_builder.specify "should be comparable" <|
duration_1 = (Duration.new hours=5)
duration_2 = (Duration.new minutes=1)
Ordering.compare duration_1 duration_1 . should_equal Ordering.Equal
@ -58,7 +58,7 @@ spec =
duration_1>duration_2 . should_be_true
duration_1<duration_2 . should_be_false

Test.specify "should not mix Duration and Period" <|
group_builder.specify "should not mix Duration and Period" <|
durations = [(Duration.new hours=1), (Duration.zero), (Duration.new hours=1 seconds=30)]
periods = [(Period.new days=1), (Period.new 0), (Period.new years=30), (Period.new years=3 months=2)]
durations.each duration->
@ -70,30 +70,30 @@ spec =
(duration > period).should_fail_with Incomparable_Values
(duration < period).should_fail_with Incomparable_Values

Test.specify "Date_Time supports adding and subtracting Duration" <|
group_builder.specify "Date_Time supports adding and subtracting Duration" <|
((Date_Time.new 2022 10 1 hour=10) + (Duration.new hours=2)) . should_equal (Date_Time.new 2022 10 1 hour=12)
((Date_Time.new 2022 10 1 hour=10) - (Duration.new hours=2)) . should_equal (Date_Time.new 2022 10 1 hour=8)
((Date_Time.new 2022 10 2) - (Duration.new hours=24)) . should_equal (Date_Time.new 2022 10 1)
((Date_Time.new 2022 10 1 hour=2) - (Duration.new minutes=3)) . should_equal (Date_Time.new 2022 10 1 hour=1 minute=57)

Test.specify "Java Duration is equal to Enso Duration" <|
group_builder.specify "Java Duration is equal to Enso Duration" <|
(Duration.new hours=1) . should_equal (Java_Duration.ofHours 1)
(Duration.new minutes=80) . should_equal (Java_Duration.ofMinutes 80)
(Java_Duration.ofSeconds 30) . should_equal (Duration.new seconds=30)

Test.specify "Difference of Java Date and Enso date should be an Enso Duration" <|
group_builder.specify "Difference of Java Date and Enso date should be an Enso Duration" <|
(Duration.between (java_datetime 2022 01 01) (Date_Time.new 2022 01 02) timezone_aware=False).total_hours . should_equal 24
(Duration.between (Date_Time.new 2022 01 01) (java_datetime 2022 01 02) timezone_aware=False).total_hours . should_equal 24
(Duration.between (Date_Time.new 2022 01 01 13) (java_datetime 2022 01 01 14) timezone_aware=False).total_hours . should_equal 1
(Duration.between (java_datetime 2022 01 01 13 13) (Date_Time.new 2022 01 01 13 15) timezone_aware=False).total_minutes . should_equal 2

Test.specify "Difference of two Java Dates should be an Enso Duration" <|
group_builder.specify "Difference of two Java Dates should be an Enso Duration" <|
(Duration.between (java_datetime 2022 01 01) (java_datetime 2022 01 02) timezone_aware=False).total_hours . should_equal 24

Test.specify "Difference of two JS Dates is an Enso Duration" <|
group_builder.specify "Difference of two JS Dates is an Enso Duration" <|
(Duration.between (js_datetime 2022 09 01) (js_datetime 2022 09 02) timezone_aware=False).total_hours . should_equal 24

Test.specify "Difference of JS Dates and Java Dates should be Enso Duration" <|
group_builder.specify "Difference of JS Dates and Java Dates should be Enso Duration" <|
(Duration.between (js_datetime 2022 09 08) (java_datetime 2022 09 09) timezone_aware=False).total_hours . should_equal 24
(Duration.between (java_datetime 2022 09 09) (js_datetime 2022 09 08) timezone_aware=False).total_hours . should_equal (-24)

@ -109,4 +109,8 @@ foreign js js_datetime_impl year month day hour minute second nanosecond = """
}
return new Date(year, month - 1, day, hour, minute, second, nanosecond / 1000000);

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -1,39 +1,39 @@
from Standard.Base import all
import Standard.Base.Errors.Common.Incomparable_Values

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

spec =
Test.group "Period" <|
Test.specify "should create period years" <|

add_specs suite_builder =
suite_builder.group "Period" group_builder->
group_builder.specify "should create period years" <|
period = (Period.new years=5)
period.years . should_equal 5
period.days . should_equal 0

Test.specify "should add two Periods" <|
group_builder.specify "should add two Periods" <|
((Period.new years=1) + (Period.new years=2)).years . should_equal 3
((Period.new days=1) + (Period.new months=2)).days . should_equal 1
((Period.new days=1) + (Period.new months=2)).months . should_equal 2
((Period.new months=2) + (Period.new days=1)).days . should_equal 1
((Period.new months=2) + (Period.new days=1)).months . should_equal 2

Test.specify "should subtract two Periods" <|
group_builder.specify "should subtract two Periods" <|
((Period.new years=2) - (Period.new years=1)).years . should_equal 1
((Period.new years=1) - (Period.new months=2)).months . should_equal (-2)
((Period.new years=1) - (Period.new months=2)).years . should_equal 1

Test.specify "should get Period between two dates" <|
group_builder.specify "should get Period between two dates" <|
(Period.between (Date.new year=100) (Date.new year=150)) . should_equal (Period.new years=50)
(Period.between (Date.new year=150) (Date.new year=100)) . should_equal (Period.new years=(-50))
(Period.between (Date.new 2022 10 19) (Date.new 2022 11 01)) . should_equal (Period.new days=13)

Test.specify "should not compare between two periods" <|
group_builder.specify "should not compare between two periods" <|
((Period.new days=10) > (Period.new days=1)) . should_fail_with Incomparable_Values
((Period.new years=10) > (Period.new days=1)) . should_fail_with Incomparable_Values
((Period.new years=10 months=3) > (Period.new months=5)) . should_fail_with Incomparable_Values

Test.specify "two Periods are equal iff their fields are equal" <|
group_builder.specify "two Periods are equal iff their fields are equal" <|
((Period.new days=1) == (Period.new days=1)) . should_be_true
((Period.new months=12) == (Period.new years=1)) . should_be_false
((Period.new months=3) == (Period.new months=3)) . should_be_true
@ -41,7 +41,7 @@ spec =
((Period.new years=1 days=10) == (Period.new years=1 days=10)) . should_be_true
((Period.new days=1) != (Period.new months=1)) . should_be_true

Test.specify "should render a friendly to display text" <|
group_builder.specify "should render a friendly to display text" <|
Period.new . to_display_text . should_equal "0D"
Period.new years=2 . to_display_text . should_equal "2Y"
Period.new months=24 . to_display_text . should_equal "2Y"
@ -50,4 +50,8 @@ spec =
Period.new years=2 days=3 . to_display_text . should_equal "2Y 0M 3D"
Period.new days=18 . to_display_text . should_equal "18D"

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -1,7 +1,7 @@
from Standard.Base import all

from Standard.Test import Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


import project.Data.Time.Duration_Spec
import project.Data.Time.Period_Spec
@ -13,15 +13,19 @@ import project.Data.Time.Date_Time_Formatter_Spec
import project.Data.Time.Time_Zone_Spec
import project.Data.Time.Day_Of_Week_Spec

spec =
Date_Spec.spec
Date_Range_Spec.spec
Duration_Spec.spec
Period_Spec.spec
Time_Of_Day_Spec.spec
Date_Time_Spec.spec
Date_Time_Formatter_Spec.spec
Time_Zone_Spec.spec
Day_Of_Week_Spec.spec
add_specs suite_builder =
Date_Spec.add_specs suite_builder
Date_Range_Spec.add_specs suite_builder
Duration_Spec.add_specs suite_builder
Period_Spec.add_specs suite_builder
Time_Of_Day_Spec.add_specs suite_builder
Date_Time_Spec.add_specs suite_builder
Date_Time_Formatter_Spec.add_specs suite_builder
Time_Zone_Spec.add_specs suite_builder
Day_Of_Week_Spec.add_specs suite_builder

main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

main = Test_Suite.run_main spec
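The aggregator module keeps the same shape: every child spec module exposes `add_specs`, and the parent just forwards its own `suite_builder`. A minimal sketch with hypothetical child module names:

from Standard.Base import all
from Standard.Test_New import all

# Hypothetical child modules; each is assumed to expose its own `add_specs suite_builder`.
import project.My_First_Spec
import project.My_Second_Spec

add_specs suite_builder =
    My_First_Spec.add_specs suite_builder
    My_Second_Spec.add_specs suite_builder

main =
    suite = Test.build suite_builder->
        add_specs suite_builder
    suite.run_with_filter
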
@ -4,40 +4,40 @@ import Standard.Base.Errors.Common.Type_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Errors.Time_Error.Time_Error

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


polyglot java import java.time.LocalTime
polyglot java import java.time.format.DateTimeFormatter

spec =
specWith "Time_Of_Day" enso_time Time_Of_Day.parse
specWith "JavaLocalTime" java_time java_parse
add_specs suite_builder =
specWith suite_builder "Time_Of_Day" enso_time Time_Of_Day.parse
specWith suite_builder "JavaLocalTime" java_time java_parse
if Polyglot.is_language_installed "python" then
specWith "PythonLocalTime" python_time python_parse nanoseconds_loss_in_precision=True
specWith suite_builder "PythonLocalTime" python_time python_parse nanoseconds_loss_in_precision=True

specWith name create_new_time parse_time nanoseconds_loss_in_precision=False =
Test.group name <|
specWith suite_builder name create_new_time parse_time nanoseconds_loss_in_precision=False =
suite_builder.group name group_builder->

Test.specify "should create local time" <|
group_builder.specify "should create local time" <|
time = create_new_time 1 0 0
time . hour . should_equal 1
time . minute . should_equal 0
time . second . should_equal 0
time . to_seconds . should_equal 3600

Test.specify "should handle errors when creating a time" <|
group_builder.specify "should handle errors when creating a time" <|
case create_new_time 24 0 0 . catch of
Time_Error.Error msg _ ->
msg.to_text . contains "24" . should_not_equal -1
result ->
Test.fail ("Unexpected result: " + result.to_text)

Test.specify "should format local time using provided pattern" <|
group_builder.specify "should format local time using provided pattern" <|
text = create_new_time 12 20 44 . format "HHmmss"
text . should_equal "122044"

Test.specify "should support display_text" <|
group_builder.specify "should support display_text" <|
text = create_new_time 12 20 44 . to_display_text
text . should_equal "12:20:44"

@ -47,33 +47,33 @@ specWith name create_new_time parse_time nanoseconds_loss_in_precision=False =
text_3 = create_new_time 12 20 44 123000000 . to_display_text
text_3 . should_equal "12:20:44.123"

Test.specify "should format using provided pattern and locale" <|
group_builder.specify "should format using provided pattern and locale" <|
d = create_new_time 12 20 44
# Note that the results are all the same.
d.format "HH:mm" . should_equal "12:20"
d.format (Date_Time_Formatter.from "HH:mm" (Locale.new "gb")) . should_equal "12:20"
d.format (Date_Time_Formatter.from "HH:mm" (Locale.new "fr")) . should_equal "12:20"

Test.specify "should format local time using default pattern" <|
group_builder.specify "should format local time using default pattern" <|
text = create_new_time 12 20 44 . to_text
text . should_equal "12:20:44"

Test.specify "should convert to Json" <|
group_builder.specify "should convert to Json" <|
time = create_new_time 1 2 3
time.to_json.should_equal <|
time_pairs = [["hour", 1], ["minute", 2], ["second", 3], ["nanosecond", 0]]
JS_Object.from_pairs ([["type", "Time_Of_Day"], ["constructor", "new"]] + time_pairs) . to_text

Test.specify "should parse default time format" <|
group_builder.specify "should parse default time format" <|
text = create_new_time 12 20 44 . to_text
time = Time_Of_Day.parse text
time.to_seconds . should_equal 44444

Test.specify "should parse local time" <|
group_builder.specify "should parse local time" <|
time = parse_time "10:00:00"
time.to_seconds . should_equal 36000

Test.specify "should throw error when parsing invalid time" <|
group_builder.specify "should throw error when parsing invalid time" <|
case parse_time "1200" . catch of
Time_Error.Error msg _ ->
## This error message may or may not contain the suffix:
@ -83,11 +83,11 @@ specWith name create_new_time parse_time nanoseconds_loss_in_precision=False =
result ->
Test.fail ("Unexpected result: " + result.to_text)

Test.specify "should parse custom format" <|
group_builder.specify "should parse custom format" <|
time = parse_time "12:30AM" "hh:mma"
time.to_seconds . should_equal 1800

Test.specify "should throw error when parsing custom format" <|
group_builder.specify "should throw error when parsing custom format" <|
time = parse_time "12:30" "HH:mm:ss"
case time.catch of
Time_Error.Error msg _ ->
@ -95,7 +95,7 @@ specWith name create_new_time parse_time nanoseconds_loss_in_precision=False =
result ->
Test.fail ("Unexpected result: " + result.to_text)

Test.specify "should convert to time" <|
group_builder.specify "should convert to time" <|
datetime = create_new_time 1 0 0 . to_date_time (Date.new 2000 12 21) Time_Zone.utc
datetime . year . should_equal 2000
datetime . month . should_equal 12
@ -108,34 +108,34 @@ specWith name create_new_time parse_time nanoseconds_loss_in_precision=False =
datetime . nanosecond . should_equal 0
datetime . zone . zone_id . should_equal Time_Zone.utc.zone_id

Test.specify "date-time conversion should work with interop values" <|
group_builder.specify "date-time conversion should work with interop values" <|
date = Date.new 2000 12 21
time = create_new_time 12 30 45
datetime = date.to_date_time time
datetime.date . should_equal date
datetime.time_of_day . should_equal time

Test.specify "should add time-based interval" <|
group_builder.specify "should add time-based interval" <|
time = create_new_time 0 + (Duration.new minutes=1)
time . to_seconds . should_equal 60

Test.specify "should subtract time-based interval" <|
group_builder.specify "should subtract time-based interval" <|
time = create_new_time 0 - (Duration.new minutes=1)
time . to_seconds . should_equal 86340

Test.specify "should support mixed interval operators" <|
group_builder.specify "should support mixed interval operators" <|
time = create_new_time 0 + (Duration.new hours=1) - (Duration.new seconds=1)
time . to_seconds . should_equal 3599

Test.specify "should throw error when adding date-based interval" <|
group_builder.specify "should throw error when adding date-based interval" <|
Test.expect_panic_with matcher=Type_Error <|
create_new_time 0 + (Period.new days=1)

Test.specify "should throw error when subtracting date-based interval" <|
group_builder.specify "should throw error when subtracting date-based interval" <|
Test.expect_panic_with matcher=Type_Error <|
create_new_time 0 - (Period.new days=1)

Test.specify "should support addition of Time_Period" <|
group_builder.specify "should support addition of Time_Period" <|
time = create_new_time 0
time+Time_Period.Day . should_equal <| create_new_time 0
time+Time_Period.Hour . should_equal <| create_new_time 1
@ -146,7 +146,7 @@ specWith name create_new_time parse_time nanoseconds_loss_in_precision=False =
time+Time_Period.Microsecond . should_equal <| create_new_time 0 0 0 10^3
time+Time_Period.Nanosecond . should_equal <| create_new_time 0 0 0 1

Test.specify "should support subtraction of Time_Period" <|
group_builder.specify "should support subtraction of Time_Period" <|
time = create_new_time 12
time-Time_Period.Day . should_equal <| create_new_time 12
time-Time_Period.Hour . should_equal <| create_new_time 11
@ -158,12 +158,12 @@ specWith name create_new_time parse_time nanoseconds_loss_in_precision=False =
time-Time_Period.Microsecond . should_equal <| create_new_time 11 59 59 (second_in_nanos - 10^3)
time-Time_Period.Nanosecond . should_equal <| create_new_time 11 59 59 (second_in_nanos - 1)

Test.specify "should support mixed addition and subtraction of Date_Period and Time_Period" <|
group_builder.specify "should support mixed addition and subtraction of Date_Period and Time_Period" <|
time = create_new_time 0
time+Time_Period.Hour-Time_Period.Minute . should_equal <| create_new_time 0 59
time+Time_Period.Minute+Time_Period.Minute-Time_Period.Minute . should_equal <| create_new_time 0 1

Test.specify "should be comparable" <|
group_builder.specify "should be comparable" <|
time_1 = parse_time "12:30:12.7102"
time_2 = parse_time "04:00:10.0"
(time_1 == time_2) . should_be_false
@ -173,7 +173,7 @@ specWith name create_new_time parse_time nanoseconds_loss_in_precision=False =
time_1<time_2 . should_be_false
time_2<(Date_Time.new 1999 1 1 4 0 10) . should_fail_with Incomparable_Values

Test.specify "should correctly determine the type of timeofday" <|
group_builder.specify "should correctly determine the type of timeofday" <|
new_timeofday = create_new_time 15 37 58
parsed_timeofday = parse_time "10:00:00"

@ -181,7 +181,7 @@ specWith name create_new_time parse_time nanoseconds_loss_in_precision=False =
Meta.type_of parsed_timeofday . should_equal_type Time_Of_Day

max_nanos = 999999999
Test.specify "should allow to find start/end of a Time_Period containing the current time of day" <|
group_builder.specify "should allow to find start/end of a Time_Period containing the current time of day" <|
d1 = create_new_time 15 37 58 123456789
d1.start_of Time_Period.Day . should_equal (Time_Of_Day.new)
d1.end_of Time_Period.Day . should_equal (Time_Of_Day.new 23 59 59 nanosecond=max_nanos)
@ -220,7 +220,7 @@ specWith name create_new_time parse_time nanoseconds_loss_in_precision=False =
d3.start_of Time_Period.Second . should_equal (Time_Of_Day.new 23 59 59 0)
d3.end_of Time_Period.Second . should_equal (Time_Of_Day.new 23 59 59 nanosecond=max_nanos)

Test.specify "should allow extracting a date_part" <|
group_builder.specify "should allow extracting a date_part" <|
d1 = create_new_time 15 37 58 123456789
d1.date_part Time_Period.Hour . should_equal 15
d1.date_part Time_Period.Minute . should_equal 37
@ -233,7 +233,7 @@ specWith name create_new_time parse_time nanoseconds_loss_in_precision=False =
d1.date_part Time_Period.Day . should_fail_with Illegal_Argument
Test.expect_panic_with (d1.date_part Date_Period.Day) Type_Error

Test.specify "should allow computing a date_diff" <|
group_builder.specify "should allow computing a date_diff" <|
t1 = create_new_time 10 15 0
t2 = create_new_time 12 30 20

@ -260,7 +260,7 @@ specWith name create_new_time parse_time nanoseconds_loss_in_precision=False =
t1.date_diff t2 Time_Period.Nanosecond . should_equal 8120*1000*1000*1000
t1.date_diff (Time_Of_Day.new 10 15 12 34 56 78) Time_Period.Nanosecond . should_equal 12034056078

Test.specify "should allow shifting with date_add" <|
group_builder.specify "should allow shifting with date_add" <|
t1 = create_new_time 23 45 0

t1.date_add -1 Time_Period.Hour . should_equal (Time_Of_Day.new 22 45 0)
@ -305,7 +305,11 @@ python_parse time_text pattern=Date_Time_Formatter.iso_time =
t = Time_Of_Day.parse time_text pattern
python_time t.hour t.minute t.second t.nanosecond

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter


foreign python python_time_impl hour minute second nanoOfSecond = """
import datetime
|
||||
|
@ -1,76 +1,81 @@
from Standard.Base import all
import Standard.Base.Errors.Time_Error.Time_Error

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


polyglot java import java.time.ZoneId
polyglot java import java.time.ZoneOffset

spec =
Test.group "Zone" <|
Test.specify "should get system zone id" <|
add_specs suite_builder =
suite_builder.group "Zone" group_builder->
group_builder.specify "should get system zone id" <|
Time_Zone.system
Test.specify "Different time zones should not equal" <|
group_builder.specify "Different time zones should not equal" <|
(Time_Zone.parse "UTC").should_not_equal (Time_Zone.parse "CET")
(Time_Zone.parse "UTC").should_equal (Time_Zone.parse "UTC")
Test.specify "should parse UTC zone" <|
group_builder.specify "should parse UTC zone" <|
zone = "UTC"
id = Time_Zone.parse zone
id . zone_id . should_equal zone
Test.specify "should parse id-based zone" <|
group_builder.specify "should parse id-based zone" <|
zone = "Europe/Warsaw"
id = Time_Zone.parse zone
id . zone_id . should_equal zone
Test.specify "should parse offset-based zone" <|
group_builder.specify "should parse offset-based zone" <|
zone = "+01:02:03"
id = Time_Zone.parse zone
id . zone_id . should_equal zone
Test.specify "should get utc zone id" <|
group_builder.specify "should get utc zone id" <|
id = Time_Zone.utc
id . zone_id . should_equal "UTC"
Test.specify "should convert to Json" <|
group_builder.specify "should convert to Json" <|
zone = Time_Zone.new 1 2 3
zone.to_json.should_equal <|
JS_Object.from_pairs [["type", "Time_Zone"], ["constructor", "parse"], ["id", "+01:02:03"]] . to_text
Time_Zone.utc.to_json.should_equal <|
JS_Object.from_pairs [["type", "Time_Zone"], ["constructor", "parse"], ["id", "UTC"]] . to_text
Test.specify "should throw error when parsing invalid zone id" <|
group_builder.specify "should throw error when parsing invalid zone id" <|
case Time_Zone.parse "foo" . catch of
Time_Error.Error msg _ ->
msg . should_equal "Unknown time-zone ID: foo"
result ->
Test.fail ("Unexpected result: " + result.to_text)
Test.specify "should correctly determine the type of zone" <|
group_builder.specify "should correctly determine the type of zone" <|
zone = Time_Zone.parse "Europe/Warsaw"
Meta.type_of zone . should_equal_type Time_Zone
Test.group "JavaZoneId" <|
Test.specify "should get system zone id" <|

suite_builder.group "JavaZoneId" group_builder->
group_builder.specify "should get system zone id" <|
defaultZone = ZoneId.systemDefault
Time_Zone.system . should_equal defaultZone
Test.specify "should parse UTC zone" <|
group_builder.specify "should parse UTC zone" <|
zone = "UTC"
id = ZoneId.of zone
id . should_equal Time_Zone.utc
Test.specify "should parse id-based zone" <|
group_builder.specify "should parse id-based zone" <|
zone = "Europe/Warsaw"
id = ZoneId.of zone
id . zone_id . should_equal zone
Test.specify "should parse offset-based zone" <|
group_builder.specify "should parse offset-based zone" <|
zone = "+01:02:03"
id = ZoneId.of zone
id . zone_id . should_equal zone
Test.specify "should get utc zone id" <|
group_builder.specify "should get utc zone id" <|
zone = ZoneId.of "UTC"
zone . should_equal Time_Zone.utc
Test.specify "should convert to Json" <|
group_builder.specify "should convert to Json" <|
zone = ZoneOffset.ofHoursMinutesSeconds 1 2 3
zone.to_json.should_equal <|
JS_Object.from_pairs [["type", "Time_Zone"], ["constructor", "parse"], ["id", "+01:02:03"]] . to_text
(ZoneId.of "UTC").to_json.should_equal <|
JS_Object.from_pairs [["type", "Time_Zone"], ["constructor", "parse"], ["id", "UTC"]] . to_text
Test.specify "should correctly determine the type of zone" <|
group_builder.specify "should correctly determine the type of zone" <|
zone = ZoneId.systemDefault
Meta.type_of zone . should_equal_type Time_Zone

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -1,11 +1,11 @@
from Standard.Base import all
from Standard.Base.Data.Index_Sub_Range import sort_and_merge_ranges

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

spec = Test.group "Vector Slicing Helpers" <|
Test.specify "should be able to sort correctly merge neighboring sequences" <|

add_specs suite_builder = suite_builder.group "Vector Slicing Helpers" group_builder->
group_builder.specify "should be able to sort correctly merge neighboring sequences" <|
merge = sort_and_merge_ranges
merge [] . should_equal []
merge [0.up_to 0] . should_equal []
@ -19,4 +19,8 @@ spec = Test.group "Vector Slicing Helpers" <|
merge [0.up_to 1, 0.up_to 1] . should_equal [0.up_to 1]
merge [0.up_to 1, 1.up_to 2] . should_equal [0.up_to 2]

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
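
For reference, a minimal sketch of the Test_New layout that these files are migrated to (illustration only, not part of this commit; the group name and the trivial check are made up, while suite_builder.group, group_builder.specify, Test.build and suite.run_with_filter are the API used throughout this diff):

# Hypothetical example suite; the names "Example group" and "an example check" are illustrative.
from Standard.Base import all
from Standard.Test_New import all

add_specs suite_builder =
    suite_builder.group "Example group" group_builder->
        group_builder.specify "an example check" <|
            (1 + 1) . should_equal 2

main =
    suite = Test.build suite_builder->
        add_specs suite_builder
    suite.run_with_filter
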
@ -15,8 +15,8 @@ import Standard.Base.Runtime.Ref.Ref
import Standard.Base.Runtime.State
from Standard.Base.Data.Index_Sub_Range.Index_Sub_Range import While, By_Index, Sample, Every

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


polyglot java import java.util.ArrayList

@ -67,7 +67,7 @@ foreign python generate_py_array = """
|
||||
foreign python generate_nested_py_array = """
|
||||
return [[1, 2, 3], [4, 5]]
|
||||
|
||||
type_spec name alter = Test.group name <|
|
||||
type_spec suite_builder name alter = suite_builder.group name group_builder->
|
||||
map_fun a = if a == 30 then Error.throw (My_Error.Error a) else a+1
|
||||
flat_map_fun a = if (a.at 0) == 30 then Error.throw (My_Error.Error a) else a+[100]
|
||||
map_with_index_fun i a =
|
||||
@ -78,10 +78,10 @@ type_spec name alter = Test.group name <|
|
||||
pending_python_missing = if Polyglot.is_language_installed "python" then Nothing else
|
||||
"Can't run Python tests, Python is not installed."
|
||||
|
||||
Test.specify "text bytes" <|
|
||||
group_builder.specify "text bytes" <|
|
||||
"Lore".utf_8 . should_equal [76, 111, 114, 101]
|
||||
|
||||
Test.specify "should allow vector creation with a programmatic constructor" <|
|
||||
group_builder.specify "should allow vector creation with a programmatic constructor" <|
|
||||
Vector.new 100 (ix -> ix + 1) . fold 0 (+) . should_equal 5050
|
||||
|
||||
r = Ref.new 0
|
||||
@ -90,7 +90,7 @@ type_spec name alter = Test.group name <|
|
||||
const = Vector.new 4 _->next
|
||||
const.should_equal [0, 1, 2, 3]
|
||||
|
||||
Test.specify "should allow vector creation with a constant constructor" <|
|
||||
group_builder.specify "should allow vector creation with a constant constructor" <|
|
||||
Vector.fill 100 1 . fold (0) (+) . should_equal 100
|
||||
|
||||
r = Ref.new 0
|
||||
@ -99,34 +99,34 @@ type_spec name alter = Test.group name <|
|
||||
const = Vector.fill 4 next
|
||||
const.should_equal [0, 0, 0, 0]
|
||||
|
||||
Test.specify "should allow creation from arrays without mutability" <|
|
||||
group_builder.specify "should allow creation from arrays without mutability" <|
|
||||
built_from_js = Vector.from_polyglot_array generate_js_array
|
||||
built_from_js . should_equal (alter [1, 2, 3, 4, 5])
|
||||
|
||||
Test.specify "should allow creation from arrays without mutability in Python" pending=pending_python_missing <|
|
||||
group_builder.specify "should allow creation from arrays without mutability in Python" pending=pending_python_missing <|
|
||||
built_from_py = Vector.from_polyglot_array generate_py_array
|
||||
built_from_py . should_equal (alter [1, 2, 3, 4, Nothing])
|
||||
|
||||
Test.specify "should allow creation from nested arrays from JavaScript" <|
|
||||
group_builder.specify "should allow creation from nested arrays from JavaScript" <|
|
||||
built_from_js = Vector.from_polyglot_array generate_nested_js_array
|
||||
built_from_js . should_equal (alter [[1, 2, 3], [4, 5]])
|
||||
|
||||
Test.specify "should allow creation from nested arrays from Python" pending=pending_python_missing <|
|
||||
group_builder.specify "should allow creation from nested arrays from Python" pending=pending_python_missing <|
|
||||
built_from_py = Vector.from_polyglot_array generate_nested_py_array
|
||||
built_from_py . should_equal (alter [[1, 2, 3], [4, 5]])
|
||||
|
||||
Test.specify "should allow accessing elements" <|
|
||||
group_builder.specify "should allow accessing elements" <|
|
||||
alter [1,2,3] . at 0 . should_equal 1
|
||||
alter [1,2,3] . at 2 . should_equal 3
|
||||
|
||||
Test.specify "should allow to store dataflow errors and raise them on access" <|
|
||||
group_builder.specify "should allow to store dataflow errors and raise them on access" <|
|
||||
vec = [Error.throw (My_Error.Error "foo"), "bar"]
|
||||
vec.at 1 . should_equal "bar"
|
||||
vec.at 0 . should_fail_with My_Error
|
||||
vec.get 1 . should_equal "bar"
|
||||
vec.get 0 . should_fail_with My_Error
|
||||
|
||||
Test.specify "should allow accessing elements with negative indices" <|
|
||||
group_builder.specify "should allow accessing elements with negative indices" <|
|
||||
alter [1,2,3] . at -1 . should_equal 3
|
||||
alter [1,2,3] . at -2 . should_equal 2
|
||||
alter [1,2,3] . at -3 . should_equal 1
|
||||
@ -134,7 +134,7 @@ type_spec name alter = Test.group name <|
|
||||
alter [1,2,3] . get -2 . should_equal 2
|
||||
alter [1,2,3] . get -3 . should_equal 1
|
||||
|
||||
Test.specify "should correctly handle out of bounds access" <|
|
||||
group_builder.specify "should correctly handle out of bounds access" <|
|
||||
alter [1,2,3] . at -4 . should_fail_with Index_Out_Of_Bounds
|
||||
alter [1,2,3] . at 3 . should_fail_with Index_Out_Of_Bounds
|
||||
alter [1,2,3] . get -4 . should_equal Nothing
|
||||
@ -142,23 +142,23 @@ type_spec name alter = Test.group name <|
|
||||
alter [1,2,3] . get -4 "???" . should_equal "???"
|
||||
alter [1,2,3] . get 3 "???" . should_equal "???"
|
||||
|
||||
Test.specify "should have a well-defined length" <|
|
||||
group_builder.specify "should have a well-defined length" <|
|
||||
alter [1,2,3] . length . should_equal 3
|
||||
|
||||
Test.specify "should allow folding an operator over its elements" <|
|
||||
group_builder.specify "should allow folding an operator over its elements" <|
|
||||
alter [1,2,3] . fold 0 (+) . should_equal 6
|
||||
alter [] . fold 123 (+) . should_equal 123
|
||||
|
||||
Test.specify "should allow a running fold operator over its elements" <|
|
||||
group_builder.specify "should allow a running fold operator over its elements" <|
|
||||
alter [1,2,3] . running_fold 0 (+) . should_equal [1, 3, 6]
|
||||
alter [] . running_fold 123 (+) . should_equal []
|
||||
|
||||
Test.specify "should allow to reduce elements if it is non-empty" <|
|
||||
group_builder.specify "should allow to reduce elements if it is non-empty" <|
|
||||
alter [1,2,3] . reduce (+) . should_equal 6
|
||||
alter [] . reduce (+) . should_fail_with Empty_Error
|
||||
alter [] . reduce (+) 0 . should_equal 0
|
||||
|
||||
Test.specify "should check any" <|
|
||||
group_builder.specify "should check any" <|
|
||||
vec = alter [1, 2, 3, 4, 5]
|
||||
vec.any (ix -> ix > 3) . should_be_true
|
||||
vec.any (ix -> ix < 0) . should_be_false
|
||||
@ -169,7 +169,7 @@ type_spec name alter = Test.group name <|
|
||||
|
||||
Test.expect_panic_with matcher=Type_Error (vec.any "invalid argument")
|
||||
|
||||
Test.specify "should check all" <|
|
||||
group_builder.specify "should check all" <|
|
||||
vec = alter [1, 2, 3, 4, 5]
|
||||
vec.all (ix -> ix > 0) . should_be_true
|
||||
vec.all (ix -> ix < 5) . should_be_false
|
||||
@ -179,31 +179,31 @@ type_spec name alter = Test.group name <|
|
||||
|
||||
Test.expect_panic_with matcher=Type_Error (vec.all "invalid argument")
|
||||
|
||||
Test.specify "should check contains" <|
|
||||
group_builder.specify "should check contains" <|
|
||||
vec = alter [1, 2, 3, 4, 5]
|
||||
vec.contains 1 . should_be_true
|
||||
vec.contains 0 . should_be_false
|
||||
|
||||
Test.specify "should check for emptiness" <|
|
||||
group_builder.specify "should check for emptiness" <|
|
||||
non_empty = alter [1]
|
||||
empty = alter []
|
||||
non_empty.is_empty . should_be_false
|
||||
empty.is_empty . should_be_true
|
||||
|
||||
Test.specify "should check for non-emptiness" <|
|
||||
group_builder.specify "should check for non-emptiness" <|
|
||||
non_empty = alter [1]
|
||||
empty = alter []
|
||||
non_empty.not_empty . should_be_true
|
||||
empty.not_empty . should_be_false
|
||||
|
||||
Test.specify "should filter elements by a predicate" <|
|
||||
group_builder.specify "should filter elements by a predicate" <|
|
||||
vec = alter [1, 2, 3, 4, 5]
|
||||
vec.filter (x -> x > 3) . should_equal [4, 5]
|
||||
vec.filter (x -> x == 1) . should_equal [1]
|
||||
vec.filter (x -> x < 0) . should_equal []
|
||||
vec.filter (x -> if x == 2 then Error.throw <| My_Error.Error "foo" else True) . should_fail_with My_Error
|
||||
|
||||
Test.specify "should filter elements by Filter_Condition" <|
|
||||
group_builder.specify "should filter elements by Filter_Condition" <|
|
||||
vec = alter [1, 2, 3, 4, 5]
|
||||
vec.filter (Filter_Condition.Greater than=3) . should_equal [4, 5]
|
||||
vec.filter (Filter_Condition.Less than=3.5) . should_equal [1, 2, 3]
|
||||
@ -300,7 +300,7 @@ type_spec name alter = Test.group name <|
|
||||
|
||||
(alter [2, "a"]).filter (Filter_Condition.Greater 1) . should_fail_with Incomparable_Values
|
||||
|
||||
Test.specify "should allow Nothing when filtering by Filter_Condition" <|
|
||||
group_builder.specify "should allow Nothing when filtering by Filter_Condition" <|
|
||||
(alter [1, 2, Nothing, 3]).filter (Filter_Condition.Greater 2) . should_equal [3]
|
||||
(alter [1, 2, Nothing, 3]).filter (Filter_Condition.Equal_Or_Less 2) . should_equal [1, 2]
|
||||
(alter ["a", 2, Nothing, 2]).filter (Filter_Condition.Equal 2) . should_equal [2, 2]
|
||||
@ -325,7 +325,7 @@ type_spec name alter = Test.group name <|
|
||||
|
||||
(alter ["a", 2, Nothing, 3]).filter (Filter_Condition.Is_In [Nothing, 2]) . should_equal [2, Nothing]
|
||||
|
||||
Test.specify "should have a friendly error when missing Filter_Condition arguments" <|
|
||||
group_builder.specify "should have a friendly error when missing Filter_Condition arguments" <|
|
||||
v = alter [0, 1, 2]
|
||||
|
||||
r1 = v.filter Filter_Condition.Less
|
||||
@ -334,12 +334,12 @@ type_spec name alter = Test.group name <|
|
||||
|
||||
v.filter (Filter_Condition.Between 10) . should_fail_with Illegal_Argument
|
||||
|
||||
Test.specify "should filter elements with indices" <|
|
||||
group_builder.specify "should filter elements with indices" <|
|
||||
(alter [0, 10, 2, 2] . filter_with_index (==)) . should_equal [0, 2]
|
||||
(alter [1, 2, 3, 4] . filter_with_index ix-> _-> ix < 2) . should_equal [1, 2]
|
||||
(alter [1, 2, 3, 4] . filter_with_index ix-> _-> if ix == 1 then Error.throw <| My_Error.Error "foo" else True) . should_fail_with My_Error
|
||||
|
||||
Test.specify "should partition elements" <|
|
||||
group_builder.specify "should partition elements" <|
|
||||
vec = alter [1, 2, 3, 4, 5]
|
||||
vec.partition (x -> x % 2 == 0) . should_equal <| Pair.new [2, 4] [1, 3, 5]
|
||||
(vec . partition x-> if x == 1 then Error.throw <| My_Error.Error "foo" else True) . should_fail_with My_Error
|
||||
@ -348,38 +348,38 @@ type_spec name alter = Test.group name <|
|
||||
|
||||
Test.expect_panic_with matcher=Type_Error (vec.partition "invalid arg")
|
||||
|
||||
Test.specify "should partition elements with indices" <|
|
||||
group_builder.specify "should partition elements with indices" <|
|
||||
alter ["a", "b", "c", "d"] . partition_with_index (ix -> _ -> ix % 2 == 0) == (Pair.new ["a", "c"] ["b", "d"])
|
||||
alter ["a", "b", "c", "d"] . partition_with_index (ix -> _ -> if ix % 2 == 0 then Error.throw <| My_Error.Error "foo" else True) . should_fail_with My_Error
|
||||
|
||||
Test.specify "should allow to join a vector of text elements to form a single text" <|
|
||||
group_builder.specify "should allow to join a vector of text elements to form a single text" <|
|
||||
alter ["a", "b", "c"] . join . should_equal "abc"
|
||||
alter ["a", "b", "c"] . join ";" "{" "}" . should_equal "{a;b;c}"
|
||||
|
||||
Test.specify "should allow mapping an operation, returning a new vector" <|
|
||||
group_builder.specify "should allow mapping an operation, returning a new vector" <|
|
||||
vec = alter [1, 2, 3, 4]
|
||||
mapped = vec.map x-> x * x
|
||||
vec.to_text.should_equal "[1, 2, 3, 4]"
|
||||
mapped.to_text.should_equal "[1, 4, 9, 16]"
|
||||
|
||||
Test.specify "should allow map on_problems=No_Wrap, returning a new vector" <|
|
||||
group_builder.specify "should allow map on_problems=No_Wrap, returning a new vector" <|
|
||||
vec = alter [1, 2, 3, 4]
|
||||
mapped = vec.map on_problems=No_Wrap x-> x * x
|
||||
vec.to_text.should_equal "[1, 2, 3, 4]"
|
||||
mapped.to_text.should_equal "[1, 4, 9, 16]"
|
||||
|
||||
Test.specify "should allow mapping an operation with index" <|
|
||||
group_builder.specify "should allow mapping an operation with index" <|
|
||||
vec = alter [1, 2, 3, 4]
|
||||
mapped = vec.map_with_index i-> x-> x * x * i
|
||||
vec.to_text.should_equal "[1, 2, 3, 4]"
|
||||
mapped.to_text.should_equal "[0, 4, 18, 48]"
|
||||
|
||||
Test.specify "should allow flat_mapping an operation, returning a new vector" <|
|
||||
group_builder.specify "should allow flat_mapping an operation, returning a new vector" <|
|
||||
vec = alter [1, 2, 0, 3]
|
||||
mapped = vec.flat_map n-> Vector.fill n n
|
||||
mapped.should_equal [1, 2, 2, 3, 3, 3]
|
||||
|
||||
Test.specify "should allow to flatten a nested vector" <|
|
||||
group_builder.specify "should allow to flatten a nested vector" <|
|
||||
alter [[1, 2, 3], [4, 10], [], [0], [0]] . flatten . should_equal [1, 2, 3, 4, 10, 0, 0]
|
||||
alter [] . flatten . should_equal []
|
||||
alter [[]] . flatten . should_equal []
|
||||
@ -389,13 +389,13 @@ type_spec name alter = Test.group name <|
|
||||
alter [["a", 2], [], [[[3]]], [T.Value 1 2, 44]] . flatten . should_equal ["a", 2, [[3]], T.Value 1 2, 44]
|
||||
(alter ["polyglot", " ", "array"] . map .utf_8).flatten . should_equal "polyglot array".utf_8
|
||||
|
||||
Test.specify "should allow applying a function to each element" <|
|
||||
group_builder.specify "should allow applying a function to each element" <|
|
||||
vec = alter [1, 2, 3, 4]
|
||||
vec_mut = Vector.new_builder
|
||||
vec.each vec_mut.append
|
||||
vec_mut.to_vector . should_equal vec
|
||||
|
||||
Test.specify "should accept changed elements" <|
|
||||
group_builder.specify "should accept changed elements" <|
|
||||
vec_mut = Vector.new_builder
|
||||
vec_mut.append 1
|
||||
vec_mut.append 1.1
|
||||
@ -407,7 +407,7 @@ type_spec name alter = Test.group name <|
|
||||
vec.at 1 . should_equal 1.1
|
||||
vec.at 2 . should_equal Nothing
|
||||
|
||||
Test.specify "should accept Nothing" <|
|
||||
group_builder.specify "should accept Nothing" <|
|
||||
vec_mut = Vector.new_builder
|
||||
vec_mut.append Nothing
|
||||
|
||||
@ -415,16 +415,16 @@ type_spec name alter = Test.group name <|
|
||||
vec.length . should_equal 1
|
||||
vec.at 0 . should_equal Nothing
|
||||
|
||||
Test.specify "should allow reversing" <|
|
||||
group_builder.specify "should allow reversing" <|
|
||||
alter [1, 2, 3] . reverse . should_equal [3, 2, 1]
|
||||
|
||||
Test.specify "should have a well-defined text conversion" <|
|
||||
group_builder.specify "should have a well-defined text conversion" <|
|
||||
alter [] . to_text . should_equal "[]"
|
||||
alter [1,2,3] . to_text . should_equal "[1, 2, 3]"
|
||||
alter [Nothing] . to_text . should_equal "[Nothing]"
|
||||
alter ['a'] . to_text . should_equal "[a]"
|
||||
|
||||
Test.specify "should allow to generate a short text representation for display" <|
|
||||
group_builder.specify "should allow to generate a short text representation for display" <|
|
||||
alter [] . short_display_text max_entries=3 . should_equal "[]"
|
||||
alter [1] . short_display_text max_entries=3 . should_equal "[1]"
|
||||
alter [1, 2] . short_display_text max_entries=3 . should_equal "[1, 2]"
|
||||
@ -437,17 +437,17 @@ type_spec name alter = Test.group name <|
|
||||
|
||||
alter [] . short_display_text max_entries=0 . should_fail_with Illegal_Argument
|
||||
|
||||
Test.specify "should define equality" <|
|
||||
group_builder.specify "should define equality" <|
|
||||
(alter [1,2,3])==[1,2] . should_be_false
|
||||
(alter [1,2,3])==[1,2,3] . should_be_true
|
||||
(alter [1,2,3])==[3,4,5] . should_be_false
|
||||
|
||||
Test.specify "should define concatenation" <|
|
||||
group_builder.specify "should define concatenation" <|
|
||||
concat = (alter [1, 2, 3]) + (alter [4, 5, 6])
|
||||
concat.should_equal [1, 2, 3, 4, 5, 6]
|
||||
Test.expect_panic_with matcher=Type_Error ((alter [1, 2, 3])+1)
|
||||
|
||||
Test.specify "should allow finding a value" <|
|
||||
group_builder.specify "should allow finding a value" <|
|
||||
input = alter [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
|
||||
input.find (x -> x == 5) . should_equal 5
|
||||
input.find (x -> x%3 == 2) . should_equal 2
|
||||
@ -466,7 +466,7 @@ type_spec name alter = Test.group name <|
|
||||
|
||||
alter ["b", "A", "c"] . find (Filter_Condition.Equal_Ignore_Case "a") . should_equal "A"
|
||||
|
||||
Test.specify "should allow finding the index of a value" <|
|
||||
group_builder.specify "should allow finding the index of a value" <|
|
||||
input = alter [1, 2, 3, 4, 1, 2, 3, 1, 2, 1]
|
||||
input.index_of 4 . should_equal 3
|
||||
input.index_of (>3) . should_equal 3
|
||||
@ -482,7 +482,7 @@ type_spec name alter = Test.group name <|
|
||||
input.index_of (Filter_Condition.Equal_Or_Greater 2) . should_equal 1
|
||||
input.index_of "text" . should_equal Nothing
|
||||
|
||||
Test.specify "should allow finding the last index of a value" <|
|
||||
group_builder.specify "should allow finding the last index of a value" <|
|
||||
input = alter [1, 2, 3, 4, 1, 2, 3, 1, 2, 1]
|
||||
input.last_index_of 2 . should_equal 8
|
||||
input.last_index_of 5 . should_equal Nothing
|
||||
@ -496,12 +496,12 @@ type_spec name alter = Test.group name <|
|
||||
input.last_index_of (Filter_Condition.Equal_Or_Greater 2) . should_equal input.length-2
|
||||
input.last_index_of "text" . should_equal Nothing
|
||||
|
||||
Test.specify "should be convertible to a list" <|
|
||||
group_builder.specify "should be convertible to a list" <|
|
||||
alter [] . to_list . should_equal List.Nil
|
||||
alter ["A"] . to_list . should_equal (List.Cons "A" List.Nil)
|
||||
alter [1, 2, "B", 3] . to_list . should_equal (List.Cons 1 (List.Cons 2 (List.Cons "B" (List.Cons 3 List.Nil))))
|
||||
|
||||
Test.specify "Vector slice should return a Vector" <|
|
||||
group_builder.specify "Vector slice should return a Vector" <|
|
||||
vec = alter [1, 2, 3, 4, 5, 6]
|
||||
vec.slice 0 3 . should_equal [1, 2, 3]
|
||||
vec.slice 1 3 . should_equal [2, 3]
|
||||
@ -511,7 +511,7 @@ type_spec name alter = Test.group name <|
|
||||
Meta.get_qualified_type_name (vec.slice 1 1) . should_equal (Meta.meta Vector . qualified_name)
|
||||
Meta.get_simple_type_name (vec.slice 1 1) . should_equal "Vector"
|
||||
|
||||
Test.specify "should define take and drop family of operations" <|
|
||||
group_builder.specify "should define take and drop family of operations" <|
|
||||
vec = alter [1, 2, 3, 4, 5, 6]
|
||||
first_four = alter [1, 2, 3, 4]
|
||||
last_four = alter [3, 4, 5, 6]
|
||||
@ -633,17 +633,17 @@ type_spec name alter = Test.group name <|
|
||||
alter ["a", "a", "a"] . drop (Sample 1) . should_equal ["a", "a"]
|
||||
alter ["a", "a", "a"] . drop (Sample 100) . should_equal []
|
||||
|
||||
Test.group "take/drop Sample non-determinism" <|
|
||||
suite_builder.group "take/drop Sample non-determinism" group_builder->
|
||||
v = 0.up_to 20 . to_vector
|
||||
|
||||
Test.specify "sampling should be deterministic when a seed is supplied" <|
|
||||
group_builder.specify "sampling should be deterministic when a seed is supplied" <|
|
||||
v.take (Sample 3 seed=4200000) . should_equal (v.take (Sample 3 seed=4200000))
|
||||
|
||||
Test.specify "sampling should be non-deterministic when a seed is not supplied" <|
|
||||
group_builder.specify "sampling should be non-deterministic when a seed is not supplied" <|
|
||||
0.up_to 3 . map _->
|
||||
v.take (Sample 3) . should_not_equal (v.take (Sample 3))
|
||||
|
||||
Test.specify "take/drop should gracefully handle missing constructor arguments" <|
|
||||
group_builder.specify "take/drop should gracefully handle missing constructor arguments" <|
|
||||
[].take "FOO" . should_fail_with Type_Error
|
||||
[].drop "FOO" . should_fail_with Type_Error
|
||||
|
||||
@ -659,7 +659,7 @@ type_spec name alter = Test.group name <|
|
||||
r3.should_fail_with Illegal_Argument
|
||||
r3.catch.to_display_text . should_contain "Got a Function instead of a range, is a constructor argument missing?"
|
||||
|
||||
Test.specify "should allow getting the last element of the vector" <|
|
||||
group_builder.specify "should allow getting the last element of the vector" <|
|
||||
non_empty_vec = alter [1, 2, 3, 4, 5]
|
||||
singleton_vec = alter [1]
|
||||
empty_vec = alter []
|
||||
@ -667,7 +667,7 @@ type_spec name alter = Test.group name <|
|
||||
singleton_vec.last . should_equal 1
|
||||
empty_vec.last . should_fail_with Index_Out_Of_Bounds
|
||||
|
||||
Test.specify "should allow getting the first element" <|
|
||||
group_builder.specify "should allow getting the first element" <|
|
||||
non_empty_vec = alter [1, 2, 3, 4, 5]
|
||||
singleton_vec = alter [1]
|
||||
empty_vec = alter []
|
||||
@ -675,7 +675,7 @@ type_spec name alter = Test.group name <|
|
||||
singleton_vec.first . should_equal 1
|
||||
empty_vec.first . should_fail_with Index_Out_Of_Bounds
|
||||
|
||||
Test.specify "should allow getting the second element" <|
|
||||
group_builder.specify "should allow getting the second element" <|
|
||||
non_empty_vec = alter [1, 2, 3, 4, 5]
|
||||
singleton_vec = alter [1]
|
||||
empty_vec = alter []
|
||||
@ -683,7 +683,7 @@ type_spec name alter = Test.group name <|
|
||||
singleton_vec.second . should_fail_with Index_Out_Of_Bounds
|
||||
empty_vec.second . should_fail_with Index_Out_Of_Bounds
|
||||
|
||||
Test.specify "should be able to be sorted" <|
|
||||
group_builder.specify "should be able to be sorted" <|
|
||||
empty_vec = alter []
|
||||
short_vec = alter [2, 4, 38, -1, -1000, 3671, -32]
|
||||
short_expected = alter [-1000, -32, -1, 2, 4, 38, 3671]
|
||||
@ -698,53 +698,53 @@ type_spec name alter = Test.group name <|
|
||||
alter ["aa", 2] . sort . should_equal [2, "aa"]
|
||||
alter [2, Date.new 1999] . sort . should_equal [2, Date.new 1999]
|
||||
|
||||
Test.specify "should leave the original vector unchanged" <|
|
||||
group_builder.specify "should leave the original vector unchanged" <|
|
||||
non_empty_vec = alter [2, 4, 2, 3, 2, 3]
|
||||
sorted = non_empty_vec.sort
|
||||
non_empty_vec . should_equal [2, 4, 2, 3, 2, 3]
|
||||
sorted . should_equal [2, 2, 2, 3, 3, 4]
|
||||
|
||||
Test.specify "should have a stable sort" <|
|
||||
group_builder.specify "should have a stable sort" <|
|
||||
small_vec = alter [T.Value 1 8, T.Value 1 3, T.Value -20 0, T.Value -1 1, T.Value -1 10, T.Value 4 0]
|
||||
small_expected = [T.Value -20 0, T.Value -1 1, T.Value -1 10, T.Value 1 8, T.Value 1 3, T.Value 4 0]
|
||||
small_vec.sort . should_equal small_expected
|
||||
|
||||
Test.specify "should fail the sort if Report_Error problem_behavior specified" <|
|
||||
group_builder.specify "should fail the sort if Report_Error problem_behavior specified" <|
|
||||
alter [T.Value 1 8, Nothing] . sort on_incomparable=Problem_Behavior.Report_Error . should_fail_with Incomparable_Values
|
||||
alter [Nothing, Number.nan] . sort on_incomparable=Problem_Behavior.Report_Error . should_fail_with Incomparable_Values
|
||||
|
||||
Test.specify "should be able to use a custom element projection" <|
|
||||
group_builder.specify "should be able to use a custom element projection" <|
|
||||
small_vec = alter [T.Value 1 8, T.Value 1 3, T.Value -20 0, T.Value -1 1, T.Value -1 10, T.Value 4 0]
|
||||
small_expected = [T.Value -20 0, T.Value 4 0, T.Value -1 1, T.Value 1 3, T.Value 1 8, T.Value -1 10]
|
||||
small_vec.sort (on = _.b) . should_equal small_expected
|
||||
small_vec.sort (on = .b) . should_equal small_expected
|
||||
|
||||
Test.specify "should be able to use a custom compare function" <|
|
||||
group_builder.specify "should be able to use a custom compare function" <|
|
||||
small_vec = alter [2, 7, -3, 383, -392, 28, -90]
|
||||
small_expected = [383, 28, 7, 2, -3, -90, -392]
|
||||
small_vec.sort (by = l -> r -> Ordering.compare r l) . should_equal small_expected
|
||||
|
||||
Test.specify "should allow tail-recursive comparators in sort" <|
|
||||
group_builder.specify "should allow tail-recursive comparators in sort" <|
|
||||
v = alter [Foo.Value [4,2,2], Foo.Value [1,2,3], Foo.Value [1,2,4]]
|
||||
r = alter [Foo.Value [1,2,3], Foo.Value [1,2,4], Foo.Value [4,2,2]]
|
||||
v.sort by=compare_tco . should_equal r
|
||||
|
||||
Test.specify "should be able to use a custom compare function and projection" <|
|
||||
group_builder.specify "should be able to use a custom compare function and projection" <|
|
||||
small_vec = alter [T.Value 1 8, T.Value 1 3, T.Value -20 0, T.Value -1 1, T.Value -1 10, T.Value 4 0]
|
||||
small_expected = alter [T.Value -1 10, T.Value 1 8, T.Value 1 3, T.Value -1 1, T.Value -20 0, T.Value 4 0]
|
||||
small_vec.sort (on = _.b) (by = l -> r -> Ordering.compare r l) . should_equal small_expected
|
||||
|
||||
Test.specify "should be able to sort in descending order" <|
|
||||
group_builder.specify "should be able to sort in descending order" <|
|
||||
small_vec = alter [2, 7, -3, 383, -392, 28, -90]
|
||||
small_expected = alter [383, 28, 7, 2, -3, -90, -392]
|
||||
small_vec.sort Sort_Direction.Descending . should_equal small_expected
|
||||
|
||||
Test.specify "should be stable in descending order" <|
|
||||
group_builder.specify "should be stable in descending order" <|
|
||||
small_vec = alter [T.Value 1 8, T.Value 1 3, T.Value -20 0, T.Value -1 1, T.Value -1 10, T.Value 4 0]
|
||||
small_expected = alter [T.Value 4 0, T.Value 1 3, T.Value 1 8, T.Value -1 10, T.Value -1 1, T.Value -20 0]
|
||||
small_vec.sort Sort_Direction.Descending . should_equal small_expected
|
||||
|
||||
Test.specify "should correctly propagate state through each" <|
|
||||
group_builder.specify "should correctly propagate state through each" <|
|
||||
v = State.run Number 77 <|
|
||||
b = Vector.new_builder
|
||||
(alter ["A", "B"]).each x->
|
||||
@ -754,7 +754,7 @@ type_spec name alter = Test.group name <|
|
||||
|
||||
v.should_equal ['A', '77', 'B', '77']
|
||||
|
||||
Test.specify "should correctly propagate state through map" <|
|
||||
group_builder.specify "should correctly propagate state through map" <|
|
||||
v = State.run Number 55 <|
|
||||
b = Vector.new_builder
|
||||
(alter ["X", "Y"]).map x->
|
||||
@ -764,41 +764,41 @@ type_spec name alter = Test.group name <|
|
||||
|
||||
v.should_equal ['X', '55', 'Y', '55']
|
||||
|
||||
Test.specify "should pad elements" <|
|
||||
group_builder.specify "should pad elements" <|
|
||||
alter [] . pad 3 0 . should_equal [0, 0, 0]
|
||||
alter [1, 2, 3] . pad 0 0 . should_equal [1, 2, 3]
|
||||
alter [1, 2, 3] . pad 3 0 . should_equal [1, 2, 3]
|
||||
alter [1] . pad 3 0 . should_equal [1, 0, 0]
|
||||
|
||||
Test.specify "should zip elements" <|
|
||||
group_builder.specify "should zip elements" <|
|
||||
alter [1, 2, 3] . zip [] (+) . should_equal []
|
||||
alter [1, 2, 3] . zip [4] (+) . should_equal [5]
|
||||
alter [1, 2, 3] . zip [4, 5, 6] (+) . should_equal [5, 7, 9]
|
||||
alter [1, 2, 3] . zip [4, 5, 6, 7] (+) . should_equal [5, 7, 9]
|
||||
alter [] . zip [4, 5, 6] (+) . should_equal []
|
||||
|
||||
Test.specify "should zip elements with zip on_problems=No_Wrap" <|
|
||||
group_builder.specify "should zip elements with zip on_problems=No_Wrap" <|
|
||||
alter [1, 2, 3] . zip on_problems=No_Wrap [] (+) . should_equal []
|
||||
alter [1, 2, 3] . zip on_problems=No_Wrap [4] (+) . should_equal [5]
|
||||
alter [1, 2, 3] . zip on_problems=No_Wrap [4, 5, 6] (+) . should_equal [5, 7, 9]
|
||||
alter [1, 2, 3] . zip on_problems=No_Wrap [4, 5, 6, 7] (+) . should_equal [5, 7, 9]
|
||||
alter [] . zip on_problems=No_Wrap [4, 5, 6] (+) . should_equal []
|
||||
|
||||
Test.specify "should flat_map elements" <|
|
||||
group_builder.specify "should flat_map elements" <|
|
||||
alter [1, 2, 3] . flat_map (_ -> []) . should_equal []
|
||||
alter [1, 2, 3] . flat_map (_ -> [0, 1]) . should_equal [0, 1, 0, 1, 0, 1]
|
||||
alter [1, 2, 3] . flat_map (_ -> [0, [1]]) . should_equal [0, [1], 0, [1], 0, [1]]
|
||||
alter [0, 1, 0] . flat_map (i -> if i == 1 then [1, 1] else [i]) . should_equal [0, 1, 1, 0]
|
||||
alter [0, 0, 0] . flat_map (i -> [i]) . should_equal [0, 0, 0]
|
||||
|
||||
Test.specify "should flat_map elements with flat_map on_problems=No_Wrap" <|
|
||||
group_builder.specify "should flat_map elements with flat_map on_problems=No_Wrap" <|
|
||||
alter [1, 2, 3] . flat_map on_problems=No_Wrap (_ -> []) . should_equal []
|
||||
alter [1, 2, 3] . flat_map on_problems=No_Wrap (_ -> [0, 1]) . should_equal [0, 1, 0, 1, 0, 1]
|
||||
alter [1, 2, 3] . flat_map on_problems=No_Wrap (_ -> [0, [1]]) . should_equal [0, [1], 0, [1], 0, [1]]
|
||||
alter [0, 1, 0] . flat_map on_problems=No_Wrap (i -> if i == 1 then [1, 1] else [i]) . should_equal [0, 1, 1, 0]
|
||||
alter [0, 0, 0] . flat_map on_problems=No_Wrap (i -> [i]) . should_equal [0, 0, 0]
|
||||
|
||||
Test.specify "should allow inserting elements" <|
|
||||
group_builder.specify "should allow inserting elements" <|
|
||||
alter [2, 3] . insert . should_equal [2, 3, Nothing]
|
||||
alter [2, 3] . insert item=1 . should_equal [2, 3, 1]
|
||||
alter [2, 3] . insert 100 item=1 . should_fail_with Index_Out_Of_Bounds
|
||||
@ -808,21 +808,21 @@ type_spec name alter = Test.group name <|
|
||||
alter [1, 2, 3] . insert -1 item=4 . should_equal [1, 2, 4, 3]
|
||||
alter [1, 2, 3] . insert -20 item=4 . should_fail_with Index_Out_Of_Bounds
|
||||
|
||||
Test.specify "should allow removing elements" <|
|
||||
group_builder.specify "should allow removing elements" <|
|
||||
alter [1, 2, 3] . remove . should_equal [1, 2]
|
||||
alter [1, 2, 3] . remove 100 . should_fail_with Index_Out_Of_Bounds
|
||||
alter [1, 2, 3] . remove -123 . should_fail_with Index_Out_Of_Bounds
|
||||
alter [1, 2, 3] . remove 0 . should_equal [2, 3]
|
||||
alter [1, 2, 3] . remove -1 . should_equal [1, 2]
|
||||
|
||||
Test.specify "should return a vector containing only unique elements" <|
|
||||
group_builder.specify "should return a vector containing only unique elements" <|
|
||||
alter [1, 3, 1, 2, 2, 1] . distinct . should_equal [1, 3, 2]
|
||||
alter ["a", "a", "a"] . distinct . should_equal ["a"]
|
||||
alter ['ś', 's', 's\u0301'] . distinct . should_equal ['ś', 's']
|
||||
alter [1, 1.0, 2, 2.0] . distinct . should_equal [1, 2]
|
||||
alter [] . distinct . should_equal []
|
||||
|
||||
Test.specify "should be able to handle distinct on different primitive values" <|
|
||||
group_builder.specify "should be able to handle distinct on different primitive values" <|
|
||||
alter [1, "a"] . distinct . should_equal [1, "a"]
|
||||
alter ["a", 1] . distinct . should_equal ["a", 1]
|
||||
alter [Nothing, Nothing] . distinct . should_equal [Nothing]
|
||||
@ -837,27 +837,27 @@ type_spec name alter = Test.group name <|
|
||||
my_nan = My_Nan.Value 42
|
||||
alter [my_nan, Number.nan, my_nan] . distinct . to_text . should_equal "[(My_Nan.Value 42), NaN]"
|
||||
|
||||
Test.specify "should correctly handle distinct with types that have custom comparators" <|
|
||||
group_builder.specify "should correctly handle distinct with types that have custom comparators" <|
|
||||
alter [T.Value 1 2, T.Value 3 3, T.Value 1 2] . distinct . should_equal [T.Value 1 2, T.Value 3 3]
|
||||
alter [T.Value 1 2, T.Value 3 3, T.Value 1 2, Nothing] . distinct . should_equal [T.Value 1 2, T.Value 3 3, Nothing]
|
||||
alter [Nothing, T.Value 1 2, T.Value 3 3, T.Value 1 2, Nothing] . distinct . should_equal [Nothing, T.Value 1 2, T.Value 3 3]
|
||||
alter [T.Value 1 2, Date.new year=1973] . distinct . should_equal [T.Value 1 2, Date.new year=1973]
|
||||
|
||||
Test.specify "should return a vector containing only unique elements up to some criteria" <|
|
||||
group_builder.specify "should return a vector containing only unique elements up to some criteria" <|
|
||||
alter [Pair.new 1 "a", Pair.new 2 "b", Pair.new 1 "c"] . distinct (on = _.first) . should_equal [Pair.new 1 "a", Pair.new 2 "b"]
|
||||
|
||||
Test.specify "should be able to sort a heterogenous vector" <|
|
||||
group_builder.specify "should be able to sort a heterogenous vector" <|
|
||||
arr = alter [ 1, 1.3, "hi", Date.today, Date_Time.now, [ 0 ] ]
|
||||
(arr.sort on=(.to_text) . map .to_text) . should_equal (arr.map .to_text . sort)
|
||||
(arr.sort on=(_.to_text) . map .to_text) . should_equal (arr.map .to_text . sort)
|
||||
(arr.sort on=(x-> x.to_text) . map .to_text) . should_equal (arr.map .to_text . sort)
|
||||
|
||||
Test.specify "should be able to sort a polyglot vector" <|
|
||||
group_builder.specify "should be able to sort a polyglot vector" <|
|
||||
input = "beta".utf_8
|
||||
expected = "abet".utf_8
|
||||
input.sort . should_equal expected
|
||||
|
||||
Test.specify "should report only a limited number of warnings for incomparable values" <|
|
||||
group_builder.specify "should report only a limited number of warnings for incomparable values" <|
|
||||
gen x = case (x % 10) of
|
||||
0 -> Nothing
|
||||
1 -> "foo"+x.to_text
|
||||
@ -874,86 +874,87 @@ type_spec name alter = Test.group name <|
|
||||
Warning.get_all sorted . length . should_equal 10
|
||||
Warning.limit_reached sorted . should_equal True
|
||||
|
||||
Test.specify "an error thrown inside map should be caught as a Map_Error" <|
|
||||
group_builder.specify "an error thrown inside map should be caught as a Map_Error" <|
|
||||
alter [10, 20, 30, 40] . map map_fun . should_fail_with (Map_Error.Error 2 (My_Error.Error 30)) unwrap_errors=False
|
||||
|
||||
Test.specify "an error thrown inside map on_problems=No_Wrap should be caught as a My_Error" <|
|
||||
group_builder.specify "an error thrown inside map on_problems=No_Wrap should be caught as a My_Error" <|
|
||||
alter [10, 20, 30, 40] . map on_problems=No_Wrap map_fun . should_fail_with My_Error
|
||||
|
||||
Test.specify "an error thrown inside map_with_index on_problems=No_Wrap should be caught as a My_Error" <|
|
||||
group_builder.specify "an error thrown inside map_with_index on_problems=No_Wrap should be caught as a My_Error" <|
|
||||
map_with_index_fun _ a = if a == 30 then Error.throw (My_Error.Error a) else a+1
|
||||
alter [10, 20, 30, 40] . map_with_index on_problems=No_Wrap map_with_index_fun . should_fail_with My_Error
|
||||
|
||||
Test.specify "an error thrown inside map and caught (without error parameter) should be caught as a Map_Error" <|
|
||||
group_builder.specify "an error thrown inside map and caught (without error parameter) should be caught as a Map_Error" <|
|
||||
alter [10, 20, 30, 40] . map map_fun . catch . should_equal (Map_Error.Error 2 (My_Error.Error 30))
|
||||
|
||||
Test.specify "an error thrown inside map and caught (with error parameter My_Error) should be caught as a My_Error" <|
|
||||
group_builder.specify "an error thrown inside map and caught (with error parameter My_Error) should be caught as a My_Error" <|
|
||||
alter [10, 20, 30, 40] . map map_fun . catch My_Error . should_equal (My_Error.Error 30)
|
||||
|
||||
Test.specify "an error thrown inside map and caught (with error parameter Map_Error) should be caught as a Map_Error" <|
|
||||
group_builder.specify "an error thrown inside map and caught (with error parameter Map_Error) should be caught as a Map_Error" <|
|
||||
alter [10, 20, 30, 40] . map map_fun . catch Map_Error . should_equal (Map_Error.Error 2 (My_Error.Error 30))
|
||||
|
||||
Test.specify "an error thrown inside flat_map should be caught as a Map_Error" <|
|
||||
group_builder.specify "an error thrown inside flat_map should be caught as a Map_Error" <|
|
||||
vec = alter ([[10], [20], [30], [40]].map alter)
|
||||
vec.flat_map flat_map_fun . should_fail_with (Map_Error.Error 2 (My_Error.Error [30])) unwrap_errors=False
|
||||
|
||||
Test.specify "an error thrown inside map_with_index should be caught as a Map_Error" <|
|
||||
group_builder.specify "an error thrown inside map_with_index should be caught as a Map_Error" <|
|
||||
alter [10, 20, 30, 40] . map_with_index map_with_index_fun . should_fail_with (Map_Error.Error 2 (My_Error.Error 30)) unwrap_errors=False
|
||||
|
||||
Test.specify "an error thrown inside zip should be caught as a Map_Error" <|
|
||||
group_builder.specify "an error thrown inside zip should be caught as a Map_Error" <|
|
||||
vec = alter [10, 20, 30, 40]
|
||||
vec.zip [10, 20, 30, 40] zip_fun . should_fail_with (Map_Error.Error 2 (My_Error.Error 30)) unwrap_errors=False
|
||||
|
||||
Test.specify "an error thrown inside nested maps should be wrapped multiple times" <|
|
||||
group_builder.specify "an error thrown inside nested maps should be wrapped multiple times" <|
|
||||
nested_vector = Vector.fill 5 (alter [10, 20, 30, 40])
|
||||
nested_vector.map (_.map map_fun) . should_fail_with (Map_Error.Error 0 (Map_Error.Error 2 (My_Error.Error 30))) unwrap_errors=False
|
||||
|
||||
Test.specify "nested Map_Error indicies should be in the correct order" <|
|
||||
group_builder.specify "nested Map_Error indicies should be in the correct order" <|
|
||||
nested_vector = [[10, 20, 30, 40], [30, 10, 20, 30]]
|
||||
expected_warnings = [(Map_Error.Error 1 (Map_Error.Error 3 (My_Error.Error 30))), (Map_Error.Error 1 (Map_Error.Error 0 (My_Error.Error 30))), (Map_Error.Error 0 (Map_Error.Error 2 (My_Error.Error 30)))]
|
||||
Warning.get_all wrap_errors=True (nested_vector.map (_.map map_fun on_problems=Problem_Behavior.Report_Warning) on_problems=Problem_Behavior.Report_Warning) . map .value . should_equal expected_warnings
|
||||
|
||||
Test.specify "a Nothing thrown inside map should be caught as Nothing" <|
|
||||
group_builder.specify "a Nothing thrown inside map should be caught as Nothing" <|
|
||||
map_fun a = if a == 30 then Error.throw Nothing else a+1
|
||||
alter [10, 20, 30, 40] . map map_fun . should_fail_with (Map_Error.Error 2 Nothing) unwrap_errors=False
|
||||
|
||||
Test.specify "an error thrown inside map with on_problems=Problem_Behavior.Ignore should be ignored" <|
|
||||
group_builder.specify "an error thrown inside map with on_problems=Problem_Behavior.Ignore should be ignored" <|
|
||||
results = alter [10, 20, 30, 40] . map map_fun on_problems=Problem_Behavior.Ignore
|
||||
results . should_equal [11, 21, Nothing, 41]
|
||||
Warning.get_all results wrap_errors=True . should_equal []
|
||||
|
||||
Test.specify "an error thrown inside map with on_problems=Problem_Behavior.Report_Warning should be attached as a warning" <|
|
||||
group_builder.specify "an error thrown inside map with on_problems=Problem_Behavior.Report_Warning should be attached as a warning" <|
|
||||
result = alter [10, 20, 30, 40] . map map_fun on_problems=Problem_Behavior.Report_Warning
|
||||
result . should_equal [11, 21, Nothing, 41]
|
||||
Warning.get_all result wrap_errors=True . map .value . should_equal [Map_Error.Error 2 (My_Error.Error 30)]
|
||||
|
||||
Test.specify "multiple errors thrown inside map with on_problems=Problem_Behavior.Report_Warning should be attached as warnings" <|
|
||||
group_builder.specify "multiple errors thrown inside map with on_problems=Problem_Behavior.Report_Warning should be attached as warnings" <|
|
||||
result = alter [10, 30, 20, 30, 40, 30] . map map_fun on_problems=Problem_Behavior.Report_Warning
|
||||
result . should_equal [11, Nothing, 21, Nothing, 41, Nothing]
|
||||
Warning.get_all result wrap_errors=True . map .value . should_equal ([5, 3, 1].map i-> Map_Error.Error i (My_Error.Error 30))
|
||||
|
||||
Test.specify "many errors thrown inside map with on_problems=Problem_Behavior.Report_Warning should be attached as warnings and an Additional_Warnings" <|
|
||||
group_builder.specify "many errors thrown inside map with on_problems=Problem_Behavior.Report_Warning should be attached as warnings and an Additional_Warnings" <|
|
||||
result = alter ([10, 20] + (Vector.fill 12 30) + [40]) . map map_fun on_problems=Problem_Behavior.Report_Warning
|
||||
result . should_equal ([11, 21] + (Vector.fill 12 Nothing) + [41])
|
||||
expected_warnings_regular = 11.down_to 1 . map i-> Map_Error.Error i (My_Error.Error 30)
|
||||
expected_warnings = expected_warnings_regular + [(Additional_Warnings.Error 2)]
|
||||
Warning.get_all result wrap_errors=True . map .value . should_equal expected_warnings
|
||||
|
||||
Test.specify "map on_problems=No_Wrap does not do error wrapping" <|
|
||||
group_builder.specify "map on_problems=No_Wrap does not do error wrapping" <|
|
||||
alter [10, 20, 30, 40] . map on_problems=No_Wrap map_fun . catch . should_equal (My_Error.Error 30)
|
||||
|
||||
Test.specify "zip on_problems=No_Wrap does not do error wrapping" <|
|
||||
group_builder.specify "zip on_problems=No_Wrap does not do error wrapping" <|
|
||||
zip_fun a _ = if a == 30 then Error.throw (My_Error.Error a) else a+1
|
||||
arr = alter [10, 20, 30, 40]
|
||||
arr . zip on_problems=No_Wrap arr zip_fun . catch . should_equal (My_Error.Error 30)
|
||||
|
||||
Test.specify "flat_map on_problems=No_Wrap does not do error wrapping" <|
|
||||
group_builder.specify "flat_map on_problems=No_Wrap does not do error wrapping" <|
|
||||
vec = alter [1, 2, 0, 3]
|
||||
vec.flat_map on_problems=No_Wrap (n-> Error.throw (My_Error.Error n)) . catch . should_equal (My_Error.Error 1)
|
||||
|
||||
spec =
|
||||
Test.group "Vector builder" <|
|
||||
Test.specify "empty builder.get" <|
|
||||
|
||||
add_specs suite_builder =
|
||||
suite_builder.group "Vector builder" group_builder->
|
||||
group_builder.specify "empty builder.get" <|
|
||||
b = Vector.new_builder
|
||||
e = Panic.catch Index_Out_Of_Bounds (b.at 0) err->
|
||||
err.payload
|
||||
@ -961,7 +962,7 @@ spec =
|
||||
e.index . should_equal 0
|
||||
e.length . should_equal 0
|
||||
|
||||
Test.specify "one number builder.get" <|
|
||||
group_builder.specify "one number builder.get" <|
|
||||
b = Vector.new_builder
|
||||
b.append 32
|
||||
e = Panic.catch Index_Out_Of_Bounds (b.at 1) err->
|
||||
@ -971,67 +972,67 @@ spec =
|
||||
e.length . should_equal 1
|
||||
b.at 0 . should_equal 32
|
||||
|
||||
Test.specify "Vector.collect lazy" <|
|
||||
group_builder.specify "Vector.collect lazy" <|
|
||||
seq = Fib.sequence
|
||||
seq.take 5 . should_equal [1, 1, 2, 3, 5]
|
||||
|
||||
Test.specify "Vector.collect empty list" <|
|
||||
group_builder.specify "Vector.collect empty list" <|
|
||||
l = List.Nil
|
||||
v = Vector.collect l .x .xs limit=30 stop_at=(_==List.Nil)
|
||||
v . should_equal []
|
||||
|
||||
Test.specify "Vector.collect finite" <|
|
||||
group_builder.specify "Vector.collect finite" <|
|
||||
l = List.Cons 1 <| List.Cons 2 <| List.Cons 3 <| List.Nil
|
||||
v = Vector.collect l .x .xs limit=30 stop_at=(_==List.Nil)
|
||||
v . should_equal [1, 2, 3]
|
||||
|
||||
Test.group "Vector/Array equality" <|
|
||||
suite_builder.group "Vector/Array equality" group_builder->
|
||||
v1 = [1, 2, 3]
|
||||
a1 = v1.to_array
|
||||
|
||||
v2 = [1, 2, 3]
|
||||
a2 = v2.to_array
|
||||
|
||||
Test.specify "vector1 should be equal to vector1" <|
|
||||
group_builder.specify "vector1 should be equal to vector1" <|
|
||||
v1.should_equal v1
|
||||
|
||||
Test.specify "vector1 should be equal to vector2" <|
|
||||
group_builder.specify "vector1 should be equal to vector2" <|
|
||||
v1.should_equal v2
|
||||
|
||||
Test.specify "vector1 should be equal to array1" <|
|
||||
group_builder.specify "vector1 should be equal to array1" <|
|
||||
v1.should_equal a1
|
||||
|
||||
Test.specify "vector1 should be equal to array2" <|
|
||||
group_builder.specify "vector1 should be equal to array2" <|
|
||||
v1.should_equal a2
|
||||
|
||||
Test.specify "array1 should be equal to vector1" <|
|
||||
group_builder.specify "array1 should be equal to vector1" <|
|
||||
a1.should_equal v1
|
||||
|
||||
Test.specify "array1 should be equal to vector2" <|
|
||||
group_builder.specify "array1 should be equal to vector2" <|
|
||||
a1.should_equal v2
|
||||
|
||||
Test.specify "array1 should be equal to array1" <|
|
||||
group_builder.specify "array1 should be equal to array1" <|
|
||||
a1.should_equal a1
|
||||
|
||||
Test.specify "array1 should be equal to array2" <|
|
||||
group_builder.specify "array1 should be equal to array2" <|
|
||||
a1.should_equal a2
|
||||
|
||||
Test.specify "should have a well-defined debug-printing method" <|
|
||||
group_builder.specify "should have a well-defined debug-printing method" <|
|
||||
[].pretty.should_equal "[]"
|
||||
[1,2,3].pretty.should_equal "[1, 2, 3]"
|
||||
[Nothing].pretty.should_equal "[Nothing]"
|
||||
[True, False, 'a'].pretty . should_equal "[True, False, 'a']"
|
||||
[Foo.Value True].pretty . should_equal "[(Foo.Value True)]"
|
||||
|
||||
type_spec "Use Vector as vectors" identity
|
||||
type_spec "Use Array as vectors" (v -> v.to_array)
|
||||
type_spec "Use Java ArrayList as vectors" v->
|
||||
type_spec suite_builder "Use Vector as vectors" identity
|
||||
type_spec suite_builder "Use Array as vectors" (v -> v.to_array)
|
||||
type_spec suite_builder "Use Java ArrayList as vectors" v->
|
||||
arr = ArrayList.new
|
||||
v.each (x -> arr.add x)
|
||||
arr
|
||||
type_spec "Use Array_Proxy as vectors" v->
|
||||
type_spec suite_builder "Use Array_Proxy as vectors" v->
|
||||
Array_Proxy.new v.length (ix -> v.at ix)
|
||||
type_spec "Use a slice of an array as vectors" v->
|
||||
type_spec suite_builder "Use a slice of an array as vectors" v->
|
||||
v2 = v+[Nothing]
|
||||
sliced_vector = v2.slice 0 v.length
|
||||
sliced_array = sliced_vector.to_array
|
||||
@ -1049,4 +1050,8 @@ type Fib
|
||||
start = Fib.Number 1 <| Fib.Number 1 (sum_two start)
|
||||
start
|
||||
|
||||
main = Test_Suite.run_main spec
|
||||
main =
|
||||
suite = Test.build suite_builder->
|
||||
add_specs suite_builder
|
||||
suite.run_with_filter
|
||||
|
||||
|
@ -1,131 +1,144 @@
from Standard.Base import all
import Standard.Base.Errors.Common.Syntax_Error
import Standard.Base.Errors.File_Error.File_Error
from Standard.Base.Runtime import assert

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

spec =
test_file = enso_project.data / "xml" / "sample.xml"
document = XML_Document.from_file test_file
root = document . root_element
type Test_Data
Value ~data

Test.group "Read XML" <|
Test.specify "Can read from a file" <|
root.name . should_equal "class"
test_file self = self.data.at 0
document self = self.data.at 1
root self = self.data.at 2

Test.specify "Error if file does not exist" <|
setup = Test_Data.Value <|
test_file = enso_project.data / "xml" / "sample.xml"
assert test_file.exists
document = XML_Document.from_file test_file
root = document . root_element
[test_file, document, root]


add_specs suite_builder =
data = Test_Data.setup

suite_builder.group "Read XML" group_builder->
group_builder.specify "Can read from a file" <|
data.root.name . should_equal "class"

group_builder.specify "Error if file does not exist" <|
test_file = enso_project.data / "xml" / "sample.xmlnotexists"
XML_Document.from_file test_file . should_fail_with File_Error

Test.specify "Can read from a stream" <|
test_file.with_input_stream [File_Access.Read] input_stream->
group_builder.specify "Can read from a stream" <|
data.test_file.with_input_stream [File_Access.Read] input_stream->
doc = XML_Document.from_stream input_stream
doc.root_element.name . should_equal "class"

Test.specify "Can read from a string" <|
xml_string = test_file.read_text
group_builder.specify "Can read from a string" <|
xml_string = data.test_file.read_text
doc = XML_Document.from_text xml_string
doc.root_element.name . should_equal "class"

Test.specify "Can read from a short string" <|
group_builder.specify "Can read from a short string" <|
xml_string = "<class></class>"
doc = XML_Document.from_text xml_string
doc.root_element.name . should_equal "class"

Test.specify "Parse error from file" <|
group_builder.specify "Parse error from file" <|
test_file = enso_project.data / "sample.txt"
XML_Document.from_file test_file . catch . should_be_a XML_Error.Parse_Error

Test.specify "Parse error from string" <|
group_builder.specify "Parse error from string" <|
xml_string = "<<<<</"
XML_Document.from_text xml_string . catch . should_be_a XML_Error.Parse_Error

Test.group "at/get" <|
|
||||
Test.specify "Can get children by index" <|
|
||||
root.at 0 . name . should_equal "teacher"
|
||||
suite_builder.group "at/get" group_builder->
|
||||
group_builder.specify "Can get children by index" <|
|
||||
data.root.at 0 . name . should_equal "teacher"
|
||||
|
||||
root.at 0 . at 0 . name . should_equal "firstname"
|
||||
root.at 0 . at 1 . name . should_equal "lastname"
|
||||
root.at 0 . at 2 . name . should_equal "bio"
|
||||
root.at 0 . at 2 . at 0 . should_equal '\n Blah blah\n '
|
||||
data.root.at 0 . at 0 . name . should_equal "firstname"
|
||||
data.root.at 0 . at 1 . name . should_equal "lastname"
|
||||
data.root.at 0 . at 2 . name . should_equal "bio"
|
||||
data.root.at 0 . at 2 . at 0 . should_equal '\n Blah blah\n '
|
||||
|
||||
root.at 3 . at 0 . name . should_equal "firstname"
|
||||
root.at 3 . at 1 . name . should_equal "lastname"
|
||||
root.at 3 . at 2 . name . should_equal "gpa"
|
||||
root.at 3 . at 2 . at 0 . should_equal "3.99"
|
||||
data.root.at 3 . at 0 . name . should_equal "firstname"
|
||||
data.root.at 3 . at 1 . name . should_equal "lastname"
|
||||
data.root.at 3 . at 2 . name . should_equal "gpa"
|
||||
data.root.at 3 . at 2 . at 0 . should_equal "3.99"
|
||||
|
||||
Test.specify "Can get text children by index" <|
|
||||
root.at 4 . at 0 . should_equal '\n Some\n '
|
||||
root.at 4 . at 2 . should_equal '\n Extra\n '
|
||||
root.at 4 . at 4 . should_equal '\n Text\n '
|
||||
group_builder.specify "Can get text children by index" <|
|
||||
data.root.at 4 . at 0 . should_equal '\n Some\n '
|
||||
data.root.at 4 . at 2 . should_equal '\n Extra\n '
|
||||
data.root.at 4 . at 4 . should_equal '\n Text\n '
|
||||
|
||||
Test.specify "Can get element attributes" <|
|
||||
root.at 0 . at "@id" . should_equal "100"
|
||||
root.at 1 . at "@id" . should_equal "101"
|
||||
root.at 2 . at "@studentId" . should_equal "1000"
|
||||
root.at 3 . at "@studentId" . should_equal "1001"
|
||||
group_builder.specify "Can get element attributes" <|
|
||||
data.root.at 0 . at "@id" . should_equal "100"
|
||||
data.root.at 1 . at "@id" . should_equal "101"
|
||||
data.root.at 2 . at "@studentId" . should_equal "1000"
|
||||
data.root.at 3 . at "@studentId" . should_equal "1001"
|
||||
|
||||
root.at 0 . attribute "id" . should_equal "100"
|
||||
root.at 1 . attribute "id" . should_equal "101"
|
||||
root.at 2 . attribute "studentId" . should_equal "1000"
|
||||
root.at 3 . attribute "studentId" . should_equal "1001"
|
||||
data.root.at 0 . attribute "id" . should_equal "100"
|
||||
data.root.at 1 . attribute "id" . should_equal "101"
|
||||
data.root.at 2 . attribute "studentId" . should_equal "1000"
|
||||
data.root.at 3 . attribute "studentId" . should_equal "1001"
|
||||
|
||||
root.at 3 . attribute "does_not_exist" if_missing="if_missing" . should_equal "if_missing"
|
||||
data.root.at 3 . attribute "does_not_exist" if_missing="if_missing" . should_equal "if_missing"
|
||||
|
||||
Test.specify "Can get element an attribute map" <|
|
||||
root.at 2 . attributes . should_equal (Map.from_vector [["studentId", "1000"], ["year", "2"]])
|
||||
root.at 3 . attributes . should_equal (Map.from_vector [["studentId", "1001"], ["year", "3"]])
|
||||
group_builder.specify "Can get element an attribute map" <|
|
||||
data.root.at 2 . attributes . should_equal (Map.from_vector [["studentId", "1000"], ["year", "2"]])
|
||||
data.root.at 3 . attributes . should_equal (Map.from_vector [["studentId", "1001"], ["year", "3"]])
|
||||
|
||||
Test.specify "Can get nodes via xpath" <|
|
||||
classes = root.at "/class"
|
||||
group_builder.specify "Can get nodes via xpath" <|
|
||||
classes = data.root.at "/class"
|
||||
classes.length . should_equal 1
|
||||
classes.at 0 . name . should_equal "class"
|
||||
|
||||
teachers = root.at "/class/teacher"
|
||||
teachers = data.root.at "/class/teacher"
|
||||
teachers.length . should_equal 2
|
||||
teachers.at 0 . at "@id" . should_equal "100"
|
||||
teachers.at 1 . at "@id" . should_equal "101"
|
||||
|
||||
students = root.at "/class/student"
|
||||
students = data.root.at "/class/student"
|
||||
students.length . should_equal 3
|
||||
students.at 0 . at "@studentId" . should_equal "1000"
|
||||
students.at 1 . at "@studentId" . should_equal "1001"
|
||||
|
||||
root.at "/class/teacher[1]/firstname" . at 0 . text . should_equal "Mary"
|
||||
root.at "/class/teacher[2]/firstname" . at 0 . text . should_equal "Bob"
|
||||
root.at "/class/teacher[1]/firstname/text()" . should_equal ["Mary"]
|
||||
root.at "/class/teacher[2]/firstname/text()" . should_equal ["Bob"]
|
||||
root.at "/class/teacher/firstname/text()" . should_equal ["Mary", "Bob"]
|
||||
root.at "/class/teacher[1]/bio" . at 0 . text . should_equal '\n Blah blah\n '
|
||||
root.at "/class/teacher[2]/bio" . at 0 . text . should_equal '\n This that\n '
|
||||
root.get "/class/teacher[23]" . should_equal []
|
||||
data.root.at "/class/teacher[1]/firstname" . at 0 . text . should_equal "Mary"
|
||||
data.root.at "/class/teacher[2]/firstname" . at 0 . text . should_equal "Bob"
|
||||
data.root.at "/class/teacher[1]/firstname/text()" . should_equal ["Mary"]
|
||||
data.root.at "/class/teacher[2]/firstname/text()" . should_equal ["Bob"]
|
||||
data.root.at "/class/teacher/firstname/text()" . should_equal ["Mary", "Bob"]
|
||||
data.root.at "/class/teacher[1]/bio" . at 0 . text . should_equal '\n Blah blah\n '
|
||||
data.root.at "/class/teacher[2]/bio" . at 0 . text . should_equal '\n This that\n '
|
||||
data.root.get "/class/teacher[23]" . should_equal []
|
||||
|
||||
root.at "teacher[1]/firstname" . at 0 . text . should_equal "Mary"
|
||||
root.at "teacher[2]/firstname" . at 0 . text . should_equal "Bob"
|
||||
root.at "teacher[1]/bio" . at 0 . text . should_equal '\n Blah blah\n '
|
||||
root.at "teacher[2]/bio" . at 0 . text . should_equal '\n This that\n '
|
||||
data.root.at "teacher[1]/firstname" . at 0 . text . should_equal "Mary"
|
||||
data.root.at "teacher[2]/firstname" . at 0 . text . should_equal "Bob"
|
||||
data.root.at "teacher[1]/bio" . at 0 . text . should_equal '\n Blah blah\n '
|
||||
data.root.at "teacher[2]/bio" . at 0 . text . should_equal '\n This that\n '
|
||||
|
||||
Test.specify "Can get children using .get" <|
|
||||
root.get 0 . get 0 . name . should_equal "firstname"
|
||||
root.get 0 . get "@id" . should_equal "100"
|
||||
root.get "/class/teacher[1]/firstname" . get 0 . text . should_equal "Mary"
|
||||
group_builder.specify "Can get children using .get" <|
|
||||
data.root.get 0 . get 0 . name . should_equal "firstname"
|
||||
data.root.get 0 . get "@id" . should_equal "100"
|
||||
data.root.get "/class/teacher[1]/firstname" . get 0 . text . should_equal "Mary"
|
||||
|
||||
root.get 0 . get 32 "if_missing" . should_equal "if_missing"
|
||||
root.get 0 . get "@not_there" "if_missing" . should_equal "if_missing"
|
||||
data.root.get 0 . get 32 "if_missing" . should_equal "if_missing"
|
||||
data.root.get 0 . get "@not_there" "if_missing" . should_equal "if_missing"
|
||||
|
||||
Test.specify "Can handle a bad xpath" <|
|
||||
root.at "/qqq[[[[1" . at 0 . text . should_fail_with XML_Error
|
||||
group_builder.specify "Can handle a bad xpath" <|
|
||||
data.root.at "/qqq[[[[1" . at 0 . text . should_fail_with XML_Error
|
||||
|
||||
Test.group "tag name" <|
|
||||
Test.specify "Can get the tag name" <|
|
||||
root.name . should_equal "class"
|
||||
root.at 0 . name . should_equal "teacher"
|
||||
root.at 1 . at 1 . name . should_equal "lastname"
|
||||
suite_builder.group "tag name" group_builder->
|
||||
group_builder.specify "Can get the tag name" <|
|
||||
data.root.name . should_equal "class"
|
||||
data.root.at 0 . name . should_equal "teacher"
|
||||
data.root.at 1 . at 1 . name . should_equal "lastname"
|
||||
|
||||
Test.group "children" <|
|
||||
Test.specify "Can get the list of children" <|
|
||||
children = root.children
|
||||
suite_builder.group "children" group_builder->
|
||||
group_builder.specify "Can get the list of children" <|
|
||||
children = data.root.children
|
||||
children.length . should_equal 5
|
||||
children.at 0 . at "@id" . should_equal "100"
|
||||
children.at 1 . at "@id" . should_equal "101"
|
||||
@ -133,30 +146,30 @@ spec =
|
||||
children.at 3 . at "@studentId" . should_equal "1001"
|
||||
children.at 4 . at "@studentId" . should_equal "1002"
|
||||
|
||||
Test.specify "Can get the number of children" <|
|
||||
root.child_count . should_equal 5
|
||||
group_builder.specify "Can get the number of children" <|
|
||||
data.root.child_count . should_equal 5
|
||||
|
||||
Test.group "text contents" <|
|
||||
Test.specify "Can get child text contents" <|
|
||||
root.at 4 . at 1 . text . should_equal "Randy"
|
||||
root.at 4 . text . should_equal '\n Some\n Randy\n Extra\n Brown\n Text\n 3.99\n '
|
||||
suite_builder.group "text contents" group_builder->
|
||||
group_builder.specify "Can get child text contents" <|
|
||||
data.root.at 4 . at 1 . text . should_equal "Randy"
|
||||
data.root.at 4 . text . should_equal '\n Some\n Randy\n Extra\n Brown\n Text\n 3.99\n '
|
||||
|
||||
Test.group "inner / outer xml" <|
|
||||
Test.specify "Can get the inner xml" <|
|
||||
(root.at "/class/teacher[1]" . at 0 . inner_xml) . should_equal '\n <firstname>Mary</firstname>\n <lastname>Smith</lastname>\n <bio>\n Blah blah\n </bio>\n '
|
||||
(root.at "/class/teacher[1]/bio" . at 0 . inner_xml) . should_equal '\n Blah blah\n '
|
||||
(root.at "/class/teacher[2]/bio" . at 0 . inner_xml) . should_equal '\n This that\n '
|
||||
(root.at "/class/teacher[2]" . at 0 . inner_xml) . should_equal '\n <firstname>Bob</firstname>\n <lastname>Jones</lastname>\n <bio>\n This that\n </bio>\n '
|
||||
suite_builder.group "inner / outer xml" group_builder->
|
||||
group_builder.specify "Can get the inner xml" <|
|
||||
(data.root.at "/class/teacher[1]" . at 0 . inner_xml) . should_equal '\n <firstname>Mary</firstname>\n <lastname>Smith</lastname>\n <bio>\n Blah blah\n </bio>\n '
|
||||
(data.root.at "/class/teacher[1]/bio" . at 0 . inner_xml) . should_equal '\n Blah blah\n '
|
||||
(data.root.at "/class/teacher[2]/bio" . at 0 . inner_xml) . should_equal '\n This that\n '
|
||||
(data.root.at "/class/teacher[2]" . at 0 . inner_xml) . should_equal '\n <firstname>Bob</firstname>\n <lastname>Jones</lastname>\n <bio>\n This that\n </bio>\n '
|
||||
|
||||
Test.specify "Can get the outer xml" <|
|
||||
(root.at "/class/teacher[1]/bio" . at 0 . outer_xml) . should_equal '<bio>\n Blah blah\n </bio>'
|
||||
(root.at "/class/teacher[2]/bio" . at 0 . outer_xml) . should_equal '<bio>\n This that\n </bio>'
|
||||
group_builder.specify "Can get the outer xml" <|
|
||||
(data.root.at "/class/teacher[1]/bio" . at 0 . outer_xml) . should_equal '<bio>\n Blah blah\n </bio>'
|
||||
(data.root.at "/class/teacher[2]/bio" . at 0 . outer_xml) . should_equal '<bio>\n This that\n </bio>'
|
||||
|
||||
Test.group "get_elements_by_tag_name" <|
|
||||
Test.specify "Can get elements by tag name" <|
|
||||
teachers = root.get_elements_by_tag_name "teacher"
|
||||
students = root.get_elements_by_tag_name "student"
|
||||
gpas = root.get_elements_by_tag_name "gpa"
|
||||
suite_builder.group "get_elements_by_tag_name" group_builder->
|
||||
group_builder.specify "Can get elements by tag name" <|
|
||||
teachers = data.root.get_elements_by_tag_name "teacher"
|
||||
students = data.root.get_elements_by_tag_name "student"
|
||||
gpas = data.root.get_elements_by_tag_name "gpa"
|
||||
|
||||
teachers.length . should_equal 2
|
||||
students.length . should_equal 3
|
||||
@ -171,33 +184,33 @@ spec =
|
||||
gpas.at 1 . text . should_equal "3.99"
|
||||
gpas.at 2 . text . should_equal "3.99"
|
||||
|
||||
Test.specify "Can get nested elements" <|
|
||||
group_builder.specify "Can get nested elements" <|
|
||||
test_file = enso_project.data / "xml" / "nested.xml"
|
||||
root = XML_Document.from_file test_file . root_element
|
||||
bars = root.get_elements_by_tag_name "bar"
|
||||
bars.length . should_equal 4
|
||||
bars.map (t-> t.at "@id") . should_equal ["2", "4", "5", "6"]
|
||||
|
||||
Test.specify "Can get elements by name with a wildcard" <|
|
||||
root.get_elements_by_tag_name "*" . length . should_equal 20
|
||||
group_builder.specify "Can get elements by name with a wildcard" <|
|
||||
data.root.get_elements_by_tag_name "*" . length . should_equal 20
|
||||
|
||||
Test.group "Data.read / File_Format" <|
|
||||
Test.specify "Can read from a file" <|
|
||||
doc = Data.read test_file
|
||||
suite_builder.group "Data.read / File_Format" group_builder->
|
||||
group_builder.specify "Can read from a file" <|
|
||||
doc = Data.read data.test_file
|
||||
doc.root_element.name . should_equal "class"
|
||||
|
||||
Test.specify "Can read from an endpoint" <|
|
||||
group_builder.specify "Can read from an endpoint" <|
|
||||
doc = Data.fetch "https://enso-data-samples.s3.us-west-1.amazonaws.com/sample.xml"
|
||||
doc.root_element.name . should_equal "class"
|
||||
doc.root_element.at 1 . name . should_equal "teacher"
|
||||
|
||||
Test.group "display text" <|
|
||||
Test.specify "Can generate display text" <|
|
||||
document.to_display_text . should_equal 'XML_Document (XML_Element "class")'
|
||||
root.to_display_text . should_equal 'XML_Element "class"'
|
||||
suite_builder.group "display text" group_builder->
|
||||
group_builder.specify "Can generate display text" <|
|
||||
data.document.to_display_text . should_equal 'XML_Document (XML_Element "class")'
|
||||
data.root.to_display_text . should_equal 'XML_Element "class"'
|
||||
|
||||
Test.group "JSON" <|
|
||||
Test.specify "Can convert to JS_Object" <|
|
||||
suite_builder.group "JSON" group_builder->
|
||||
group_builder.specify "Can convert to JS_Object" <|
|
||||
test_file = enso_project.data / "xml" / "small.xml"
|
||||
document = XML_Document.from_file test_file
|
||||
root = document . root_element
|
||||
@ -223,4 +236,8 @@ spec =
js = root.to_js_object
js.should_equal expected

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -1,8 +1,6 @@
from Standard.Base import all

from Standard.Test import Test_Suite
import Standard.Test.Extensions
import Standard.Test_New
from Standard.Test_New import all

import project.Semantic.Any_Spec
import project.Semantic.Case_Spec
@ -91,88 +89,80 @@ import project.System.Temporary_File_Spec
import project.Random_Spec

main =
# Round_Spec needs to be migrated to Test_New, as it is also used from Table_Tests, which
# are entirely migrated to Test_New. This is just a temporary workaround until the migration
# is complete.
suite = Test_New.Test.build suite_builder->
Numbers_Spec.add_specs suite_builder
succeeded = suite.run_with_filter should_exit=False
if succeeded.not then System.exit 1
|
||||
Test_Suite.run_main <|
|
||||
Any_Spec.spec
|
||||
Array_Spec.spec
|
||||
Array_Proxy_Spec.spec
|
||||
Array_Polyglot_Spec.spec
|
||||
Bool_Spec.spec
|
||||
Base_64_Spec.spec
|
||||
Function_Spec.spec
|
||||
Case_Spec.spec
|
||||
Conversion_Spec.spec
|
||||
Deep_Export_Spec.spec
|
||||
Error_Spec.spec
|
||||
Environment_Spec.spec
|
||||
File_Spec.spec
|
||||
Temporary_File_Spec.spec
|
||||
File_Read_Spec.spec
|
||||
Reporting_Stream_Decoder_Spec.spec
|
||||
Reporting_Stream_Encoder_Spec.spec
|
||||
Http_Header_Spec.spec
|
||||
Http_Request_Spec.spec
|
||||
Http_Spec.spec
|
||||
Http_Auto_Parse_Spec.spec
|
||||
Enso_Cloud_Main.spec
|
||||
Import_Loop_Spec.spec
|
||||
Interval_Spec.spec
|
||||
Java_Interop_Spec.spec
|
||||
Js_Interop_Spec.spec
|
||||
Json_Spec.spec
|
||||
List_Spec.spec
|
||||
Locale_Spec.spec
|
||||
Map_Spec.spec
|
||||
Set_Spec.spec
|
||||
Maybe_Spec.spec
|
||||
Meta_Spec.spec
|
||||
Instrumentor_Spec.spec
|
||||
Meta_Location_Spec.spec
|
||||
Names_Spec.spec
|
||||
Equals_Spec.spec
|
||||
Ordering_Spec.spec
|
||||
Comparator_Spec.spec
|
||||
Natural_Order_Spec.spec
|
||||
Vector_Lexicographic_Order_Spec.spec
|
||||
Process_Spec.spec
|
||||
Python_Interop_Spec.spec
|
||||
R_Interop_Spec.spec
|
||||
Pair_Spec.spec
|
||||
Parse_Spec.spec
|
||||
Problems_Spec.spec
|
||||
Range_Spec.spec
|
||||
Ref_Spec.spec
|
||||
Regex_Spec.spec
|
||||
Ascribed_Parameters_Spec.spec
|
||||
Asserts_Spec.spec
|
||||
Lazy_Spec.spec
|
||||
Runtime_Spec.spec
|
||||
Self_Type_Spec.spec
|
||||
Span_Spec.spec
|
||||
State_Spec.spec
|
||||
Encoding_Spec.spec
|
||||
Text_Sub_Range_Spec.spec
|
||||
Managed_Resource_Spec.spec
|
||||
Missing_Required_Arguments_Spec.spec
|
||||
Lazy_Generator_Spec.spec
|
||||
Stack_Traces_Spec.spec
|
||||
Utils_Spec.spec
|
||||
Text_Spec.spec
|
||||
Time_Spec.spec
|
||||
URI_Spec.spec
|
||||
Vector_Spec.spec
|
||||
Slicing_Helpers_Spec.spec
|
||||
Statistics_Spec.spec
|
||||
Regression_Spec.spec
|
||||
Warnings_Spec.spec
|
||||
System_Spec.spec
|
||||
Random_Spec.spec
|
||||
XML_Spec.spec
|
||||
|
||||
suite = Test.build suite_builder->
|
||||
Any_Spec.add_specs suite_builder
|
||||
Array_Spec.add_specs suite_builder
|
||||
Array_Proxy_Spec.add_specs suite_builder
|
||||
Array_Polyglot_Spec.add_specs suite_builder
|
||||
Bool_Spec.add_specs suite_builder
|
||||
Base_64_Spec.add_specs suite_builder
|
||||
Function_Spec.add_specs suite_builder
|
||||
Case_Spec.add_specs suite_builder
|
||||
Conversion_Spec.add_specs suite_builder
|
||||
Deep_Export_Spec.add_specs suite_builder
|
||||
Error_Spec.add_specs suite_builder
|
||||
Environment_Spec.add_specs suite_builder
|
||||
File_Spec.add_specs suite_builder
|
||||
Temporary_File_Spec.add_specs suite_builder
|
||||
File_Read_Spec.add_specs suite_builder
|
||||
Reporting_Stream_Decoder_Spec.add_specs suite_builder
|
||||
Reporting_Stream_Encoder_Spec.add_specs suite_builder
|
||||
Http_Header_Spec.add_specs suite_builder
|
||||
Http_Request_Spec.add_specs suite_builder
|
||||
Http_Spec.add_specs suite_builder
|
||||
Http_Auto_Parse_Spec.add_specs suite_builder
|
||||
Enso_Cloud_Main.add_specs suite_builder
|
||||
Import_Loop_Spec.add_specs suite_builder
|
||||
Interval_Spec.add_specs suite_builder
|
||||
Java_Interop_Spec.add_specs suite_builder
|
||||
Js_Interop_Spec.add_specs suite_builder
|
||||
Json_Spec.add_specs suite_builder
|
||||
List_Spec.add_specs suite_builder
|
||||
Locale_Spec.add_specs suite_builder
|
||||
Map_Spec.add_specs suite_builder
|
||||
Set_Spec.add_specs suite_builder
|
||||
Maybe_Spec.add_specs suite_builder
|
||||
Meta_Spec.add_specs suite_builder
|
||||
Instrumentor_Spec.add_specs suite_builder
|
||||
Meta_Location_Spec.add_specs suite_builder
|
||||
Names_Spec.add_specs suite_builder
|
||||
Equals_Spec.add_specs suite_builder
|
||||
Ordering_Spec.add_specs suite_builder
|
||||
Comparator_Spec.add_specs suite_builder
|
||||
Natural_Order_Spec.add_specs suite_builder
|
||||
Vector_Lexicographic_Order_Spec.add_specs suite_builder
|
||||
Process_Spec.add_specs suite_builder
|
||||
Python_Interop_Spec.add_specs suite_builder
|
||||
R_Interop_Spec.add_specs suite_builder
|
||||
Pair_Spec.add_specs suite_builder
|
||||
Parse_Spec.add_specs suite_builder
|
||||
Problems_Spec.add_specs suite_builder
|
||||
Range_Spec.add_specs suite_builder
|
||||
Ref_Spec.add_specs suite_builder
|
||||
Regex_Spec.add_specs suite_builder
|
||||
Ascribed_Parameters_Spec.add_specs suite_builder
|
||||
Asserts_Spec.add_specs suite_builder
|
||||
Lazy_Spec.add_specs suite_builder
|
||||
Runtime_Spec.add_specs suite_builder
|
||||
Self_Type_Spec.add_specs suite_builder
|
||||
Span_Spec.add_specs suite_builder
|
||||
State_Spec.add_specs suite_builder
|
||||
Encoding_Spec.add_specs suite_builder
|
||||
Text_Sub_Range_Spec.add_specs suite_builder
|
||||
Managed_Resource_Spec.add_specs suite_builder
|
||||
Missing_Required_Arguments_Spec.add_specs suite_builder
|
||||
Lazy_Generator_Spec.add_specs suite_builder
|
||||
Stack_Traces_Spec.add_specs suite_builder
|
||||
Utils_Spec.add_specs suite_builder
|
||||
Text_Spec.add_specs suite_builder
|
||||
Time_Spec.add_specs suite_builder
|
||||
URI_Spec.add_specs suite_builder
|
||||
Vector_Spec.add_specs suite_builder
|
||||
Slicing_Helpers_Spec.add_specs suite_builder
|
||||
Statistics_Spec.add_specs suite_builder
|
||||
Regression_Spec.add_specs suite_builder
|
||||
Warnings_Spec.add_specs suite_builder
|
||||
System_Spec.add_specs suite_builder
|
||||
Random_Spec.add_specs suite_builder
|
||||
XML_Spec.add_specs suite_builder
|
||||
suite.run_with_filter
|
||||
|
@ -1,7 +1,8 @@
from Standard.Base import all
import Standard.Base.Errors.Illegal_State.Illegal_State

import Standard.Test.Test_Environment
import Standard.Test_New.Test_Environment


polyglot java import java.lang.Thread
polyglot java import java.security.KeyStore
@ -4,19 +4,19 @@ import Standard.Base.Errors.Common.No_Such_Conversion
|
||||
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
|
||||
import Standard.Base.Network.HTTP.HTTP_Error.HTTP_Error
|
||||
|
||||
from Standard.Test import Test, Test_Suite
|
||||
import Standard.Test.Test_Environment
|
||||
import Standard.Test.Extensions
|
||||
from Standard.Test_New import all
|
||||
import Standard.Test_New.Test_Environment
|
||||
|
||||
|
||||
import project.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup
|
||||
|
||||
## To run this test locally:
|
||||
$ sbt 'http-test-helper/run localhost 8080'
|
||||
$ export ENSO_HTTP_TEST_HTTPBIN_URL=http://localhost:8080/
|
||||
spec setup:Cloud_Tests_Setup =
|
||||
add_specs suite_builder setup:Cloud_Tests_Setup =
|
||||
setup.with_prepared_environment <|
|
||||
Test.group "Enso_User - offline logic tests" <|
|
||||
Test.specify "is correctly parsed from JSON" <|
|
||||
suite_builder.group "Enso_User - offline logic tests" group_builder->
|
||||
group_builder.specify "is correctly parsed from JSON" <|
|
||||
json = Json.parse """
|
||||
{
|
||||
"id": "organization-27xJM00p8jWoL2qByTo6tQfciWC",
|
||||
@ -37,9 +37,9 @@ spec setup:Cloud_Tests_Setup =
|
||||
Enso_User.from invalid_json . should_fail_with Illegal_Argument
|
||||
Test.expect_panic No_Such_Conversion (Enso_User.from (Json.parse "[]"))
|
||||
|
||||
Test.group "Enso_User - local mock integration tests" <|
|
||||
suite_builder.group "Enso_User - local mock integration tests" group_builder->
|
||||
# These tests should be kept in sync with tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/UsersHandler.java
|
||||
Test.specify "current user can be fetched from mock API" pending=setup.mock_only_pending <|
|
||||
group_builder.specify "current user can be fetched from mock API" pending=setup.mock_only_pending <|
|
||||
current = Enso_User.current
|
||||
current.id.should_equal "organization-27xJM00p8jWoL2qByTo6tQfciWC"
|
||||
current.name.should_equal "My test User 1"
|
||||
@ -51,7 +51,7 @@ spec setup:Cloud_Tests_Setup =
|
||||
home . should_be_a Enso_File
|
||||
home.is_directory.should_be_true
|
||||
|
||||
Test.specify "user list can be fetched from mock API" pending=setup.mock_only_pending <|
|
||||
group_builder.specify "user list can be fetched from mock API" pending=setup.mock_only_pending <|
|
||||
users = Enso_User.list
|
||||
|
||||
users.length.should_equal 2
|
||||
@ -59,7 +59,7 @@ spec setup:Cloud_Tests_Setup =
|
||||
users.at 1 . name . should_equal "My test User 2"
|
||||
users.at 1 . is_enabled . should_be_false
|
||||
|
||||
Test.specify "will fail if the user is not logged in" <|
|
||||
group_builder.specify "will fail if the user is not logged in" <|
|
||||
non_existent_file = (enso_project.data / "nonexistent-file") . absolute . normalize
|
||||
non_existent_file.exists.should_be_false
|
||||
r = Test_Environment.unsafe_with_environment_override "ENSO_CLOUD_CREDENTIALS_FILE" non_existent_file.path <|
|
||||
@ -67,7 +67,7 @@ spec setup:Cloud_Tests_Setup =
|
||||
Enso_User.current
|
||||
r.should_fail_with Not_Logged_In
|
||||
|
||||
Test.specify "will fail if the token is invalid" pending=setup.pending <|
|
||||
group_builder.specify "will fail if the token is invalid" pending=setup.pending <|
|
||||
invalid_token_file = File.create_temporary_file "enso-test-credentials" "-invalid.txt"
|
||||
"invalid-token".write invalid_token_file . should_succeed
|
||||
Cloud_Tests_Setup.reset_token
|
||||
@ -80,4 +80,8 @@ spec setup:Cloud_Tests_Setup =
|
||||
# Ensure the token is reset after the last test, so that any other tests will again use the correct one.
Cloud_Tests_Setup.reset_token

main = Test_Suite.run_main (spec Cloud_Tests_Setup.prepare)
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
|
@ -2,16 +2,15 @@ from Standard.Base import all
|
||||
import Standard.Base.Errors.Common.Not_Found
|
||||
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
|
||||
|
||||
from Standard.Test import Test, Test_Suite
|
||||
import Standard.Test.Test_Environment
|
||||
import Standard.Test.Extensions
|
||||
from Standard.Test_New import all
|
||||
import Standard.Test_New.Test_Environment
|
||||
|
||||
import project.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup
|
||||
from enso_dev.Base_Tests.Network.Enso_Cloud.Cloud_Tests_Setup import with_retries
|
||||
|
||||
spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
Test.group "Enso Cloud Files" pending=setup.real_cloud_pending <|
|
||||
Test.specify "should be able to list the root directory" <|
|
||||
add_specs suite_builder setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
suite_builder.group "Enso Cloud Files" pending=setup.real_cloud_pending group_builder->
|
||||
group_builder.specify "should be able to list the root directory" <|
|
||||
assets = Enso_File.root.list
|
||||
# We don't a priori know the contents, so we can only check very generic properties
|
||||
assets . should_be_a Vector
|
||||
@ -23,7 +22,7 @@ spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
[1, 2, 3, "foo"]
|
||||
assets.map .name . should_contain "test_file.json"
|
||||
|
||||
Test.specify "should allow to create and delete a directory" <|
|
||||
group_builder.specify "should allow to create and delete a directory" <|
|
||||
my_name = "my_test_dir-" + (Random.uuid.take 5)
|
||||
my_dir = Enso_File.root.create_directory my_name
|
||||
my_dir.should_succeed
|
||||
@ -49,7 +48,7 @@ spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
# TODO the dir still shows as 'existing' after deletion, probably because it still is there in the Trash
|
||||
# my_dir.exists . should_be_false
|
||||
|
||||
Test.specify "should set the current working directory by environment variable" <|
|
||||
group_builder.specify "should set the current working directory by environment variable" <|
|
||||
# If nothing set, defaults to root:
|
||||
Enso_File.current_working_directory . should_equal Enso_File.root
|
||||
|
||||
@ -70,7 +69,7 @@ spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
# It should be back to default afterwards:
|
||||
Enso_File.current_working_directory . should_equal Enso_File.root
|
||||
|
||||
Test.specify "should allow to find a file by name" <|
|
||||
group_builder.specify "should allow to find a file by name" <|
|
||||
# TODO the file should be created programmatically when write is implemented
|
||||
f = Enso_File.root / "test_file.json"
|
||||
f.should_succeed
|
||||
@ -78,11 +77,11 @@ spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
f.is_directory . should_be_false
|
||||
f.exists . should_be_true
|
||||
|
||||
Test.specify "should not find nonexistent files" <|
|
||||
group_builder.specify "should not find nonexistent files" <|
|
||||
f = Enso_File.root / "nonexistent_file.json"
|
||||
f.should_fail_with Not_Found
|
||||
|
||||
Test.specify "should not allow to create a directory inside of a regular file" <|
|
||||
group_builder.specify "should not allow to create a directory inside of a regular file" <|
|
||||
# TODO the file should be created programmatically when write is implemented
|
||||
test_file = Enso_File.root / "test_file.json"
|
||||
test_file.exists . should_be_true
|
||||
@ -90,7 +89,7 @@ spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
r = test_file.create_directory "my_test_dir"
|
||||
r.should_fail_with Illegal_Argument
|
||||
|
||||
Test.specify "should delete all contents of a directory when deleting a directory" pending="TODO discuss recursive delete" <|
|
||||
group_builder.specify "should delete all contents of a directory when deleting a directory" pending="TODO discuss recursive delete" <|
|
||||
dir1 = Enso_File.root.create_directory "my_test_dir1"+(Random.uuid.take 5)
|
||||
dir1.should_succeed
|
||||
|
||||
@ -104,14 +103,14 @@ spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
# The inner directory should also have been trashed if its parent is removed
|
||||
dir2.exists . should_be_false
|
||||
|
||||
Test.specify "should not allow to delete the root directory" <|
|
||||
group_builder.specify "should not allow to delete the root directory" <|
|
||||
Enso_File.root.delete . should_fail_with Illegal_Argument
|
||||
|
||||
Test.specify "should be able to create and delete a file" pending="TODO: Cloud file write support" <|
|
||||
group_builder.specify "should be able to create and delete a file" pending="TODO: Cloud file write support" <|
|
||||
Error.throw "TODO"
|
||||
|
||||
expected_file_text = '[1, 2, 3, "foo"]'
|
||||
Test.specify "should be able to read and decode a file using various formats" <|
|
||||
group_builder.specify "should be able to read and decode a file using various formats" <|
|
||||
# TODO the file should be created programmatically when write is implemented
|
||||
test_file = Enso_File.root / "test_file.json"
|
||||
test_file.exists . should_be_true
|
||||
@ -125,7 +124,7 @@ spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
|
||||
test_file.read_bytes . should_equal expected_file_text.utf_8
|
||||
|
||||
Test.specify "should be able to open a file as input stream" <|
|
||||
group_builder.specify "should be able to open a file as input stream" <|
|
||||
test_file = Enso_File.root / "test_file.json"
|
||||
test_file.exists . should_be_true
|
||||
|
||||
@ -134,7 +133,7 @@ spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
|
||||
bytes.should_equal expected_file_text.utf_8
|
||||
|
||||
Test.specify "should be able to read file metadata" <|
|
||||
group_builder.specify "should be able to read file metadata" <|
|
||||
Enso_File.root.exists . should_be_true
|
||||
Enso_File.root.name . should_equal "/"
|
||||
|
||||
@ -155,7 +154,7 @@ spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
nested_file.is_regular_file.should_be_true
|
||||
nested_file.size . should_equal nested_file.read_bytes.length
|
||||
|
||||
Test.specify "should be able to read file metadata (v2)" pending="TODO Enso_File more detailed metadata, waiting on https://github.com/enso-org/cloud-v2/issues/870" <|
|
||||
group_builder.specify "should be able to read file metadata (v2)" pending="TODO Enso_File more detailed metadata, waiting on https://github.com/enso-org/cloud-v2/issues/870" <|
|
||||
Enso_File.root.parent . should_equal Nothing
|
||||
Enso_File.root.path . should_equal "enso://"
|
||||
|
||||
@ -175,7 +174,7 @@ spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
nested_file.creation_time . should_be_a Date_Time
|
||||
nested_file.last_modified_time . should_be_a Date_Time
|
||||
|
||||
Test.specify "should be able to read other file metadata" pending="TODO needs further design" <|
|
||||
group_builder.specify "should be able to read other file metadata" pending="TODO needs further design" <|
|
||||
nested_file = Enso_File.root / "test-directory" / "another.txt"
|
||||
|
||||
nested_file.is_absolute.should_be_true
|
||||
@ -184,7 +183,7 @@ spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
nested_file.posix_permissions . should_be_a File_Permissions
|
||||
nested_file.is_writable . should_be_a Boolean
|
||||
|
||||
Test.specify "should be able to copy a file" pending="TODO Cloud file writing" <|
|
||||
group_builder.specify "should be able to copy a file" pending="TODO Cloud file writing" <|
|
||||
nested_file = Enso_File.root / "test-directory" / "another.txt"
|
||||
## TODO currently `/` only works if the file already exists, so how will we construct a path for a copy target?
|
||||
We either need to make all files just paths (like on regular File), or we need to have concrete files (with ID) and abstract files (just path).
|
||||
@ -192,9 +191,13 @@ spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
nested_file.exists.should_be_true
|
||||
new_file.exists.should_be_true
|
||||
|
||||
Test.specify "should be able to move a file" pending="TODO Cloud file writing" <|
|
||||
group_builder.specify "should be able to move a file" pending="TODO Cloud file writing" <|
|
||||
nested_file = Enso_File.root / "test-directory" / "another.txt"
|
||||
nested_file.move_to "TODO this needs design"
|
||||
nested_file.exists . should_be_false
|
||||
|
||||
main = Test_Suite.run_main (spec Cloud_Tests_Setup.prepare)
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -1,15 +1,20 @@
from Standard.Base import all

from Standard.Test import Test_Suite
from Standard.Test_New import all

import project.Network.Enso_Cloud.Enso_Cloud_Spec
import project.Network.Enso_Cloud.Enso_File_Spec
import project.Network.Enso_Cloud.Secrets_Spec
import project.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup

spec setup =
Enso_Cloud_Spec.spec setup
Enso_File_Spec.spec setup
Secrets_Spec.spec setup
add_specs suite_builder setup =
Enso_Cloud_Spec.add_specs suite_builder setup
Enso_File_Spec.add_specs suite_builder setup
Secrets_Spec.add_specs suite_builder setup

main =
setup = Cloud_Tests_Setup.prepare
suite = Test.build suite_builder->
add_specs suite_builder setup
suite.run_with_filter

main = Test_Suite.run_main (spec Cloud_Tests_Setup.prepare)

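Where a module's specs need shared configuration, the hunk above threads it through add_specs as an extra argument. A minimal sketch of that wiring under the same assumptions as before (only calls already used in this diff: Cloud_Tests_Setup.prepare, pending=setup.pending, Test.build, suite.run_with_filter); the group and spec names are illustrative:

from Standard.Test_New import all

import project.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup

add_specs suite_builder setup =
    # The whole group is marked pending when the cloud setup is unavailable.
    suite_builder.group "Example cloud group" pending=setup.pending group_builder->
        group_builder.specify "runs once the setup is prepared" <|
            1 . should_equal 1

main =
    setup = Cloud_Tests_Setup.prepare
    suite = Test.build suite_builder->
        add_specs suite_builder setup
    suite.run_with_filter
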
@ -10,10 +10,10 @@ import Standard.Base.Network.HTTP.Request.Request
|
||||
import Standard.Base.Runtime.Context
|
||||
from Standard.Base.Data.Enso_Cloud.Enso_Secret import as_hideable_value
|
||||
|
||||
from Standard.Test import Test, Test_Suite
|
||||
from Standard.Test_New import all
|
||||
from Standard.Test.Execution_Context_Helpers import run_with_and_without_output
|
||||
import Standard.Test.Test_Environment
|
||||
import Standard.Test.Extensions
|
||||
|
||||
|
||||
|
||||
import project.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup
|
||||
from project.Network.Enso_Cloud.Cloud_Tests_Setup import with_retries
|
||||
@ -21,15 +21,15 @@ from project.Network.Enso_Cloud.Cloud_Tests_Setup import with_retries
|
||||
polyglot java import org.enso.base.enso_cloud.EnsoSecretAccessDenied
|
||||
polyglot java import org.enso.base.enso_cloud.ExternalLibrarySecretHelper
|
||||
|
||||
spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
Test.group "Enso Cloud Secrets" pending=setup.pending <|
|
||||
Test.specify "should be able to list existing secrets" <|
|
||||
add_specs suite_builder setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
suite_builder.group "Enso Cloud Secrets" pending=setup.pending group_builder->
|
||||
group_builder.specify "should be able to list existing secrets" <|
|
||||
# This should work regardless of Output context setting:
|
||||
run_with_and_without_output <|
|
||||
# We cannot test much more because we do not know what secrets are already there, further tests will check more by creating and deleting secrets
|
||||
Enso_Secret.list . should_be_a Vector
|
||||
|
||||
Test.specify "should allow to create, list and delete secrets" <|
|
||||
group_builder.specify "should allow to create, list and delete secrets" <|
|
||||
my_secret = Enso_Secret.create "my_test_secret" "my_secret_value"
|
||||
my_secret.should_succeed
|
||||
my_secret.name . should_equal "my_test_secret"
|
||||
@ -43,7 +43,7 @@ spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
with_retries <|
|
||||
Enso_Secret.list . should_not_contain my_secret
|
||||
|
||||
Test.specify "should allow to get a secret by name" <|
|
||||
group_builder.specify "should allow to get a secret by name" <|
|
||||
created_secret = Enso_Secret.create "my_test_secret-2" "my_secret_value"
|
||||
created_secret.should_succeed
|
||||
Panic.with_finalizer created_secret.delete <|
|
||||
@ -51,7 +51,7 @@ spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
fetched_secret = Enso_Secret.get "my_test_secret-2"
|
||||
fetched_secret . should_equal created_secret
|
||||
|
||||
Test.specify "should fail to create a secret if it already exists" <|
|
||||
group_builder.specify "should fail to create a secret if it already exists" <|
|
||||
created_secret = Enso_Secret.create "my_test_secret-3" "my_secret_value"
|
||||
created_secret.should_succeed
|
||||
wait_until_secret_is_propagated created_secret
|
||||
@ -66,7 +66,7 @@ spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
r1.should_fail_with Illegal_Argument
|
||||
r1.catch.to_display_text . should_contain "already exists"
|
||||
|
||||
Test.specify "should allow to use secrets in HTTPS request headers" pending=setup.httpbin_pending <|
|
||||
group_builder.specify "should allow to use secrets in HTTPS request headers" pending=setup.httpbin_pending <|
|
||||
secret1 = Enso_Secret.create "my_test_secret-6" "Yet another Mystery"
|
||||
secret1.should_succeed
|
||||
|
||||
@ -75,7 +75,7 @@ spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
response = https.request (Request.get (setup.httpbin_secure_uri / "get") headers=[Header.new "X-My-Secret" secret1])
|
||||
response.decode_as_json.at "headers" . at "X-My-Secret" . should_equal "Yet another Mystery"
|
||||
|
||||
Test.specify "should allow to derive values from secrets in Header.authorization_bearer" pending=setup.httpbin_pending <|
|
||||
group_builder.specify "should allow to derive values from secrets in Header.authorization_bearer" pending=setup.httpbin_pending <|
|
||||
secret_token = Enso_Secret.create "my_test_secret-7" "MySecretToken"
|
||||
secret_token.should_succeed
|
||||
|
||||
@ -85,7 +85,7 @@ spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
response_json = response.decode_as_json
|
||||
response_json.at "headers" . at "Authorization" . should_equal "Bearer MySecretToken"
|
||||
|
||||
Test.specify "should allow to derive values from secrets in Header.authorization_basic" pending=setup.httpbin_pending <|
|
||||
group_builder.specify "should allow to derive values from secrets in Header.authorization_basic" pending=setup.httpbin_pending <|
|
||||
secret_username = Enso_Secret.create "my_test_secret-8" "MyUsername"
|
||||
secret_username.should_succeed
|
||||
Panic.with_finalizer secret_username.delete <|
|
||||
@ -99,7 +99,7 @@ spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
response_json = response.decode_as_json
|
||||
response_json.at "headers" . at "Authorization" . should_equal expected
|
||||
|
||||
Test.specify "should allow to derive values from secrets" <|
|
||||
group_builder.specify "should allow to derive values from secrets" <|
|
||||
secret1 = Enso_Secret.create "my_test_secret-10" "Something"
|
||||
secret1.should_succeed
|
||||
Panic.with_finalizer secret1.delete <| with_retries <|
|
||||
@ -125,7 +125,7 @@ spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
b1.to_text . should_equal "WFk="
|
||||
b2.to_text . should_equal "base64(X__SECRET__)"
|
||||
|
||||
Test.specify "does not allow secrets in HTTP headers" pending=setup.httpbin_pending <|
|
||||
group_builder.specify "does not allow secrets in HTTP headers" pending=setup.httpbin_pending <|
|
||||
secret1 = Enso_Secret.create "my_test_secret-11" "Something"
|
||||
secret1.should_succeed
|
||||
Panic.with_finalizer secret1.delete <| with_retries <|
|
||||
@ -134,7 +134,7 @@ spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
r1.should_fail_with Illegal_Argument
|
||||
r1.catch.to_display_text . should_contain "Secrets are not allowed in HTTP connections, use HTTPS instead."
|
||||
|
||||
Test.specify "API exposing secrets to external libraries should not be accessible from unauthorized code" <|
|
||||
group_builder.specify "API exposing secrets to external libraries should not be accessible from unauthorized code" <|
|
||||
secret1 = Enso_Secret.create "my_test_secret-12" "Something"
|
||||
secret1.should_succeed
|
||||
Panic.with_finalizer secret1.delete <| with_retries <|
|
||||
@ -142,7 +142,7 @@ spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
Test.expect_panic EnsoSecretAccessDenied <|
|
||||
ExternalLibrarySecretHelper.resolveValue java_repr
|
||||
|
||||
Test.specify "should allow to create and delete secrets in a sub-directory" pending=setup.real_cloud_pending <|
|
||||
group_builder.specify "should allow to create and delete secrets in a sub-directory" pending=setup.real_cloud_pending <|
|
||||
subdirectory = Enso_File.root.create_directory "my_test_subdirectory-1"
|
||||
subdirectory.should_succeed
|
||||
Panic.with_finalizer subdirectory.delete <|
|
||||
@ -171,7 +171,7 @@ spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
Enso_Secret.exists "my-nested-secret-1" parent=subdirectory . should_be_false
|
||||
Enso_Secret.get "my-nested-secret-1" parent=subdirectory . should_fail_with Not_Found
|
||||
|
||||
Test.specify "should allow to use secrets from a sub-directory" pending=(setup.real_cloud_pending.if_nothing setup.httpbin_pending) <|
|
||||
group_builder.specify "should allow to use secrets from a sub-directory" pending=(setup.real_cloud_pending.if_nothing setup.httpbin_pending) <|
|
||||
subdirectory = Enso_File.root.create_directory "my_test_subdirectory-2"
|
||||
subdirectory.should_succeed
|
||||
Panic.with_finalizer subdirectory.delete <|
|
||||
@ -183,7 +183,7 @@ spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
response = https.request (Request.get (setup.httpbin_secure_uri / "get") headers=[Header.new "X-My-Nested-Secret" nested_secret])
|
||||
response.decode_as_json.at "headers" . at "X-My-Nested-Secret" . should_equal "NESTED_secret_value"
|
||||
|
||||
Test.specify "should allow to update secrets within a sub-directory" pending=(setup.real_cloud_pending.if_nothing setup.httpbin_pending) <|
|
||||
group_builder.specify "should allow to update secrets within a sub-directory" pending=(setup.real_cloud_pending.if_nothing setup.httpbin_pending) <|
|
||||
subdirectory = Enso_File.root.create_directory "my_test_subdirectory-3"
|
||||
subdirectory.should_succeed
|
||||
Panic.with_finalizer subdirectory.delete <|
|
||||
@ -205,7 +205,7 @@ spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
response = https.request (Request.get (setup.httpbin_secure_uri / "get") headers=[Header.new "X-My-Nested-Secret" nested_secret])
|
||||
response.decode_as_json.at "headers" . at "X-My-Nested-Secret" . should_equal "Value-New-B"
|
||||
|
||||
Test.specify "should NOT be able to create/update/delete secrets with disabled Output Context" <|
|
||||
group_builder.specify "should NOT be able to create/update/delete secrets with disabled Output Context" <|
|
||||
Context.Output.with_disabled <|
|
||||
Enso_Secret.create "foo" "bar" . should_fail_with Forbidden_Operation
|
||||
|
||||
@ -221,7 +221,11 @@ spec setup:Cloud_Tests_Setup = setup.with_prepared_environment <|
|
||||
with_retries <| Enso_Secret.get "my_test_secret-13" . should_equal secret1
|
||||
|
||||
|
||||
main = Test_Suite.run_main (spec Cloud_Tests_Setup.prepare)
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter


wait_until_secret_is_propagated secret =
with_retries <| Enso_Secret.list . should_contain secret
@ -1,14 +1,18 @@
from Standard.Base import all

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

spec =
Test.group "Header" <|
Test.specify "should check equality" <|

add_specs suite_builder =
suite_builder.group "Header" group_builder->
group_builder.specify "should check equality" <|
Header.new "A" "B" . should_equal (Header.new "A" "B")
Header.new "A" "B" . should_equal (Header.new "a" "B")
(Header.new "A" "B" == Header.new "A" "b") . should_equal False
(Header.new "A" "B" == Header.new "a" "b") . should_equal False

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter
@ -4,12 +4,16 @@ import Standard.Base.Errors.Encoding_Error.Encoding_Error

import Standard.Base.Network.HTTP.Response.Response

import Standard.Test.Extensions
from Standard.Test import Test, Test_Suite

main = Test_Suite.run_main spec
from Standard.Test_New import all

spec =
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter


add_specs suite_builder =
## To run this test locally:
$ sbt 'http-test-helper/run localhost 8080'
$ export ENSO_HTTP_TEST_HTTPBIN_URL=http://localhost:8080/
@ -19,7 +23,7 @@ spec =
|
||||
base_url_with_slash = base_url.if_not_nothing <|
|
||||
if base_url.ends_with "/" then base_url else base_url + "/"
|
||||
|
||||
Test.group "Format Detection in HTTP Fetch" pending=pending_has_url <|
|
||||
suite_builder.group "Format Detection in HTTP Fetch" pending=pending_has_url group_builder->
|
||||
content_utf = '["x", "Hello! 😊👍 ąę"]'
|
||||
content_windows_1250 = '["y", "Hello! ąę"]'
|
||||
|
||||
@ -31,11 +35,11 @@ spec =
|
||||
. add_query_argument "Content-Type" "text/plain; charset=windows-1250"
|
||||
. add_query_argument "base64_response_data" (Base_64.encode_text content_windows_1250 Encoding.windows_1250)
|
||||
|
||||
Test.specify "should detect the encoding from Content-Type in fetch" <|
|
||||
group_builder.specify "should detect the encoding from Content-Type in fetch" <|
|
||||
url_utf8.fetch . should_equal content_utf
|
||||
url_windows_1250.fetch . should_equal content_windows_1250
|
||||
|
||||
Test.specify "should detect the encoding from Content-Type in decode_as_text" <|
|
||||
group_builder.specify "should detect the encoding from Content-Type in decode_as_text" <|
|
||||
r1 = url_utf8.fetch try_auto_parse_response=False
|
||||
r1.decode_as_text . should_equal content_utf
|
||||
|
||||
@ -46,7 +50,7 @@ spec =
|
||||
# We may override the encoding detected from Content-Type:
|
||||
r3.decode_as_text Encoding.ascii . should_fail_with Encoding_Error
|
||||
|
||||
Test.specify "should detect the encoding from Content-Type in decode_as_json" <|
|
||||
group_builder.specify "should detect the encoding from Content-Type in decode_as_json" <|
|
||||
r1 = url_utf8.fetch try_auto_parse_response=False
|
||||
r1.decode_as_json . should_equal ["x", "Hello! 😊👍 ąę"]
|
||||
|
||||
|
@ -4,47 +4,51 @@ import Standard.Base.Errors.Common.Syntax_Error
|
||||
import Standard.Base.Network.HTTP.Request.Request
|
||||
import Standard.Base.Network.HTTP.Request_Body.Request_Body
|
||||
|
||||
from Standard.Test import Test, Test_Suite
|
||||
import Standard.Test.Extensions
|
||||
from Standard.Test_New import all
|
||||
|
||||
spec =
|
||||
|
||||
add_specs suite_builder =
|
||||
test_uri = URI.parse "https://httpbin.org/post"
|
||||
test_headers = [Header.application_json, Header.new "X-Foo-Id" "0123456789"]
|
||||
Test.group "Request" <|
|
||||
Test.specify "should return error when creating request from invalid URI" <|
|
||||
suite_builder.group "Request" group_builder->
|
||||
group_builder.specify "should return error when creating request from invalid URI" <|
|
||||
Request.new HTTP_Method.Post "invalid uri" . should_fail_with Syntax_Error
|
||||
Test.specify "should get method" <|
|
||||
group_builder.specify "should get method" <|
|
||||
req = Request.new HTTP_Method.Post test_uri
|
||||
req.method.should_equal HTTP_Method.Post
|
||||
Test.specify "should get uri" <|
|
||||
group_builder.specify "should get uri" <|
|
||||
req = Request.get test_uri
|
||||
req.uri.should_equal test_uri
|
||||
Test.specify "should get headers" <|
|
||||
group_builder.specify "should get headers" <|
|
||||
req = Request.get test_uri test_headers
|
||||
req.headers.should_equal test_headers
|
||||
Test.specify "should add header" <|
|
||||
group_builder.specify "should add header" <|
|
||||
new_header = Header.accept_all
|
||||
req = Request.get test_uri test_headers . with_header new_header.name new_header.value
|
||||
req.headers.should_equal (test_headers + [new_header])
|
||||
Test.specify "should update header" <|
|
||||
group_builder.specify "should update header" <|
|
||||
req = Request.get test_uri test_headers . with_header "X-Foo-Id" "42"
|
||||
req.headers.should_equal [Header.application_json, Header.new "X-Foo-Id" "42"]
|
||||
Test.specify "should add headers" <|
|
||||
group_builder.specify "should add headers" <|
|
||||
req = Request.get test_uri . with_headers test_headers
|
||||
req.headers.should_equal test_headers
|
||||
Test.specify "should update headers" <|
|
||||
group_builder.specify "should update headers" <|
|
||||
new_headers = [Header.multipart_form_data, Header.accept_all]
|
||||
req = Request.get test_uri test_headers . with_headers new_headers
|
||||
req.headers.should_equal [Header.multipart_form_data, test_headers.at 1, Header.accept_all]
|
||||
Test.specify "should set json body" <|
|
||||
group_builder.specify "should set json body" <|
|
||||
json = '{"key":"val"}'
|
||||
req = Request.get test_uri . with_json json
|
||||
req.body.should_equal (Request_Body.Json json)
|
||||
req.headers.should_equal [Header.application_json]
|
||||
Test.specify "should set form body" <|
|
||||
group_builder.specify "should set form body" <|
|
||||
body_form = Map.from_vector [["key", "val"]]
|
||||
req = Request.get test_uri . with_form body_form
|
||||
req.body.should_equal (Request_Body.Form_Data body_form)
|
||||
req.headers.should_equal [Header.application_x_www_form_urlencoded]
|
||||
|
||||
main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -12,9 +12,8 @@ import Standard.Base.Network.Proxy.Proxy
|
||||
import Standard.Base.Runtime.Context
|
||||
from Standard.Base.Network.HTTP import resolve_headers
|
||||
|
||||
import Standard.Test.Extensions
|
||||
from Standard.Test import Test, Test_Suite
|
||||
from Standard.Test.Execution_Context_Helpers import run_with_and_without_output
|
||||
from Standard.Test_New import all
|
||||
from Standard.Test_New.Execution_Context_Helpers import run_with_and_without_output
|
||||
|
||||
type Test_Type
|
||||
Aaa (s:Text)
|
||||
@ -29,7 +28,7 @@ remove_fields json =
|
||||
json.set_value "headers" (json.at "headers" . set_value "User-Agent" "")
|
||||
compare_responses a b = (remove_fields a).should_equal (remove_fields b)
|
||||
|
||||
spec =
|
||||
add_specs suite_builder =
|
||||
## To run this test locally:
|
||||
$ sbt 'http-test-helper/run localhost 8080'
|
||||
$ export ENSO_HTTP_TEST_HTTPBIN_URL=http://localhost:8080/
|
||||
@ -39,24 +38,24 @@ spec =
|
||||
base_url_with_slash = base_url.if_not_nothing <|
|
||||
if base_url.ends_with "/" then base_url else base_url + "/"
|
||||
|
||||
Test.group "HTTP_Method parse" <|
|
||||
Test.specify "should be able to parse a string value into a method" <|
|
||||
suite_builder.group "HTTP_Method parse" group_builder->
|
||||
group_builder.specify "should be able to parse a string value into a method" <|
|
||||
"GET" . to HTTP_Method . should_equal (HTTP_Method.Get)
|
||||
"POST" . to HTTP_Method . should_equal (HTTP_Method.Post)
|
||||
"PUT" . to HTTP_Method . should_equal (HTTP_Method.Put)
|
||||
"PATCH" . to HTTP_Method . should_equal (HTTP_Method.Patch)

Test.specify "should be case insensitive" <|
group_builder.specify "should be case insensitive" <|
"get" . to HTTP_Method . should_equal (HTTP_Method.Get)
"pOst" . to HTTP_Method . should_equal (HTTP_Method.Post)
"puT" . to HTTP_Method . should_equal (HTTP_Method.Put)
"PATCH" . to HTTP_Method . should_equal (HTTP_Method.Patch)

Test.specify "should make a custom method" <|
group_builder.specify "should make a custom method" <|
"CUSTOM" . to HTTP_Method . should_equal (HTTP_Method.Custom "CUSTOM")

Test.group "HTTP_Status_Code" <|
Test.specify "should have a nice text representation" <|
suite_builder.group "HTTP_Status_Code" group_builder->
group_builder.specify "should have a nice text representation" <|
s1 = HTTP_Status_Code.ok
s1.code . should_equal 200
s1.to_text . should_equal "200 OK"
@ -67,31 +66,31 @@ spec =
s2.to_text . should_equal "404 Not Found"
s2.to_display_text . should_equal "Not Found"

Test.group "HTTP client" pending=pending_has_url <|
Test.specify "should create HTTP client with timeout setting" <|
suite_builder.group "HTTP client" pending=pending_has_url group_builder->
group_builder.specify "should create HTTP client with timeout setting" <|
http = HTTP.new (timeout = (Duration.new seconds=30))
http.timeout.should_equal (Duration.new seconds=30)

Test.specify "should create HTTP client with follow_redirects setting" <|
group_builder.specify "should create HTTP client with follow_redirects setting" <|
http = HTTP.new (follow_redirects = False)
http.follow_redirects.should_equal False

Test.specify "should create HTTP client with proxy setting" <|
group_builder.specify "should create HTTP client with proxy setting" <|
proxy_setting = Proxy.Address "example.com" 80
http = HTTP.new (proxy = proxy_setting)
http.proxy.should_equal proxy_setting

Test.specify "should create HTTP client with version setting" <|
group_builder.specify "should create HTTP client with version setting" <|
version_setting = HTTP_Version.HTTP_2
http = HTTP.new (version = version_setting)
http.version.should_equal version_setting

Test.group "fetch" pending=pending_has_url <|
suite_builder.group "fetch" pending=pending_has_url group_builder->
url_get = base_url_with_slash + "get"
url_head = base_url_with_slash + "head"
url_options = base_url_with_slash + "options"

Test.specify "Can perform a GET with a JSON response" <|
group_builder.specify "Can perform a GET with a JSON response" <|
response = Data.fetch url_get
expected_response = Json.parse <| '''
{
@ -112,7 +111,7 @@ spec =
uri_response = url_get.to URI . fetch
compare_responses uri_response expected_response

Test.specify "Can perform a HEAD" <|
group_builder.specify "Can perform a HEAD" <|
response = Data.fetch url_head method=HTTP_Method.Head
response.code.code . should_equal 200
response.decode_as_text . should_equal ''
@ -121,7 +120,7 @@ spec =
uri_response.code.code . should_equal 200
uri_response.decode_as_text . should_equal ''

Test.specify "Can perform an OPTIONS" <|
group_builder.specify "Can perform an OPTIONS" <|
response = Data.fetch url_options method=HTTP_Method.Options
response.code.code . should_equal 200
response.decode_as_text . should_equal ''
@ -130,14 +129,14 @@ spec =
uri_response.code.code . should_equal 200
uri_response.decode_as_text . should_equal ''

Test.specify "Can perform auto-parse" <|
group_builder.specify "Can perform auto-parse" <|
response = Data.fetch url_get
response.at "headers" . at "Content-Length" . should_equal "0"

uri_response = url_get.to_uri.fetch
uri_response.at "headers" . at "Content-Length" . should_equal "0"

Test.specify "Can skip auto-parse" <|
group_builder.specify "Can skip auto-parse" <|
response = Data.fetch url_get try_auto_parse_response=False
response.code.code . should_equal 200
expected_response = Json.parse <| '''
@ -160,28 +159,28 @@ spec =
uri_response.code.code . should_equal 200
compare_responses uri_response.decode_as_json expected_response

Test.specify "Can still perform request when output context is disabled" <|
group_builder.specify "Can still perform request when output context is disabled" <|
run_with_and_without_output <|
Data.fetch url_get try_auto_parse_response=False . code . code . should_equal 200
Data.fetch url_get method=HTTP_Method.Head try_auto_parse_response=False . code . code . should_equal 200
Data.fetch url_get method=HTTP_Method.Options try_auto_parse_response=False . code . code . should_equal 200

Test.specify "Unsupported method" <|
group_builder.specify "Unsupported method" <|
err = Data.fetch url_get method=HTTP_Method.Post
err.catch.should_equal (Illegal_Argument.Error "Unsupported method POST")

Test.specify "Cannot DELETE through fetch" <|
group_builder.specify "Cannot DELETE through fetch" <|
err = Data.fetch url_get method=HTTP_Method.Delete
err.catch.should_equal (Illegal_Argument.Error "Unsupported method DELETE")

Test.specify "unknown host" <|
group_builder.specify "unknown host" <|
Data.fetch "http://undefined_host.invalid" . should_fail_with Illegal_Argument

Test.specify "Fails on a bad URL scheme" <|
group_builder.specify "Fails on a bad URL scheme" <|
Data.fetch "zxcv://bad.scheme" . should_fail_with Illegal_Argument
Data.fetch "" . should_fail_with Illegal_Argument

Test.specify "can select the version" <|
group_builder.specify "can select the version" <|
req = Request.get url_get
r2 = HTTP.new version=HTTP_Version.HTTP_2 . request req . decode_as_json
r2.at "headers" . at "Connection" . should_equal "Upgrade, HTTP2-Settings"
@ -193,13 +192,13 @@ spec =
header_names.should_not_contain "http2-settings"
header_names.should_not_contain "upgrade"

Test.group "post" pending=pending_has_url <|
suite_builder.group "post" pending=pending_has_url group_builder->
url_post = base_url_with_slash + "post"
url_put = base_url_with_slash + "put"
url_patch = base_url_with_slash + "patch"
url_delete = base_url_with_slash + "delete"

Test.specify "Can perform a Request_Body.Text POST" <|
group_builder.specify "Can perform a Request_Body.Text POST" <|
response = Data.post url_post (Request_Body.Text "hello world")
expected_response = echo_response_template "POST" "/post" "hello world" content_type="text/plain; charset=UTF-8"
compare_responses response expected_response
@ -207,24 +206,24 @@ spec =
url_response = url_post.to_uri.post (Request_Body.Text "hello world")
compare_responses url_response expected_response

Test.specify "Can perform a Request_Body.Json JSON POST" <|
group_builder.specify "Can perform a Request_Body.Json JSON POST" <|
json = Json.parse '{"a": "asdf", "b": 123}'
response = Data.post url_post (Request_Body.Json json)
expected_response = echo_response_template "POST" "/post" '{"a":"asdf","b":123}' content_type="application/json"
compare_responses response expected_response

Test.specify "Can perform a JSON POST" <|
group_builder.specify "Can perform a JSON POST" <|
json = Json.parse '{"a": "asdf", "b": 123}'
response = Data.post url_post json
expected_response = echo_response_template "POST" "/post" '{"a":"asdf","b":123}' content_type="application/json"
compare_responses response expected_response

Test.specify "Can perform an object Request_Body.Json POST" <|
group_builder.specify "Can perform an object Request_Body.Json POST" <|
response = Data.post url_post (Request_Body.Json (Test_Type.Aaa "abc"))
expected_response = echo_response_template "POST" "/post" '{"type":"Test_Type","constructor":"Aaa","s":"abc"}' content_type="application/json"
compare_responses response expected_response

Test.specify "Can perform an object JSON POST" <|
group_builder.specify "Can perform an object JSON POST" <|
response = Data.post url_post (Test_Type.Bbb 12)
expected_response = echo_response_template "POST" "/post" '{"type":"Test_Type","constructor":"Bbb","i":12}' content_type="application/json"
compare_responses response expected_response
@ -232,10 +231,10 @@ spec =
uri_response = url_post.to_uri.post (Test_Type.Bbb 12)
compare_responses uri_response expected_response

Test.specify "can handle a bad .to_json" <|
group_builder.specify "can handle a bad .to_json" <|
Data.post url_post (Bad_To_Json.Aaa "abcd") . should_fail_with Illegal_Argument

Test.specify "Can perform a Text POST with explicit encoding" <|
group_builder.specify "Can perform a Text POST with explicit encoding" <|
body = Request_Body.Text 'Hello World!' encoding=Encoding.utf_16_le
response = Data.post url_post body
expected_response = echo_response_template "POST" "/post" "Hello World!" content_type="text/plain; charset=UTF-16LE" content_length=24
@ -244,34 +243,34 @@ spec =
uri_response = url_post.to_uri.post body
compare_responses uri_response expected_response

Test.specify "Can perform a Text POST with explicit content type" <|
group_builder.specify "Can perform a Text POST with explicit content type" <|
response = Data.post url_post (Request_Body.Text 'a,b,c\n' content_type="text/csv")

expected_response = echo_response_template "POST" "/post" 'a,b,c\n' content_type="text/csv; charset=UTF-8"
compare_responses response expected_response

Test.specify "Can perform a File POST" <|
group_builder.specify "Can perform a File POST" <|
test_file = enso_project.data / "sample.txt"
response = Data.post url_post (Request_Body.Binary test_file)
response.at "headers" . at "Content-Type" . should_equal "application/octet-stream"
expected_text = test_file.read_text
response . at "data" . should_equal expected_text

Test.specify "Can perform a binary File POST" <|
group_builder.specify "Can perform a binary File POST" <|
test_file = enso_project.data / "sample.png"
response = Data.post url_post (Request_Body.Binary test_file)
response.at "headers" . at "Content-Type" . should_equal "application/octet-stream"
response.at "headers" . at "Content-Length" . should_equal test_file.size.to_text
response.at "data" . should_start_with '\uFFFDPNG'

Test.specify "Can perform a url-encoded form POST" <|
group_builder.specify "Can perform a url-encoded form POST" <|
test_file = enso_project.data / "sample.txt"
form_data = Map.from_vector [["key", "val"], ["a_file", test_file]]
response = Data.post url_post (Request_Body.Form_Data form_data url_encoded=True)
response.at "headers" . at "Content-Type" . should_equal "application/x-www-form-urlencoded"
response.at "data" . replace "%0D%" "%" . should_equal 'key=val&a_file=Cupcake+ipsum+dolor+sit+amet.+Caramels+tootsie+roll+cake+ice+cream.+Carrot+cake+apple+pie+gingerbread+chocolate+cake+pudding+tart+souffl%C3%A9+jelly+beans+gummies.%0A%0ATootsie+roll+chupa+chups+muffin+croissant+fruitcake+jujubes+danish+cotton+candy+danish.+Oat+cake+chocolate+fruitcake+halvah+icing+oat+cake+toffee+powder.+Pastry+drag%C3%A9e+croissant.+Ice+cream+candy+canes+dessert+muffin+sugar+plum+tart+jujubes.%0A'

Test.specify "Can perform a multipart form POST" <|
group_builder.specify "Can perform a multipart form POST" <|
test_file = enso_project.data / "sample.png"
form_data = Map.from_vector [["key", "val"], ["a_file", test_file]]
response = Data.post url_post (Request_Body.Form_Data form_data)
@ -279,29 +278,29 @@ spec =
response_json.at "headers" . at "Content-Type" . should_start_with "multipart/form-data; boundary="
response_json.at "data" . is_empty . should_be_false

Test.specify "Can perform a File POST with auto-conversion" <|
group_builder.specify "Can perform a File POST with auto-conversion" <|
test_file = enso_project.data / "sample.txt"
response = Data.post url_post test_file
response.at "headers" . at "Content-Type" . should_equal "application/octet-stream"
expected_text = test_file.read_text
response . at "data" . should_equal expected_text

Test.specify "Can perform a Text POST with auto-conversion" <|
group_builder.specify "Can perform a Text POST with auto-conversion" <|
response = Data.post url_post "hello world"
expected_response = echo_response_template "POST" "/post" "hello world" content_type="text/plain; charset=UTF-8"
compare_responses response expected_response

Test.specify "Can perform a Request_Body.Text PUT" <|
group_builder.specify "Can perform a Request_Body.Text PUT" <|
response = Data.post url_put (Request_Body.Text "hello world") method=HTTP_Method.Put
expected_response = echo_response_template "PUT" "/put" "hello world" content_type="text/plain; charset=UTF-8"
compare_responses response expected_response

Test.specify "Can perform a Request_Body.Text PATCH" <|
group_builder.specify "Can perform a Request_Body.Text PATCH" <|
response = Data.post url_patch (Request_Body.Text "hello world" content_type="application/diff") method=HTTP_Method.Patch
expected_response = echo_response_template "PATCH" "/patch" "hello world" content_type="application/diff; charset=UTF-8"
compare_responses response expected_response

Test.specify "Can perform a DELETE" <|
group_builder.specify "Can perform a DELETE" <|
response = Data.post url_delete method=HTTP_Method.Delete
expected_response = Json.parse <| '''
{
@ -322,12 +321,12 @@ spec =
}
compare_responses response expected_response

Test.specify "Can skip auto-parse" <|
group_builder.specify "Can skip auto-parse" <|
response = Data.post url_post (Request_Body.Text "hello world") try_auto_parse_response=False
expected_response = echo_response_template "POST" "/post" "hello world" content_type="text/plain; charset=UTF-8"
compare_responses response.decode_as_json expected_response

Test.specify "Can send a custom header" <|
group_builder.specify "Can send a custom header" <|
response = Data.post url_post (Request_Body.Text "hello world") headers=[Header.new "Custom" "asdf", Header.new "Another" 'a:b: c - "ddd"']
expected_response = Json.parse <| '''
{
@ -351,7 +350,7 @@ spec =
}
compare_responses response expected_response

Test.specify "can handle HTTP errors" <|
group_builder.specify "can handle HTTP errors" <|
# This should give us 405 method not allowed
r1 = Data.post url_delete
r1.should_fail_with HTTP_Error
@ -381,35 +380,35 @@ spec =
r4.catch.status_code.code . should_equal 404
r4.catch.uri . should_equal (URI.from uri4)

Test.specify "Cannot perform POST when output context is disabled" <|
group_builder.specify "Cannot perform POST when output context is disabled" <|
Context.Output.with_disabled <|
Data.post url_post (Request_Body.Text "hello world") . should_fail_with Forbidden_Operation

Test.specify "Cannot perform PATCH/PUT/DELETE when output context is disabled" <|
group_builder.specify "Cannot perform PATCH/PUT/DELETE when output context is disabled" <|
Context.Output.with_disabled <|
Data.post url_post (Request_Body.Text "hello world") . should_fail_with Forbidden_Operation
Data.post url_put (Request_Body.Text "hello world") method=HTTP_Method.Put . should_fail_with Forbidden_Operation
Data.post url_patch (Request_Body.Text "hello world") method=HTTP_Method.Patch . should_fail_with Forbidden_Operation
Data.post url_delete method=HTTP_Method.Delete . should_fail_with Forbidden_Operation

Test.specify "Unsupported method" <|
group_builder.specify "Unsupported method" <|
err = Data.post url_post (Request_Body.Text "hello world") method=HTTP_Method.Get
err.catch.should_equal (Illegal_Argument.Error "Unsupported method GET")

err2 = Data.post url_post (Request_Body.Text "hello world") method=(HTTP_Method.Custom "BREW_COFFEE")
err2.catch.should_equal (Illegal_Argument.Error "Unsupported method Custom: BREW_COFFEE")

Test.specify "unknown host" <|
group_builder.specify "unknown host" <|
Data.post "http://undefined_host.invalid" (Request_Body.Text "hello world") . should_fail_with Illegal_Argument

Test.specify "Nonexistent file" <|
group_builder.specify "Nonexistent file" <|
test_file = enso_project.data / "does_not_exist.txt"
Data.post url_post (Request_Body.Binary test_file) . should_fail_with Request_Error

Test.group "Headers" pending=pending_has_url <|
suite_builder.group "Headers" pending=pending_has_url group_builder->
url_post = base_url_with_slash + "post"

Test.specify "Content-type in the body is respected" <|
group_builder.specify "Content-type in the body is respected" <|
response = Data.post url_post (Request_Body.Text '{"a": "asdf", "b": 123}' content_type="application/json")
expected_response = Json.parse <| '''
{
@ -431,7 +430,7 @@ spec =
}
compare_responses response expected_response

Test.specify "Content type in the header list is respected" <|
group_builder.specify "Content type in the header list is respected" <|
response = Data.post url_post (Request_Body.Text '{"a": "asdf", "b": 123}') headers=[Header.content_type "application/json"]
expected_response = Json.parse <| '''
{
@ -453,7 +452,7 @@ spec =
}
compare_responses response expected_response

Test.specify "Multiple content types in the header list are respected" <|
group_builder.specify "Multiple content types in the header list are respected" <|
response = Data.post url_post (Request_Body.Text '{"a": "asdf", "b": 123}') headers=[Header.content_type "application/json", Header.content_type "text/plain"]
## Our http-test-helper gets 2 Content-Type headers and merges them in the response.
How this is interpreted in practice depends on the server.
@ -477,7 +476,7 @@ spec =
}
compare_responses response expected_response

Test.specify "Unspecified content type defaults to text/plain" <|
group_builder.specify "Unspecified content type defaults to text/plain" <|
response = Data.post url_post (Request_Body.Text '{"a": "asdf", "b": 123}')
expected_response = Json.parse <| '''
{
@ -499,13 +498,13 @@ spec =
}
compare_responses response expected_response

Test.specify "Cannot specify content type in both body and headers" <|
group_builder.specify "Cannot specify content type in both body and headers" <|
Data.post url_post (Request_Body.Text "hello world" content_type="text/plain") headers=[Header.content_type "application/json"] . should_fail_with Illegal_Argument

Test.specify "Cannot specify content type (implicitly via explicit text encoding) in both body and headers" <|
group_builder.specify "Cannot specify content type (implicitly via explicit text encoding) in both body and headers" <|
Data.post url_post (Request_Body.Text "hello world" encoding=Encoding.utf_8) headers=[Header.content_type "application/json"] . should_fail_with Illegal_Argument

Test.specify "can also read headers from a response, when returning a raw response" <|
group_builder.specify "can also read headers from a response, when returning a raw response" <|
r1 = Data.post url_post (Request_Body.Text "hello world") try_auto_parse_response=False
r1.should_be_a Response
# The result is JSON data:
@ -524,7 +523,7 @@ spec =
r2.headers.find (p-> p.name.equals_ignore_case "Test-Header") . value . should_equal "test-value"
r2.headers.find (p-> p.name.equals_ignore_case "Other-Header") . value . should_equal "some other value"

Test.specify "is capable of handling aliasing headers" <|
group_builder.specify "is capable of handling aliasing headers" <|
uri = URI.from (base_url_with_slash + "test_headers")
. add_query_argument "my-header" "value-1"
. add_query_argument "my-header" "value-2"
@ -534,42 +533,42 @@ spec =
my_headers = r1.headers.filter (p-> p.name.equals_ignore_case "my-header") . map .value
my_headers.sort . should_equal ["value-1", "value-2", "value-44"]

Test.group "Header resolution" <|
Test.specify "Default content type and encoding" <|
suite_builder.group "Header resolution" group_builder->
group_builder.specify "Default content type and encoding" <|
expected = [Header.content_type "text/plain; charset=UTF-8"]
resolve_headers (Request.new HTTP_Method.Get "" [] (Request_Body.Text "")) . should_contain_the_same_elements_as expected

Test.specify "Content type specified in body" <|
group_builder.specify "Content type specified in body" <|
expected = [Header.content_type "application/json; charset=UTF-8"]
resolve_headers (Request.new HTTP_Method.Get "" [] (Request_Body.Text "" content_type="application/json")) . should_contain_the_same_elements_as expected

Test.specify "Content type specified in header list" <|
group_builder.specify "Content type specified in header list" <|
expected = [Header.content_type "application/json"]
resolve_headers (Request.new HTTP_Method.Get "" [Header.content_type "application/json"] (Request_Body.Text "")) . should_contain_the_same_elements_as expected

Test.specify "Text encoding specified in body" <|
group_builder.specify "Text encoding specified in body" <|
expected = [Header.content_type "text/plain; charset=UTF-16LE"]
resolve_headers (Request.new HTTP_Method.Get "" [] (Request_Body.Text "" encoding=Encoding.utf_16_le)) . should_contain_the_same_elements_as expected

Test.specify "Can't specify content type in both places" <|
group_builder.specify "Can't specify content type in both places" <|
resolve_headers (Request.new HTTP_Method.Get "" [Header.content_type "application/json"] (Request_Body.Text "" content_type="text/plain")) . should_fail_with Illegal_Argument

Test.specify "Custom header" <|
group_builder.specify "Custom header" <|
expected = [Header.new "some" "header", Header.content_type "application/json; charset=UTF-8"]
resolve_headers (Request.new HTTP_Method.Get "" [Header.new "some" "header"] (Request_Body.Text "" content_type="application/json")) . should_contain_the_same_elements_as expected

Test.specify "Multiple content types in header list are ok" <|
group_builder.specify "Multiple content types in header list are ok" <|
expected = [Header.content_type "application/json", Header.content_type "text/plain"]
resolve_headers (Request.new HTTP_Method.Get "" [Header.content_type "application/json", Header.content_type "text/plain"] (Request_Body.Text "")) . should_contain_the_same_elements_as expected

Test.group "Http Error handling" <|
Test.specify "should be able to handle request errors" <|
suite_builder.group "Http Error handling" group_builder->
group_builder.specify "should be able to handle request errors" <|
err = Data.fetch "http://0.0.0.0:1/"
err.should_fail_with Request_Error

## Checking this error partially as a warning - I spent a lot of time debugging why I'm getting such an error.
Apparently it happens when the httpbin server was crashing without sending any response.
Test.specify "should be able to handle server crash resulting in no response" pending=pending_has_url <|
group_builder.specify "should be able to handle server crash resulting in no response" pending=pending_has_url <|
err = Data.fetch (base_url_with_slash+"crash")
err.should_fail_with Request_Error
err.catch.error_type . should_equal "java.io.IOException"
@ -578,13 +577,13 @@ spec =
I think it may be worth adding, because it may be really quite confusing for end users who get that kind of error.
err.catch.message . should_equal "HTTP/1.1 header parser received no bytes"

Test.specify "should be able to handle IO errors" pending="TODO: Currently I was unable to figure out a way to test such errors" <|
group_builder.specify "should be able to handle IO errors" pending="TODO: Currently I was unable to figure out a way to test such errors" <|
# how to trigger this error???
err = Data.fetch "TODO"
err.should_fail_with HTTP_Error

Test.group "Http Auth" <|
Test.specify "should support Basic user+password authentication" pending=pending_has_url <|
suite_builder.group "Http Auth" group_builder->
group_builder.specify "should support Basic user+password authentication" pending=pending_has_url <|
url = base_url_with_slash + "test_basic_auth"

# Correct user and password
@ -608,7 +607,7 @@ spec =
r4.should_fail_with HTTP_Error
r4.catch.status_code.code . should_equal 403

Test.specify "should support Bearer token authentication" pending=pending_has_url <|
group_builder.specify "should support Bearer token authentication" pending=pending_has_url <|
url = base_url_with_slash + "test_token_auth"

# Correct token
@ -627,7 +626,11 @@ spec =
r3.should_fail_with HTTP_Error
r3.catch.status_code.code . should_equal 403

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter


echo_response_template method path data content_type content_length=data.length =
template = '''

@ -4,13 +4,12 @@ import Standard.Base.Errors.Common.Syntax_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Network.HTTP.Request.Request

from Standard.Test import Problems, Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

import project.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup
from project.Network.Enso_Cloud.Cloud_Tests_Setup import with_retries

spec =
add_specs suite_builder =
## To run this test locally:
$ sbt 'http-test-helper/run localhost 8080'
$ export ENSO_HTTP_TEST_HTTPBIN_URL=http://localhost:8080/
@ -20,8 +19,8 @@ spec =
pending_has_url = if base_url != Nothing then Nothing else
"The HTTP tests only run when the `ENSO_HTTP_TEST_HTTPBIN_URL` environment variable is set to URL of the httpbin server"

Test.group "URI" <|
Test.specify "should parse URI from string" <|
suite_builder.group "URI" group_builder->
group_builder.specify "should parse URI from string" <|
addr = URI.parse "http://user:pass@example.com/foo/bar?key=val"
addr.scheme.should_equal "http"
addr.user_info.should_equal "user:pass"
@ -32,7 +31,7 @@ spec =
addr.query.should_equal "key=val"
addr.fragment.should_equal Nothing

Test.specify "should allow to convert a text to URI" <|
group_builder.specify "should allow to convert a text to URI" <|
addr2 = URI.from "https://example.org:1234/?a=b&c=d+e#line=10,20"
addr2.should_be_a URI
addr2.scheme.should_equal "https"
@ -55,7 +54,7 @@ spec =
addr3.query.should_equal Nothing
addr3.fragment.should_equal Nothing

Test.specify "should escape URI" <|
group_builder.specify "should escape URI" <|
addr = URI.parse "https://%D0%9B%D0%B8%D0%BD%D1%83%D1%81:pass@ru.wikipedia.org/wiki/%D0%AF%D0%B4%D1%80%D0%BE_Linux?%D0%9A%D0%BE%D0%B4"
addr.user_info.should_equal "Линус:pass"
addr.authority.should_equal "Линус:pass@ru.wikipedia.org"
@ -68,13 +67,13 @@ spec =
addr.raw_query.should_equal "%D0%9A%D0%BE%D0%B4"
addr.raw_fragment.should_equal Nothing

Test.specify "should return Syntax_Error when parsing invalid URI" <|
group_builder.specify "should return Syntax_Error when parsing invalid URI" <|
r = URI.parse "a b c"
r.should_fail_with Syntax_Error
r.catch.to_display_text . should_contain "a b c"
URI.from "a b c" . should_fail_with Syntax_Error

Test.specify "should allow a URI without scheme or authority" <|
group_builder.specify "should allow a URI without scheme or authority" <|
uri = URI.parse "//a/b/c"
uri.scheme.should_equal Nothing
uri.authority.should_equal "a"
@ -86,11 +85,11 @@ spec =
uri2.authority.should_equal Nothing
uri2.path.should_equal "/a/b/c"

Test.specify "should compare two URIs for equality" <|
group_builder.specify "should compare two URIs for equality" <|
(URI.parse "http://google.com").should_equal (URI.parse "http://google.com")
(URI.parse "http://google.com").should_not_equal (URI.parse "http://amazon.com")

Test.specify "allows adding query arguments" <|
group_builder.specify "allows adding query arguments" <|
base_uri = URI.parse "http://a_user@example.com"
uri = base_uri . add_query_argument "foo" "bar"

@ -105,7 +104,7 @@ spec =
uri.fragment.should_equal Nothing
uri.to_text . should_equal "http://a_user@example.com/?foo=bar"

Test.specify "should be able to add multiple query arguments" <|
group_builder.specify "should be able to add multiple query arguments" <|
base_uri = URI.parse "https://example.com/path?a=b"
uri = base_uri . add_query_argument "c" "d" . add_query_argument "e" "f"

@ -120,7 +119,7 @@ spec =
uri.fragment.should_equal Nothing

# We rely on the http-test-helper for these tests, to ensure that the encoding is indeed correctly interpreted by a real-life server:
Test.specify "should correctly handle various characters within the key and value of arguments" pending=pending_has_url <|
group_builder.specify "should correctly handle various characters within the key and value of arguments" pending=pending_has_url <|
base_uri = URI.parse base_url+"get"

uri1 = base_uri . add_query_argument "a" "b"
@ -158,14 +157,14 @@ spec =
r4 = uri4.fetch
decode_query_params r4 . should_equal [["p+r", "b c"], ["p r", "b c"], ["🚀", "🚧"], ["śnieżnobiałą", "łąkę"], [s2, "zzz"]]

Test.specify "may allow duplicate keys in query parameters" pending=pending_has_url <|
group_builder.specify "may allow duplicate keys in query parameters" pending=pending_has_url <|
uri = URI.parse base_url+"get"
. add_query_argument "a" "b"
. add_query_argument "a" "c"
. add_query_argument "a" "d"
decode_query_params uri.fetch . should_equal [["a", "b"], ["a", "c"], ["a", "d"]]

Test.specify "should allow to get properties of a URI with added query arguments and user info" <|
group_builder.specify "should allow to get properties of a URI with added query arguments and user info" <|
base_uri = URI.parse "https://example.com/path?a=b#FRAG"
uri1 = base_uri . add_query_argument "c" "d" . add_query_argument "e" "f"

@ -178,7 +177,7 @@ spec =
uri1.fragment.should_equal "FRAG"
uri1.should_equal (URI.parse "https://example.com/path?a=b&c=d&e=f#FRAG")

Test.specify "should allow the / syntax for extending the path on an URI" <|
group_builder.specify "should allow the / syntax for extending the path on an URI" <|
uri0 = URI.parse "https://example.com"
uri1 = uri0 / "path"
uri1.path . should_equal "/path"
@ -196,7 +195,7 @@ spec =
uri4.path . should_equal "/path/2"
uri4.to_text . should_equal "https://example.com/path/2"

Test.specify "should work fine with / characters and empty segments" <|
group_builder.specify "should work fine with / characters and empty segments" <|
uri0 = URI.parse "http://example.com/get/"

uri1 = uri0 / "///0" / "a///b" / "c///"
@ -212,7 +211,7 @@ spec =
uri5 = uri4 / "b"
uri5.path . should_equal "/get/a//b"

Test.specify "should allow various characters in the path" pending=pending_has_url <|
group_builder.specify "should allow various characters in the path" pending=pending_has_url <|
uri0 = URI.parse base_url+"get"
uri1 = uri0 / "a b c" / "d+e" / "f%20g" / "ś🚧:@"

@ -230,13 +229,13 @@ spec =
r2 = uri3.fetch
r2.at "path" . should_equal "/get/"+ext

Test.specify "should allow slashes within path" <|
group_builder.specify "should allow slashes within path" <|
uri0 = URI.parse "https://example.com/"
uri1 = uri0 / "a" / "b" / "c/d/e" / "f////g"
uri1.path . should_equal "/a/b/c/d/e/f////g"
uri1.to_text . should_equal "https://example.com/a/b/c/d/e/f////g"

Test.specify "will warn about ? or # being encoded in path" <|
group_builder.specify "will warn about ? or # being encoded in path" <|
uri0 = URI.parse "https://example.com/a/b/c"

uri1 = uri0 / "x?y"
@ -261,7 +260,7 @@ spec =
uri4.fragment . should_equal "d"
uri4.to_text . should_equal "https://example.com/a/x%3Fy/z%23w?b=c#d"

Test.specify "should keep existing query arguments intact when extending the path" <|
group_builder.specify "should keep existing query arguments intact when extending the path" <|
uri0 = URI.parse "https://example.com/path?a=b&c=d"
uri1 = uri0 / "x" / "y"
uri2 = uri1 . add_query_argument "e" "f"
@ -271,7 +270,7 @@ spec =
uri3.query . should_equal "a=b&c=d&e=f"
uri3.to_text . should_equal "https://example.com/path/x/y/z?a=b&c=d&e=f"

Test.specify "should allow to reset query arguments" <|
group_builder.specify "should allow to reset query arguments" <|
uri0 = URI.parse "https://example.com/path?a=b&c=d#FRAG"
uri1 = uri0 . add_query_argument "e" "f"

@ -282,8 +281,9 @@ spec =
uri2.to_text . should_equal "https://example.com/path#FRAG"

cloud_setup = Cloud_Tests_Setup.prepare
cloud_setup.with_prepared_environment <|
Test.specify "will not convert back to URI if secrets are present in the query arguments" pending=cloud_setup.pending <|

group_builder.specify "will not convert back to URI if secrets are present in the query arguments" pending=cloud_setup.pending <|
cloud_setup.with_prepared_environment <|
secret1 = Enso_Secret.create "my_test_secret-uri-1" "My Value"
secret1.should_succeed
Panic.with_finalizer secret1.delete <|
@ -295,7 +295,8 @@ spec =
uri.to_java_uri . should_fail_with Enso_Secret_Error
uri.raw_query . should_fail_with Enso_Secret_Error

Test.specify "should allow to use secrets in query arguments, containing all kinds of various characters" pending=cloud_setup.httpbin_pending <|
group_builder.specify "should allow to use secrets in query arguments, containing all kinds of various characters" pending=cloud_setup.httpbin_pending <|
cloud_setup.with_prepared_environment <|
secret1 = Enso_Secret.create "my_test_secret-uri-4" "My Very Secret Value"
secret1.should_succeed
Panic.with_finalizer secret1.delete <|
@ -315,7 +316,8 @@ spec =
response = cloud_setup.httpbin_secure_client.request (Request.get uri)
decode_query_params response.decode_as_json . should_equal [["arg1", "My Very Secret Value"], ["arg2", "plain value"], ["arg3", s2]]

Test.specify "does not allow secrets in non-https requests" pending=cloud_setup.pending <|
group_builder.specify "does not allow secrets in non-https requests" pending=cloud_setup.pending <|
cloud_setup.with_prepared_environment <|
secret1 = Enso_Secret.create "my_test_secret-uri-8" "My Value"
secret1.should_succeed
Panic.with_finalizer secret1.delete <|
@ -328,7 +330,11 @@ spec =
r1.should_fail_with Illegal_Argument
r1.catch.to_display_text . should_contain "Secrets are not allowed in HTTP connections, use HTTPS instead."

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter


decode_query_params : Json -> Vector (Pair Text Text)
decode_query_params json_response =

@ -2,11 +2,11 @@ from Standard.Base import all
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Random.Random_Generator

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

spec = Test.group "Random" <|
Test.specify "should allow generating random integers" <|

add_specs suite_builder = suite_builder.group "Random" group_builder->
group_builder.specify "should allow generating random integers" <|
Random.set_seed 12345
Random.integer 0 99 . should_equal 51
Random.integer 0 9999 . should_equal 9080
@ -15,7 +15,7 @@ spec = Test.group "Random" <|
(random_range.at 0 >= 0) . should_equal True
(random_range.at 1 <= 99) . should_equal True

Test.specify "should allow generating random floats" <|
group_builder.specify "should allow generating random floats" <|
Random.set_seed 12345
Random.float . should_equal 0.3618031071604718 epsilon=0.00000001
Random.float . should_equal 0.932993485288541 epsilon=0.00000001
@ -24,11 +24,11 @@ spec = Test.group "Random" <|
(random_range.at 0 >= 0) . should_equal True
(random_range.at 1 <= 1) . should_equal True

Test.specify "should allow generating random booleans" <|
group_builder.specify "should allow generating random booleans" <|
Random.set_seed 0
0.up_to 3 . map _->Random.boolean . should_equal [True, True, False]

Test.specify "should allow generating random gaussian floats" <|
group_builder.specify "should allow generating random gaussian floats" <|
Random.set_seed 12345
Random.gaussian . should_equal -0.187808989658912 epsilon=0.00000001
Random.gaussian . should_equal 0.5884363051154796 epsilon=0.00000001
@ -40,7 +40,7 @@ spec = Test.group "Random" <|
0.up_to 100 . map (_-> Random.gaussian mean=1000000) . any (_ > 1000000) . should_be_true
0.up_to 100 . map (_-> Random.gaussian) . any (_ > 1000000) . should_be_false

Test.specify "should allow to generate random indices" <|
group_builder.specify "should allow to generate random indices" <|
Random.set_seed 0

two_out_of_three = 0.up_to 100 . map _->
@ -69,7 +69,7 @@ spec = Test.group "Random" <|
Random.indices 1 100 . should_equal [0]
Random.indices 100 0 . should_equal []

Test.specify "should allow generating random dates" <|
group_builder.specify "should allow generating random dates" <|
Random.set_seed 4000
Random.date (Date.new 2023 03 01) (Date.new 2023 10 14) . should_equal (Date.new 2023 6 9)
Random.date (Date.new 2023 03 01) (Date.new 2023 10 14) . should_equal (Date.new 2023 7 16)
@ -79,7 +79,7 @@ spec = Test.group "Random" <|
dates = 0.up_to 100 . map (_-> Random.date (Date.new 2023 03 01) (Date.new 2023 03 03))
dates.should_contain_the_same_elements_as all_from_small_range

Test.specify "should allow generating random times" <|
group_builder.specify "should allow generating random times" <|
Random.set_seed 12345
start = Time_Of_Day.new 8 12 15
end = Time_Of_Day.new 9 40 2
@ -96,10 +96,10 @@ spec = Test.group "Random" <|
times = 0.up_to 100 . map (_-> Random.time small_range_start small_range_end)
times.should_contain_the_same_elements_as all_from_small_range

Test.specify "should allow generating random UUIDs" <|
group_builder.specify "should allow generating random UUIDs" <|
Random.uuid . should_only_contain_elements_in "0123456789abcdef-"

Test.specify "should allow to select random items from a vector, without replacement" <|
group_builder.specify "should allow to select random items from a vector, without replacement" <|
Random.set_seed 0
vector = ["A", "B", "C"]
shuffles = 0.up_to 100 . map _->
@ -110,7 +110,7 @@ spec = Test.group "Random" <|
Random.items ["A", "A", "A"] 0 . should_equal []
Random.items ["A", "A", "A"] 3 . should_equal ["A", "A", "A"]

Test.specify "should allow to select random items from a vector, with replacement" <|
group_builder.specify "should allow to select random items from a vector, with replacement" <|
Random.set_seed 0
vector = 0.up_to 10 . to_vector

@ -125,12 +125,12 @@ spec = Test.group "Random" <|
items.should_only_contain_elements_in vector
items.length . should_equal 5

Test.specify "should not allow to select more items than the vector has, without replacement" <|
group_builder.specify "should not allow to select more items than the vector has, without replacement" <|
vector = 0.up_to 10 . to_vector
Random.items vector 11 . should_fail_with Illegal_Argument
Random.items vector 100 . should_fail_with Illegal_Argument

Test.specify "should allow to generate random permutations" <|
group_builder.specify "should allow to generate random permutations" <|
Random.set_seed 0
list = [0, 1, 2]
permutations = 0.up_to 100 . map _->
@ -142,15 +142,19 @@ spec = Test.group "Random" <|
permutations . should_contain_the_same_elements_as all_permutations
permutations_2 . should_contain_the_same_elements_as all_permutations

Test.specify "should not allow using a too-large integer range" <|
group_builder.specify "should not allow using a too-large integer range" <|
high = 9223372036854775806999
Random.integer 0 high . should_fail_with Illegal_Argument

Test.specify "Can call an instance directly" <|
group_builder.specify "Can call an instance directly" <|
Random.new_generator . should_be_a Random_Generator
Random.new_generator 12345 . should_be_a Random_Generator
Random.new_generator 12345 . integer 0 99 . should_equal 51
Random.new_generator 12345 . float . should_equal 0.3618031071604718 epsilon=0.00000001
Random.new_generator 12345 . gaussian . should_equal -0.187808989658912 epsilon=0.00000001

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -1,11 +1,11 @@
from Standard.Base import all

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


import Standard.Base.Errors.Common.Type_Error

spec = Test.group "Function Ascribed Parameters" <|
add_specs suite_builder = suite_builder.group "Function Ascribed Parameters" group_builder->
t1 (f1 : Function) =
f1 "x"

@ -24,41 +24,45 @@ spec = Test.group "Function Ascribed Parameters" <|

with_type_error ~action = Panic.catch Type_Error action panic->panic.convert_to_dataflow_error

Test.specify "t1 with surround type check" <|
group_builder.specify "t1 with surround type check" <|
(t1 surround) . should_equal "|x|"

Test.specify "t2 with surround type check" <|
group_builder.specify "t2 with surround type check" <|
(t2 surround) . should_equal "|x|"

Test.specify "t3 with surround type check" <|
group_builder.specify "t3 with surround type check" <|
(t3 surround) . should_equal "|x|"

Test.specify "t4 with surround type check" <|
group_builder.specify "t4 with surround type check" <|
(t4 surround) . should_equal "|x|"

Test.specify "t1 with 42 type check" <|
group_builder.specify "t1 with 42 type check" <|
with_type_error (t1 6) . should_fail_with Type_Error

Test.specify "t2 with 42 type check" <|
group_builder.specify "t2 with 42 type check" <|
with_type_error (t2 6) . should_fail_with Type_Error

Test.specify "t3 with 42 type check" <|
group_builder.specify "t3 with 42 type check" <|
(t3 6) . should_equal 42

Test.specify "t4 with 42 type check" <|
group_builder.specify "t4 with 42 type check" <|
(t4 6) . should_equal 42

Test.specify "t1 with text type check" <|
group_builder.specify "t1 with text type check" <|
with_type_error (t1 "hi") . should_fail_with Type_Error

Test.specify "t2 with text type check" <|
group_builder.specify "t2 with text type check" <|
with_type_error (t2 "hi") . should_fail_with Type_Error

Test.specify "t3 with text type check" <|
group_builder.specify "t3 with text type check" <|
with_type_error (t3 "hi") . should_fail_with Type_Error

Test.specify "t4 with text type check" <|
group_builder.specify "t4 with text type check" <|
with_type_error (t4 "hi") . should_fail_with Type_Error


main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -3,35 +3,39 @@ from Standard.Base import all
from Standard.Base.Errors.Common import Assertion_Error
from Standard.Base.Errors.Common import Type_Error

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


foreign js js_check = """
return (4 == 2 + 2)

spec = Test.group "Asserts" <|
Test.specify "should be enabled in tests" <|
add_specs suite_builder = suite_builder.group "Asserts" group_builder->
group_builder.specify "should be enabled in tests" <|
p = Panic.catch Assertion_Error (Runtime.assert False) err->
err.payload
Meta.type_of p . should_be_a Assertion_Error

Test.specify "should be able to take foreign functions as expressions" <|
group_builder.specify "should be able to take foreign functions as expressions" <|
ret = Runtime.assert js_check
ret . should_be_a Nothing

Test.specify "should be able to take a block as expressions" <|
group_builder.specify "should be able to take a block as expressions" <|
ret = Runtime.assert <|
4 == 2 + 2
ret . should_be_a Nothing

Test.specify "should be able to take values with warnings" <|
group_builder.specify "should be able to take values with warnings" <|
foo x = Warning.attach "My warning" (x+2)
Runtime.assert (foo 2 > 2) . should_be_a Nothing

Test.specify "should fail with Type_Error if action does not return Boolean" <|
group_builder.specify "should fail with Type_Error if action does not return Boolean" <|
p = Panic.catch Type_Error (Runtime.assert 42) err->
err
Meta.type_of p.payload . should_be_a Type_Error


main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -1,5 +1,5 @@
from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


type Generator
Value n ~next
@ -8,8 +8,8 @@ natural =
gen n = Generator.Value n (gen n+1)
gen 2

spec = Test.group "Lazy Generator" <|
Test.specify "Generates four numbers properly" <|
add_specs suite_builder = suite_builder.group "Lazy Generator" group_builder->
group_builder.specify "Generates four numbers properly" <|
two = natural
two.n . should_equal 2
three = two.next
@ -17,4 +17,8 @@ spec = Test.group "Lazy Generator" <|
four = three.next
four.n . should_equal 4

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -3,16 +3,16 @@ from Standard.Base import all
import Standard.Base.Runtime.Ref.Ref
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


type Lazy
Value ~get
new ~computation = Lazy.Value computation
new_eager computation = Lazy.Value computation

spec = Test.group "Lazy" <|
Test.specify "should compute the result only once" <|
add_specs suite_builder = suite_builder.group "Lazy" group_builder->
group_builder.specify "should compute the result only once" <|
ref = Ref.new 0
compute =
ref.put (ref.get+1)
@ -31,7 +31,7 @@ spec = Test.group "Lazy" <|

ref.get . should_equal 3

Test.specify "should compute the result only once, even if copied" <|
group_builder.specify "should compute the result only once, even if copied" <|
ref = Ref.new 0
compute =
ref.put (ref.get+1)
@ -43,7 +43,7 @@ spec = Test.group "Lazy" <|
vec.map .get . should_equal ["Value", "Value", "Value", "Value"]
ref.get . should_equal 1

Test.specify "should cache the result even if it results in a dataflow error" <|
group_builder.specify "should cache the result even if it results in a dataflow error" <|
ref = Ref.new 0
compute =
ref.put (ref.get+1)
@ -57,7 +57,7 @@ spec = Test.group "Lazy" <|
r.get.catch.message . should_equal "FOO"
ref.get . should_equal 1

Test.specify "should cache the result even if the operation panics" <|
group_builder.specify "should cache the result even if the operation panics" <|
ref = Ref.new 0
compute =
ref.put (ref.get+1)
@ -75,7 +75,7 @@ spec = Test.group "Lazy" <|
msg.should_equal "FOO"
ref.get . should_equal 1

Test.specify "should allow to create a Lazy instance that is computed eagerly" <|
group_builder.specify "should allow to create a Lazy instance that is computed eagerly" <|
ref = Ref.new 0
compute =
ref.put (ref.get+1)
@ -89,11 +89,15 @@ spec = Test.group "Lazy" <|
r.get . should_equal "Value"
ref.get . should_equal 1

Test.specify "eager mode will not handle dataflow errors/panics specially" <|
group_builder.specify "eager mode will not handle dataflow errors/panics specially" <|
r = Lazy.new_eager (Error.throw (Illegal_Argument.Error "FOO"))
r.should_fail_with Illegal_Argument

Test.expect_panic_with matcher=Illegal_Argument <|
Lazy.new_eager (Panic.throw (Illegal_Argument.Error "FOO"))

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -2,11 +2,11 @@ from Standard.Base import all
import Standard.Base.Errors.Illegal_State.Illegal_State
import Standard.Base.Runtime.Managed_Resource.Managed_Resource

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

spec = Test.group "Managed_Resource" <|
Test.specify "should call the destructor even if the action fails" <|

add_specs suite_builder = suite_builder.group "Managed_Resource" group_builder->
group_builder.specify "should call the destructor even if the action fails" <|
log_1 = Vector.new_builder
r_1 = Managed_Resource.bracket 42 log_1.append x->
log_1.append x+1
@ -31,7 +31,7 @@ spec = Test.group "Managed_Resource" <|
r_3.catch . should_equal (Illegal_State.Error "foo")
log_3.to_vector . should_equal [43, 44, 42]

Test.specify "should not proceed further if initialization fails" <|
group_builder.specify "should not proceed further if initialization fails" <|
log_1 = Vector.new_builder
r_1 = Panic.recover Any <| Managed_Resource.bracket (Panic.throw (Illegal_State.Error "foo")) (_ -> log_1.append "destructor") _->
log_1.append "action"
@ -46,7 +46,7 @@ spec = Test.group "Managed_Resource" <|
r_2.catch . should_equal (Illegal_State.Error "foo")
log_2.to_vector . should_equal []

Test.specify "should forward panics thrown in initializer and destructor" <|
group_builder.specify "should forward panics thrown in initializer and destructor" <|
r_1 = Panic.recover Any <| Managed_Resource.bracket (Panic.throw "init") (_-> Panic.throw "destruct") (_-> Panic.throw "action")
r_1.catch . should_equal "init"

@ -56,4 +56,8 @@ spec = Test.group "Managed_Resource" <|
r_3 = Panic.recover Any <| Managed_Resource.bracket 42 (_-> Nothing) (_-> Panic.throw "action")
r_3.catch . should_equal "action"

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -4,8 +4,8 @@ import Standard.Base.Errors.Common.Not_Invokable

import Standard.Base.Runtime.Ref.Ref

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


my_function (xyz : Integer = Missing_Required_Argument.ensure_present "xyz") (y : Integer = 100) =
xyz + y
@ -19,8 +19,8 @@ type Foo
member self (txt : Text = Missing_Required_Argument.ensure_present "txt") =
"Foo(" + self.x.to_text + ", " + self.y.to_text + "): " + txt

spec = Test.group "Missing_Required_Argument" <|
Test.specify "should do nothing if the argument is provided" <|
add_specs suite_builder = suite_builder.group "Missing_Required_Argument" group_builder->
group_builder.specify "should do nothing if the argument is provided" <|
r1 = my_function 23
r1.should_equal 123

@ -37,7 +37,7 @@ spec = Test.group "Missing_Required_Argument" <|
r5 = Foo.Ctor 44 . member "Hello"
r5 . should_equal "Foo(44, 100): Hello"

Test.specify "should raise an error if a required argument is not specified" <|
group_builder.specify "should raise an error if a required argument is not specified" <|
r1 = my_function
r1.should_fail_with Missing_Required_Argument
r1.catch.argument_name . should_equal "xyz"
@ -58,7 +58,7 @@ spec = Test.group "Missing_Required_Argument" <|
r4.catch.function_name . should_equal "Foo.member"
r4.catch.to_display_text . should_equal "Missing required argument `txt` in function `Foo.member`."

Test.specify "but will still allow suspending the argument explicitly" <|
group_builder.specify "but will still allow suspending the argument explicitly" <|
f1 = my_function _
f1.should_be_a Function

@ -84,4 +84,8 @@ spec = Test.group "Missing_Required_Argument" <|
mf4.should_be_a Function
(mf4 "Hello") . should_equal "Foo(44, 100): Hello"

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -2,11 +2,11 @@ from Standard.Base import all

import Standard.Base.Runtime.Ref.Ref

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

spec = Test.group "Refs" <|
Test.specify "should be able to store and retrieve value in references" <|

add_specs suite_builder = suite_builder.group "Refs" group_builder->
group_builder.specify "should be able to store and retrieve value in references" <|
r = Ref.new 'foo'
r.get . should_equal 'foo'
r.put 'bar'
@ -18,7 +18,11 @@ spec = Test.group "Refs" <|
vec.at 0 . put 123
vec.at 1 . get . should_equal 123

Test.specify "Example" <|
group_builder.specify "Example" <|
(Ref.new 10) . modify (_+1) . should_equal 10

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -1,7 +1,7 @@
from Standard.Base import all

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


type My_Type

@ -11,8 +11,8 @@ Number.foo self = baz
foo x = x.foo
My_Type.foo self = foo 123

spec = Test.group "Stack traces" <|
Test.specify "should capture traces correctly" <|
add_specs suite_builder = suite_builder.group "Stack traces" group_builder->
group_builder.specify "should capture traces correctly" <|
modname = Meta.get_simple_type_name Stack_Traces_Spec
stack = My_Type.foo
names = [modname + ".bar", modname + ".baz", "Number.foo", modname + ".foo", "My_Type.foo"]

@ -3,16 +3,16 @@ from Standard.Base import all
import Standard.Base.Runtime.State
import Standard.Base.Errors.Common.Unsupported_Argument_Types

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

spec = Test.group "State" <|
Test.specify "Type as a key" <|

add_specs suite_builder = suite_builder.group "State" group_builder->
group_builder.specify "Type as a key" <|
s = State.run Test 42 <|
State.get Test
s . should_equal 42

Test.specify "string as a key" <|
group_builder.specify "string as a key" <|
s n = State.run "my_state" n <|
State.get "my_state"

@ -22,4 +22,8 @@ spec = Test.group "State" <|
Meta.type_of p . should_equal Unsupported_Argument_Types


main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -1,44 +1,44 @@
from Standard.Base import all

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

from project.Semantic.Definitions.Any_Types import all

spec =
Test.group "Any.map_nothing" <|
Test.specify "should apply the function to a value" <|
add_specs suite_builder =
suite_builder.group "Any.map_nothing" group_builder->
group_builder.specify "should apply the function to a value" <|
10.map_nothing *2 . should_equal 20

Test.specify "should return `Nothing` unchanged" <|
group_builder.specify "should return `Nothing` unchanged" <|
Nothing.map_nothing *2 . should_equal Nothing

Test.group "Callables" <|
Test.specify "should be able to be applied in a pipeline using |>" <|
suite_builder.group "Callables" group_builder->
group_builder.specify "should be able to be applied in a pipeline using |>" <|
(1 |> *2) . should_equal 2
(2 |> My_Type.Value) . should_equal (My_Type.Value 2)
(2.3 |> .floor) . should_equal 2

Test.specify "should be able to be applied to an argument using <|" <|
group_builder.specify "should be able to be applied to an argument using <|" <|
(*2 <| 1) . should_equal 2
(My_Type.Value <| 2) . should_equal (My_Type.Value 2)
(.floor <| 2.3) . should_equal 2

Test.specify "should be able to be composed backward using <<" <|
group_builder.specify "should be able to be composed backward using <<" <|
(+1 << *2) 2 . should_equal 5
(My_Type.Value << *2) 2 . should_equal <| My_Type.Value 4
(.floor << *2.25) 2 . should_equal 4

Test.specify "should be able to be composed forward using >>" <|
group_builder.specify "should be able to be composed forward using >>" <|
(+1 >> *2) 2 . should_equal 6
(*2 >> My_Type.Value) 2 . should_equal <| My_Type.Value 4
(*2 >> .floor) 2.75 . should_equal 5

Test.specify "should define generic inequality on values" <|
group_builder.specify "should define generic inequality on values" <|
(1 != 2) . should_be_true
(1 != 1) . should_be_false

Test.group "Any's methods" <|
Test.specify "should not be overridable when called statically" <|
suite_builder.group "Any's methods" group_builder->
group_builder.specify "should not be overridable when called statically" <|
My_Type.Value 33 . x . should_equal "Any:(My_Type.Value 33)"
With_X.Value 44 . x . should_equal "With_X:(With_X.Value 44)"
With_Y.Value 44 . x . should_equal "With_Y:With_Y(44)"
@ -58,7 +58,7 @@ spec =
Any.x (With_Y.Value 22) . should_equal "Any:With_Y(22)"
Date.to_display_text . should_equal "Date"

Test.specify "static method calls on Any should have defaulted self argument to Any" <|
group_builder.specify "static method calls on Any should have defaulted self argument to Any" <|
# Any.== is a method that takes two arguments (self other)
(Any.==) . should_be_a Function
(Any.== self=Boolean) . should_be_a Function
@ -96,7 +96,7 @@ spec =
Date.spec_method Boolean Vector . should_equal "Any.spec_method:{Date}{Boolean}{Vector}"


Test.specify "instance methods on different types should not have defaulted self argument" <|
group_builder.specify "instance methods on different types should not have defaulted self argument" <|
# Vector.contains has two arguments (self elem)
Vector.contains . to_text . contains "Vector.type.contains" . should_be_true
(Vector.contains [42]) . to_text . contains "Vector.type.contains" . should_be_true
@ -109,4 +109,8 @@ spec =
Vector.is_empty [] . should_be_true


main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -11,11 +11,11 @@ polyglot java import java.util.AbstractList
polyglot java import java.util.ArrayList
polyglot java import java.util.List as Java_List

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

spec = Test.group "Pattern Matches" <|
Test.specify "should be able to match on the Boolean type" <|

add_specs suite_builder = suite_builder.group "Pattern Matches" group_builder->
group_builder.specify "should be able to match on the Boolean type" <|
case Boolean of
Boolean -> Nothing
_ -> Test.fail "Expected the Boolean constructor to match."
@ -27,7 +27,7 @@ spec = Test.group "Pattern Matches" <|
Boolean -> Test.fail "Expected False to match on a Boolean type check."
_ : Boolean -> Nothing
_ -> Test.fail "Expected False to match on a Boolean type check."
Test.specify "should be able to match on the Integer type" <|
group_builder.specify "should be able to match on the Integer type" <|
case 1 of
Integer -> Test.fail "Expected an integer to match."
_ : Integer -> Nothing
@ -39,7 +39,7 @@ spec = Test.group "Pattern Matches" <|
case Integer of
Integer -> Nothing
_ -> Test.fail "Expected the Integer constructor to match."
Test.specify "should be able to match on the Float type" <|
group_builder.specify "should be able to match on the Float type" <|
case 1.7 of
_ : Integer -> Test.fail "Expected a decimal to match."
Float -> Test.fail "Expected a decimal to match."
@ -48,7 +48,7 @@ spec = Test.group "Pattern Matches" <|
case Float of
Float -> Nothing
_ -> Test.fail "Expected the Float constructor to match."
Test.specify "match on the Number literal" <|
group_builder.specify "match on the Number literal" <|
foo x = case x of
1 -> "ONE"
1.0 -> "one"
@ -69,7 +69,7 @@ spec = Test.group "Pattern Matches" <|
foo 9999 . should_equal "NINE THOUSAND NINE HUNDRED NINETY NINE"
foo 12345678901234567890123456789*10 . should_equal "a lot"

Test.specify "should be able to match on the Number type" <|
group_builder.specify "should be able to match on the Number type" <|
case 1 of
_ : Number -> Nothing
_ -> Test.fail "Expected a number to match."
@ -91,7 +91,7 @@ spec = Test.group "Pattern Matches" <|
case Float of
_ : Number -> Test.fail "Float type isn't instance of Number type"
_ -> Nothing
Test.specify "should be able to match on the Text type" <|
group_builder.specify "should be able to match on the Text type" <|
case "foo" of
Text -> Test.fail "Expected a text type to match."
_ : Text -> Nothing
@ -106,7 +106,7 @@ spec = Test.group "Pattern Matches" <|
Text -> Test.fail "Expected the Text module to match4."
Text_Module -> Nothing
_ -> Test.fail "Expected the Text module to matc5h."
Test.specify "should be able to match on the Array type" <|
group_builder.specify "should be able to match on the Array type" <|
case [1].to_array of
Array -> Test.fail "Expected an array type to match."
_ : Array -> Nothing
@ -129,7 +129,7 @@ spec = Test.group "Pattern Matches" <|
Array -> Test.fail "Expected the Array module to match."
Array_Module -> Nothing
_ -> Test.fail "Expected the Array module to match."
Test.specify "should be able to match on the Polyglot type" <|
group_builder.specify "should be able to match on the Polyglot type" <|
random_gen = Java_Random.new
case random_gen of
Polyglot -> Nothing
@ -137,7 +137,7 @@ spec = Test.group "Pattern Matches" <|
case Polyglot of
Polyglot -> Nothing
_ -> Test.fail "Expected the Polyglot constructor to match."
Test.specify "should be able to match on the Any type" <|
group_builder.specify "should be able to match on the Any type" <|
value_1 = 1.23143
value_2 = "foo bar"
case value_1 of
@ -155,7 +155,7 @@ spec = Test.group "Pattern Matches" <|
case Any of
Any -> Nothing
_ -> Test.fail "Expected the Any constructor to match."
Test.specify "should be able to match on date/time values" <|
group_builder.specify "should be able to match on date/time values" <|
new_date = Date.new 2020 6 1
new_date_time = Date_Time.new 2020 6 1
new_time = Time_Of_Day.new 11 11
@ -206,7 +206,7 @@ spec = Test.group "Pattern Matches" <|
_ : Time_Zone -> Nothing
_ -> Test.fail "Expected timezone value to match Time_Zone type."

Test.specify "should be able to match on literal values" <|
group_builder.specify "should be able to match on literal values" <|
value_1 = 42
value_2 = "foo"
value_3 = 's\u0301'
@ -230,7 +230,7 @@ spec = Test.group "Pattern Matches" <|
"ę" -> Test.fail "Expected value to match constant."
'\u0065\u{301}' -> Nothing
_ -> Test.fail "Expected value to match constant."
Test.specify "should be able to match on literal values nested in constructors" <|
group_builder.specify "should be able to match on literal values nested in constructors" <|
value_1 = List.Cons 42 List.Nil
value_2 = List.Cons (List.Cons 42 List.Nil) List.Nil
case value_1 of
@ -247,21 +247,21 @@ spec = Test.group "Pattern Matches" <|
List.Cons _ List.Nil -> Test.fail "Expected value to match constant."
_ -> Test.fail "Expected value to match constant."

Test.specify "should be able to match on module rather than a type" <|
group_builder.specify "should be able to match on module rather than a type" <|
case Vector_Module of
_ : Vector -> Test.fail "Expected to match on module."
Vector -> Test.fail "Expected to match on module."
Vector_Module -> Nothing
_ -> Test.fail "Expected to match on module."

Test.specify "should be able to match on a type of a value rather tha module" <|
group_builder.specify "should be able to match on a type of a value rather tha module" <|
case [1,2,3] of
Vector_Module -> Test.fail "Expected to match on a type."
Vector -> Test.fail "Expected to match on a type."
_ : Vector -> Nothing
_ -> Test.fail "Expected to match on a type."

Test.specify "should correctly pattern match on array-like polyglot values" <|
group_builder.specify "should correctly pattern match on array-like polyglot values" <|
list = ArrayList.new
list.add 1
list.add 2
@ -301,7 +301,7 @@ spec = Test.group "Pattern Matches" <|
u = v.map foo
u.should_equal ["text", "array", "array"]

Test.specify "should correctly pattern match on supertype" <|
group_builder.specify "should correctly pattern match on supertype" <|
case 1 of
_ : Any -> Nothing
_ -> Test.fail "Expected to match on Any type."
@ -322,9 +322,13 @@ spec = Test.group "Pattern Matches" <|
_ : Any -> Nothing
_ -> Test.fail "Expected to match on Any."

Test.specify "should allow for pattern matching on unresolved symbol" <|
group_builder.specify "should allow for pattern matching on unresolved symbol" <|
case (.name) of
_ : Function -> Nothing
_ -> Test.fail "Expected to match on Function type."

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -6,8 +6,8 @@ import project.Semantic.Conversion.Methods
import project.Semantic.Conversion.Types
import project.Semantic.Conversion_Use.Hello

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


polyglot java import java.lang.Object
polyglot java import java.time.format.DateTimeFormatter
@ -92,57 +92,57 @@ Blob.from (that:Text) = Blob.Text that
Blob.from (that:File) = Blob.Binary that
Blob.from (that:Any) = Blob.Json that

spec =
Test.group "Conversion" <|
Test.specify "should be able to convert atoms" <|
add_specs suite_builder =
suite_builder.group "Conversion" group_builder->
group_builder.specify "should be able to convert atoms" <|
((Foo.from (Baz.Value 10)).foo + (Foo.from (Bar.Value 20)).foo) . should_equal 30
Foo.from Quaffle . foo . should_equal "quaffle"
Test.specify "should be able to convert text" <|
group_builder.specify "should be able to convert text" <|
Foo.from "123" . foo . should_equal 3
Test.specify "should be able to convert foreign text" <|
group_builder.specify "should be able to convert foreign text" <|
Foo.from (make_str 4) . foo . should_equal 9
Test.specify "should be able to convert numbers" <|
group_builder.specify "should be able to convert numbers" <|
Foo.from 4 . should_equal (Foo.Value [4, 0, 0, 0])
Foo.from (10^100) . should_equal (Foo.Value [10^100, 0, 0, 0])
Foo.from 4.5 . should_equal (Foo.Value [4.5, 0, 0, 0])
Test.specify "should be able to convert dataflow errors" <|
group_builder.specify "should be able to convert dataflow errors" <|
Foo.from (Error.throw <| My_Error.Value "i was bad") . should_equal (Foo.Value "oops")
Test.specify "should be able to convert functions" <|
group_builder.specify "should be able to convert functions" <|
Foo.from (e -> e) . foo . should_equal 5
Test.specify "should be able to convert booleans" <|
group_builder.specify "should be able to convert booleans" <|
Foo.from True . foo . should_be_true
Foo.from False . foo . should_be_false
Test.specify "should be able to convert arrays" <|
group_builder.specify "should be able to convert arrays" <|
Foo.from [1,2,3].to_array . foo . should_equal 3
Test.specify "should be able to convert Any" <|
group_builder.specify "should be able to convert Any" <|
Not_Foo.from that=Quaffle . notfoo . should_equal "ANY!!!"
Not_Foo.from 4 . notfoo . should_equal "ANY!!!"
Not_Foo.from (e -> e) . notfoo . should_equal "ANY!!!"
Not_Foo.from [1,2,3].to_array . notfoo . should_equal "ANY!!!"
Not_Foo.from [1,2,3] . notfoo . should_equal "ANY!!!"
Test.specify "apply Any conversion to foreign object" <|
group_builder.specify "apply Any conversion to foreign object" <|
Not_Foo.from Object.new . notfoo . should_equal "ANY!!!"
Test.specify "apply Any conversion to type" <|
group_builder.specify "apply Any conversion to type" <|
Not_Foo.from Boolean . notfoo . should_equal "ANY!!!"
Test.specify "should call intrinsic object conversions for unimported constructors" <|
group_builder.specify "should call intrinsic object conversions for unimported constructors" <|
Vector.from Methods.get_foo . should_equal ["foo"]
Test.specify "should call extension conversions" <|
group_builder.specify "should call extension conversions" <|
Text.from Methods.get_bar . should_equal "bar"

Test.specify "should fail graciously when there is no conversion" <|
group_builder.specify "should fail graciously when there is no conversion" <|
Panic.recover Any (Foo.from (Quux.Value 10)) . catch Any .to_display_text . should_equal "Could not find a conversion from `Quux.Value` to `Foo`."
Test.specify "should fail graciously when the conversion target is invalid" <|
group_builder.specify "should fail graciously when the conversion target is invalid" <|
Panic.recover Any (123.from (Quux.Value 10)) . catch Any .to_display_text . should_equal "123 is not a valid conversion target. Expected a type."

Test.specify "should be callable with by-name arguments" <|
group_builder.specify "should be callable with by-name arguments" <|
.from self=Foo that=4 first_param=2 . should_equal (Foo.Value [4, 2, 0, 0])
Test.specify "should support the use of multiple arguments" <|
group_builder.specify "should support the use of multiple arguments" <|
Foo.from that=4 second_param=1 2 . should_equal (Foo.Value [4, 2, 1, 0])

Test.specify "should play nicely with polyglot" <|
group_builder.specify "should play nicely with polyglot" <|
call_function .from Foo . should_equal (Foo.Value 8)

Test.specify "should support the meta functions" <|
group_builder.specify "should support the meta functions" <|
meta_from = Meta.meta .from
is_symbol = case meta_from of
_ : Meta.Unresolved_Symbol -> True
@ -159,37 +159,37 @@ spec =
meta_from.rename "foo" 123 . should_equal "foo called"
meta_from.rename "foo" . should_equal .foo

Test.specify "should not allow currying" <|
group_builder.specify "should not allow currying" <|
Panic.recover Any (Foo.from) . catch Any .to_display_text . should_equal "Conversion currying without `that` argument is not supported."

Test.specify "Use conversions to in Conversion_Use module" <|
group_builder.specify "Use conversions to in Conversion_Use module" <|
Hello.formulate [ Hello.Say "Proper", Hello.Say "Type" ] . should_equal "ProperType"
Hello.formulate [ Foo.Value "Perform", Bar.Value "Conversion" ] . should_equal "PERFORM conversion!"

Test.specify "Convert Foo.to Hello" <|
group_builder.specify "Convert Foo.to Hello" <|
hello = Foo.Value "Perform" . to Hello
hello . msg . should_equal "PERFORM "

Test.specify "Convert Bar.to Hello" <|
group_builder.specify "Convert Bar.to Hello" <|
hello = Bar.Value "Conversion" . to Hello
hello . msg . should_equal "conversion!"

Test.specify "Convert Bar.to Hello with other suffix" <|
group_builder.specify "Convert Bar.to Hello with other suffix" <|
hello = Bar.Value "Conversion" . to Hello suffix="?"
hello . msg . should_equal "conversion?"

Test.specify "Idempotent convert Hello.to Hello" <|
group_builder.specify "Idempotent convert Hello.to Hello" <|
Hello.Say "Hi there!" . to Hello . msg . should_equal "Hi there!"

Test.specify "Unknown convertion Text.to Hello" <|
group_builder.specify "Unknown convertion Text.to Hello" <|
h = Panic.recover No_Such_Conversion <| "Hi there!" . to Hello
h . should_fail_with No_Such_Conversion

Test.specify "Use Any.to in Conversion_Use module" <|
group_builder.specify "Use Any.to in Conversion_Use module" <|
Hello.formulate_with_to [ Hello.Say "Proper", Hello.Say "Type" ] . should_equal "ProperType"
Hello.formulate_with_to [ Foo.Value "Perform", Bar.Value "Conversion" ] . should_equal "PERFORM conversion!"

Test.specify "Avoid parameter conversion of Blob into Blob" <|
group_builder.specify "Avoid parameter conversion of Blob into Blob" <|
blob_me (b:Blob) = b

once = blob_me "Ahoj"
@ -200,7 +200,7 @@ spec =
once . should_equal second
Meta.is_same_object once second . should_be_true

Test.specify "Avoid Any.to conversion of Blob into Blob" <|
group_builder.specify "Avoid Any.to conversion of Blob into Blob" <|
blob_me b = b.to Blob

once = blob_me "Ahoj"
@ -211,7 +211,7 @@ spec =
once . should_equal second
Meta.is_same_object once second . should_be_true

Test.specify "Avoid inline conversion of Blob into Blob" <|
group_builder.specify "Avoid inline conversion of Blob into Blob" <|
once = "Ahoj" : Blob
second = once : Blob

@ -220,7 +220,7 @@ spec =
once . should_equal second
Meta.is_same_object once second . should_be_true

Test.specify "Avoid back and forth conversions" <|
group_builder.specify "Avoid back and forth conversions" <|
one = Forth.Times 1

two = Back.exchange one
@ -235,7 +235,7 @@ spec =
# no conversions needed when calling `exchange` methods
nine . should_equal one

Test.specify "Requesting Text & Foo" <|
group_builder.specify "Requesting Text & Foo" <|
check a (n : Text & Foo) = case a of
0 -> n.foo
1 -> n.take (First 3)
@ -247,7 +247,7 @@ spec =
fail = Panic.recover Type_Error <| check 0 True
fail . should_fail_with Type_Error

Test.specify "Requesting Foo & Not_Foo & Boolean" <|
group_builder.specify "Requesting Foo & Not_Foo & Boolean" <|
check a (n : Foo & Not_Foo & Boolean) = case a of
0 -> n.foo
1 -> n.not
@ -260,7 +260,7 @@ spec =
fail = Panic.recover Type_Error <| check 0 "not a boolean"
fail . should_fail_with Type_Error

Test.specify "Requesting Number & Integer & Float" <|
group_builder.specify "Requesting Number & Integer & Float" <|
m = MultiNumber.Value 5

m.to Number . should_equal 1.5
@ -285,7 +285,7 @@ spec =
to_6 (v : Number & Float & Integer) = v
to_6 m . should_equal 1.5

Test.specify "Requesting Integer & Fool" <|
group_builder.specify "Requesting Integer & Fool" <|
do_number (x : Integer & Fool) =
x.foo . should_equal "foo called"
x.fool . should_equal 42
@ -306,7 +306,7 @@ spec =

do_number 42

Test.specify "Requesting Float & Fool" <|
group_builder.specify "Requesting Float & Fool" <|
do_number (x : Float & Fool) =
x.foo . should_equal "foo called"
x.fool . should_equal 42.3
@ -327,7 +327,7 @@ spec =

do_number 42.3

Test.specify "Requesting Boolean & Fool" <|
group_builder.specify "Requesting Boolean & Fool" <|
do_boolean (x : Boolean & Fool) =
x.fool . should_equal True
x==x . should_be_true
@ -344,7 +344,7 @@ spec =

do_boolean True

Test.specify "Requesting Text & Fool" <|
group_builder.specify "Requesting Text & Fool" <|
do_text (x : Text & Fool) =
x.fool . should_equal "Hello"
x==x . should_be_true
@ -361,7 +361,7 @@ spec =

do_text "Hello"

Test.specify "Requesting Time_Of_Day & Fool" <|
group_builder.specify "Requesting Time_Of_Day & Fool" <|
now = Time_Of_Day.now

do_time (x : Time_Of_Day & Fool) =
@ -377,7 +377,7 @@ spec =

do_time now

Test.specify "Requesting Date & Fool" <|
group_builder.specify "Requesting Date & Fool" <|
now = Date.today

do_date (x : Date & Fool) =
@ -393,7 +393,7 @@ spec =

do_date now

Test.specify "Requesting Date_Time & Fool" <|
group_builder.specify "Requesting Date_Time & Fool" <|
now = Date_Time.now

do_time (x : Date_Time & Fool) =
@ -409,7 +409,7 @@ spec =

do_time now

Test.specify "Requesting Duration & Fool" <|
group_builder.specify "Requesting Duration & Fool" <|
now = Duration.new hours=5

do_duration (x : Duration & Fool) =
@ -425,7 +425,7 @@ spec =

do_duration now

Test.group "Polyglot Argument" <|
suite_builder.group "Polyglot Argument" group_builder->
f1 (x : DateTimeFormatter) = x.to_text
f2 (x : Text | DateTimeFormatter) = case x of
_ : DateTimeFormatter -> "DateTimeFormatter: "+x.to_text
@ -442,44 +442,44 @@ spec =

d = DateTimeFormatter.ISO_DATE

Test.specify "f1 d" <|
group_builder.specify "f1 d" <|
(f1 d).to_display_text . should_contain "ParseCaseSensitive(false)(Value"

Test.specify "f1 42" <|
group_builder.specify "f1 42" <|
Panic.recover Any (f1 42).to_display_text . should_fail_with Type_Error

Test.specify "f2 AAA" <|
group_builder.specify "f2 AAA" <|
(f2 "AAA").to_display_text . should_equal "TEXT: AAA"

Test.specify "f2 d" <|
group_builder.specify "f2 d" <|
(f2 d).to_display_text . should_contain "DateTimeFormatter: ParseCaseSensitive(false)(Value"

Test.specify "f3 AAA" <|
group_builder.specify "f3 AAA" <|
(f3 "AAA").to_display_text . should_equal "TEXT: AAA"

Test.specify "f3 d" <|
group_builder.specify "f3 d" <|
(f3 d).to_display_text . should_contain "DateTimeFormatter: ParseCaseSensitive(false)(Value"

Test.specify "f4 d" <|
group_builder.specify "f4 d" <|
(f4 d).to_display_text . should_contain "DateTimeFormatter: Parse"

Test.group "Polyglot Argument with Inheritance" <|
suite_builder.group "Polyglot Argument with Inheritance" group_builder->
f1 (x : JPeriod) = x.to_text
f2 (x : ChronoPeriod) = x.negated.to_text
f3 (x : TemporalAmount) = x.getUnits.to_text

quarter = JPeriod.ofMonths 3

Test.specify "f1 quarter" <|
group_builder.specify "f1 quarter" <|
(f1 quarter) . should_equal "P3M"

Test.specify "f2 quarter" <|
group_builder.specify "f2 quarter" <|
(f2 quarter) . should_equal "P-3M"

Test.specify "f3 quarter" <|
group_builder.specify "f3 quarter" <|
(f3 quarter) . should_equal "[Years, Months, Days]"

Test.group "Polyglot Conversion" <|
suite_builder.group "Polyglot Conversion" group_builder->
f1 (x : DateTimeFormatter) = x.to_text
f2 (x : Hello | DateTimeFormatter) = case x of
_ : DateTimeFormatter -> "DateTimeFormatter: "+x.to_text
@ -500,50 +500,54 @@ spec =

d = DateTimeFormatter.ISO_DATE

Test.specify "f1 d" <|
group_builder.specify "f1 d" <|
(f1 d).to_display_text . should_contain "ParseCaseSensitive(false)(Value"

Test.specify "f1 42" <|
group_builder.specify "f1 42" <|
Panic.recover Any (f1 42).to_display_text . should_fail_with Type_Error

Test.specify "f2 foo AAA" <|
group_builder.specify "f2 foo AAA" <|
(f2 <| Foo.Value "AAA").to_display_text . should_equal "HELLO: (Say 'AAA ')"

Test.specify "f2 hello AAA" <|
group_builder.specify "f2 hello AAA" <|
(f2 <| Hello.Say "AAA").to_display_text . should_equal "HELLO: (Say 'AAA')"

Test.specify "f2 d" <|
group_builder.specify "f2 d" <|
(f2 d).to_display_text . should_contain "DateTimeFormatter: ParseCaseSensitive(false)(Value"

Test.specify "f3 bar AAA" <|
group_builder.specify "f3 bar AAA" <|
(f3 <| Bar.Value "AAA").to_display_text . should_equal "HELLO: (Say 'aaa!')"

Test.specify "f3 hello AAA" <|
group_builder.specify "f3 hello AAA" <|
(f3 <| Hello.Say "AAA").to_display_text . should_equal "HELLO: (Say 'AAA')"

Test.specify "f3 AAA" <|
group_builder.specify "f3 AAA" <|
(f3 'AAA').to_display_text . should_equal "HELLO: (Say 'Any fallback:AAA?')"

Test.specify "f3 d" <|
group_builder.specify "f3 d" <|
(f3 d).to_display_text . should_contain "DateTimeFormatter: ParseCaseSensitive(false)(Value"

Test.specify "f4 d" <|
group_builder.specify "f4 d" <|
(f4 d).to_display_text . should_contain "DateTimeFormatter: Parse"

Test.specify "f4 AAA" <|
group_builder.specify "f4 AAA" <|
(f4 'AAA').to_display_text . should_equal "OTHER"

Test.specify "f5 AAA" <|
group_builder.specify "f5 AAA" <|
(f5 'AAA').to_display_text . should_equal "HELLO: (Say 'Any fallback:AAA?')"

Test.specify "f5 hello AAA" <|
group_builder.specify "f5 hello AAA" <|
(f5 <| Hello.Say "AAA").to_display_text . should_equal "HELLO: (Say 'AAA')"

Test.specify "f5 d" <|
group_builder.specify "f5 d" <|
(f5 d).to_display_text . should_contain "HELLO: (Say 'Any fallback:ParseCaseSensitive(false)(Value"

Hello.from (that:Any) suffix="?" = Hello.Say <| ("Any fallback:"+that.to_text) + suffix
Hello.from (that:Foo) suffix=" " = Hello.Say <| (that.foo.to_case Case.Upper) + suffix
Hello.from (that:Bar) suffix="!" = Hello.Say <| (that.bar.to_case Case.Lower) + suffix

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -2,12 +2,16 @@ from Standard.Base import all

import project.Semantic.Deep_Export.Internal

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

spec =
Test.group "Deep Exports" <|
Test.specify "should allow to re-export a symbol through a module hierarchy" <|

add_specs suite_builder =
suite_builder.group "Deep Exports" group_builder->
group_builder.specify "should allow to re-export a symbol through a module hierarchy" <|
Internal.my_fun.should_equal 478

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -1,7 +1,7 @@
from Standard.Base import all

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


from project.Semantic.Default_Args_Spec.Box import all

@ -21,15 +21,15 @@ type G a=4 b=Bar
local_fun a b=1 c=local_const = a + b + c
local_const = 42

spec =
Test.group "Atom Constructors" <|
Test.specify "should be allowed to use primitive default arguments" <|
add_specs suite_builder =
suite_builder.group "Atom Constructors" group_builder->
group_builder.specify "should be allowed to use primitive default arguments" <|
x = A 1
x.b.should_equal 1
y = A 1
y.b.should_equal 1

Test.specify "should be allowed to use non-primitive default arguments" <|
group_builder.specify "should be allowed to use non-primitive default arguments" <|
a = B 1 (Foo False)
a.b.should_equal (Foo False)
b = B 1
@ -40,18 +40,22 @@ spec =
d.b.b.should_equal (Foo False)
d.b.c.should_equal False

Test.specify "should be allowed to use default arguments that refer to previous parameters" <|
group_builder.specify "should be allowed to use default arguments that refer to previous parameters" <|
e = E 1
e.b.should_equal 1
e.c.should_equal 2
f = F 1
f.c.should_equal False

Test.specify "apply defaulted arguments that are themselves fully defaulted" <|
group_builder.specify "apply defaulted arguments that are themselves fully defaulted" <|
G.should_equal (G 4 (Bar 1 (Foo False) False))

Test.group "Functions" <|
Test.specify "should apply default arguments that involve local functions" <|
suite_builder.group "Functions" group_builder->
group_builder.specify "should apply default arguments that involve local functions" <|
local_fun 0 . should_equal 43

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -1,7 +1,7 @@
from Standard.Base import all

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


polyglot java import java.math.BigInteger as Java_Big_Integer
polyglot java import java.nio.file.Path as Java_Path
@ -132,9 +132,9 @@ foreign js js_obj a b = """
m.b = b;
return m;

spec =
Test.group "Operator ==" <|
Test.specify "should handle primitive values" <|
add_specs suite_builder =
suite_builder.group "Operator ==" group_builder->
group_builder.specify "should handle primitive values" <|
(2 == (2.0)).should_be_true
(2 == (2.1)).should_be_false
(2.0).should_equal 2
@ -143,16 +143,16 @@ spec =
(js_true == False).should_be_false
(js_text_foo == "foo").should_be_true

Test.specify "should handle Text via NFD normalization" <|
group_builder.specify "should handle Text via NFD normalization" <|
('ś' == 's\u0301') . should_be_true
('e\u0301abc' == 'éabc') . should_be_true
('e\u0301abc' == 'é') . should_be_false
((Point.Value 'ś' 23.0) == (Point.Value 's\u0301' 23)) . should_be_true

Test.specify "should dispatch to overriden `==` on atoms" <|
group_builder.specify "should dispatch to overriden `==` on atoms" <|
(Child.Value 11 == Child.Value 111) . should_be_true

Test.specify "should dispatch to overriden `==` on atoms transitively" <|
group_builder.specify "should dispatch to overriden `==` on atoms transitively" <|
child1 = Child.Value 11
parent1 = Parent.Value child1
grand_parent1 = GrandParent.Value parent1
@ -163,34 +163,34 @@ spec =

(grand_parent1 == grand_parent2).should_be_true

Test.specify "should handle `==` on types with many fields with custom comparator" <|
group_builder.specify "should handle `==` on types with many fields with custom comparator" <|
many_fields1 = ManyFieldType.Value (Child.Value 1) (Child.Value 2) (Child.Value 3) (Child.Value 4) (Child.Value 5) (Child.Value 6) (Child.Value 7) (Child.Value 8) (Child.Value 9) (Child.Value 10) (Child.Value 11) (Child.Value 12) (Child.Value 13) (Child.Value 14) (Child.Value 15)
many_fields2 = ManyFieldType.Value (Child.Value 101) (Child.Value 102) (Child.Value 103) (Child.Value 104) (Child.Value 105) (Child.Value 106) (Child.Value 107) (Child.Value 108) (Child.Value 109) (Child.Value 110) (Child.Value 111) (Child.Value 112) (Child.Value 113) (Child.Value 114) (Child.Value 115)

(many_fields1 == many_fields2).should_be_true

Test.specify "should handle `==` on atoms with fields with mixed comparators" <|
group_builder.specify "should handle `==` on atoms with fields with mixed comparators" <|
obj_1 = FourFieldType.Value (Child.Value 1) 42 (Child.Value 2) 83
obj_2 = FourFieldType.Value (Child.Value 101) 42 (Child.Value 102) 83
(obj_1 == obj_2).should_be_true

Test.specify "should be able to compare atoms with different constructors" <|
group_builder.specify "should be able to compare atoms with different constructors" <|
((CustomEqType.C1 10) == (CustomEqType.C2 7 3)).should_be_true
((CustomEqType.C1 0) == (CustomEqType.C2 7 3)).should_be_false

Test.specify "should dispatch to equals on host values" <|
group_builder.specify "should dispatch to equals on host values" <|
path1 = Java_Path.of "home" "user" . resolve "file.txt"
path2 = Java_Path.of "home" "user" "file.txt"
(path1 == path2).should_be_true
path3 = path1.resolve "subfile.txt"
(path3 == path2).should_be_false

Test.specify "should return False for different Atoms with same fields" <|
group_builder.specify "should return False for different Atoms with same fields" <|
rect = Rect.Value (Point.Value 1 2) (Point.Value 3 4)
four_field = FourFieldType.Value 1 2 3 4
(rect == four_field).should_be_false

Test.specify "Any.== should check for Meta.is_same_object" <|
group_builder.specify "Any.== should check for Meta.is_same_object" <|
obj_1 = My_Nan.Value 42
obj_2 = My_Nan.Value 42
(obj_1 == obj_2).should_be_false
@ -198,25 +198,25 @@ spec =
(obj_1 == obj_1).should_be_true
Meta.is_same_object obj_1 obj_1 . should_be_true

Test.specify "should handle `==` on types" <|
group_builder.specify "should handle `==` on types" <|
(Child == Child).should_be_true
(Child == Point).should_be_false
(Point == Child).should_be_false
(Boolean == Any).should_be_false
(Boolean == Boolean).should_be_true

Test.specify "should handle `==` on types with Any as first operand" <|
group_builder.specify "should handle `==` on types with Any as first operand" <|
(Any == Boolean).should_be_false
(Any == Any).should_be_true

Test.specify "should dispatch to overriden `==` in vectors" <|
group_builder.specify "should dispatch to overriden `==` in vectors" <|
([(Child.Value 1)] == [(Child.Value 101)]).should_be_true
([(Child.Value 1)] == [(Child.Value 2)]).should_be_false

Test.specify "should dispatch to overriden `==` in arrays" <|
group_builder.specify "should dispatch to overriden `==` in arrays" <|
([Child.Value 1].to_array == [Child.Value 101].to_array).should_be_true

Test.specify "should handle recursive atoms without custom `==`" <|
group_builder.specify "should handle recursive atoms without custom `==`" <|
rnd = (Random.new_generator seed=42).java_random
trees = (0.up_to 5).map _->
create_random_tree 5 rnd
@ -224,13 +224,13 @@ spec =
dupl_tree = tree.deep_copy
Test.with_clue "Seed sed to 42" (tree == dupl_tree).should_be_true

Test.specify "partially applied constructors aren't == " <|
group_builder.specify "partially applied constructors aren't == " <|
f1 = CustomEqType.C2 10
f2 = CustomEqType.C2 10
f1==f2 . should_be_false

Test.group "Polyglot Operator ==" <|
Test.specify "should not try to compare members" <|
suite_builder.group "Polyglot Operator ==" group_builder->
group_builder.specify "should not try to compare members" <|
x = IntHolder.new 5
y = IntHolder.new 5
z = IntHolder.new 3
@ -240,7 +240,7 @@ spec =
x==x . should_be_true
z==z . should_be_true

Test.specify "should not try to compare members in JS object" <|
group_builder.specify "should not try to compare members in JS object" <|
x = js_obj 5 3
y = js_obj 5 3
z = js_obj 3 5
@ -250,7 +250,7 @@ spec =
x==x . should_be_true
z==z . should_be_true

Test.specify "should invoke equals" <|
group_builder.specify "should invoke equals" <|
x = IntHolderEquals.new 5
y = IntHolderEquals.new 5
z = IntHolderEquals.new 3
@ -260,23 +260,27 @@ spec =
x==x . should_be_true
z==z . should_be_true

Test.specify "should compare big integer" <|
group_builder.specify "should compare big integer" <|
x = Java_Big_Integer.new "54024430107564432"
y = Java_Big_Integer.new "54024430107564432"
x==y . should_be_true

Test.specify "JSON is found different" <|
group_builder.specify "JSON is found different" <|
x = js_json 10 5
y = js_json 10 5
x==y . should_be_false

Test.specify "Identical JSON is found equal" <|
group_builder.specify "Identical JSON is found equal" <|
x = js_json 10 5
x==x . should_be_true

Test.specify "JavaScript Map is found same" <|
group_builder.specify "JavaScript Map is found same" <|
x = js_map 10 5
y = js_map 10 5
x==y . should_be_true

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -13,8 +13,8 @@ polyglot java import java.lang.Exception as JException
polyglot java import java.util.ArrayList
polyglot java import java.util.Random as Java_Random

from Standard.Test import Problems, Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


type My_Type
Value foo
@ -44,9 +44,9 @@ foreign js throw_js_str = """
foreign js throw_js_arr = """
throw [1,2,3];

spec =
Test.group "No Method Errors" <|
Test.specify "should be recoverable" <|
add_specs suite_builder =
suite_builder.group "No Method Errors" group_builder->
group_builder.specify "should be recoverable" <|
err_1 = Panic.recover Any (123 . foobar "baz") . catch
err_2 = Panic.recover Any ("foo" . baz 123) . catch
err_3 = Panic.recover Any (My_Type.Value False . nope) . catch
@ -60,48 +60,48 @@ spec =
err_3.target.to_text.should_equal "(My_Type.Value False)"
err_3.method_name.should_equal "nope"

Test.group "Dataflow Errors" <|
Test.specify "should be recoverable" <|
suite_builder.group "Dataflow Errors" group_builder->
group_builder.specify "should be recoverable" <|
err = Error.throw 42
err.catch . should_equal 42
err.should_fail_with Integer

Test.specify "should allow recovery of only a specific error-type" <|
group_builder.specify "should allow recovery of only a specific error-type" <|
recover_illegal_argument ~action =
action . catch Illegal_Argument err->
"recovered error: "+err.message
(recover_illegal_argument (Error.throw (Illegal_Argument.Error "foo"))) . should_equal "recovered error: foo"
(recover_illegal_argument (Error.throw (Illegal_State.Error "bar"))) . should_fail_with Illegal_State

Test.specify "should implement to_display_text" <|
group_builder.specify "should implement to_display_text" <|
Error.throw Nothing . to_display_text . should_equal "Error: Nothing"

Test.specify "should implement to_text" <|
group_builder.specify "should implement to_text" <|
Error.throw Nothing . to_text . should_equal "(Error: Nothing)"
Error.to_text Error . should_equal "Error"
case (Error.to_text) of
_ : Function -> Nothing
_ -> Test.fail "Expected the expression to be of Function type"

Test.specify "should be able to be mapped" <|
group_builder.specify "should be able to be mapped" <|
error = Error.throw 42
regular = 10
f x = 2*x
(error.map_error f . catch).should_equal 84
regular.map_error f . should_equal 10

Test.specify "should allow to check if the value is an error" <|
group_builder.specify "should allow to check if the value is an error" <|
error = Error.throw 42
regular = 10

error.is_error . should_equal True
regular.is_error . should_equal False

Test.specify "should short-circuit polyglot evaluation" <|
group_builder.specify "should short-circuit polyglot evaluation" <|
error = Error.throw 42
Java_Random.new error . should_fail_with Integer

Test.specify "should allow to inspect their stacktrace" <|
group_builder.specify "should allow to inspect their stacktrace" <|
error = throw_a_bar
error.catch . should_equal "bar"
arr = error.stack_trace
@ -110,12 +110,12 @@ spec =
arr.first.source_location.file.name . should_equal "Error_Spec.enso"
arr.first.source_location.start_line . should_equal 32

Test.specify "should allow to inspect the stack trace of a recovered panic" <|
group_builder.specify "should allow to inspect the stack trace of a recovered panic" <|
error = Panic.recover Any <| throw_a_bar_panicking
error.catch . should_equal "bar"
error.stack_trace.first.name . should_equal "Error_Spec.throw_a_bar_panicking"

Test.specify "it should be possible to introduce arbitrary dataflow dependencies between values using `if_not_error`" <|
group_builder.specify "it should be possible to introduce arbitrary dataflow dependencies between values using `if_not_error`" <|
42.if_not_error 23 . should_equal 23

r1 = Error.throw (Illegal_State.Error "foo") . if_not_error 23
@ -123,13 +123,13 @@ spec =

42.if_not_error (Error.throw (Illegal_State.Error "foo")) . should_fail_with Illegal_State

Test.specify "should allow calling catch on types" <|
group_builder.specify "should allow calling catch on types" <|
Vector.catch . should_equal Vector
Any.catch . should_equal Any
Boolean.catch . should_equal Boolean

Test.group "Panics" <|
Test.specify "should be able to be caught" <|
suite_builder.group "Panics" group_builder->
group_builder.specify "should be able to be caught" <|
result = Panic.catch Any (Panic.throw 42) caught_panic->
caught_panic.payload+10
result . should_equal 52
@ -138,17 +138,17 @@ spec =
caught_panic.payload+10
result_2 . should_equal 3

Test.specify "should not mix with dataflow errors" <|
group_builder.specify "should not mix with dataflow errors" <|
result = Panic.catch Any (Error.throw 42) caught_panic->
caught_panic.payload+10
result.catch . should_equal 42

Test.specify "should provide access to stack traces" <|
group_builder.specify "should provide access to stack traces" <|
stack = Panic.catch Any throw_a_bar_panicking caught_panic->
caught_panic.stack_trace
stack.first.name . should_equal "Error_Spec.throw_a_bar_panicking"

Test.specify "should provide access to Java stack traces" <|
group_builder.specify "should provide access to Java stack traces" <|
stack_1 = Panic.recover Any (do_a_parse "foo") . stack_trace
stack_1.at 0 . name . should_equal "Error_Spec.do_a_parse"

@ -156,14 +156,14 @@ spec =
caught_panic.stack_trace
stack_2.at 0 . name . should_equal "Error_Spec.do_a_parse"

Test.specify "should be able to be rethrown without changing the stack trace" <|
group_builder.specify "should be able to be rethrown without changing the stack trace" <|
caught_panic = Panic.catch Any throw_a_bar_panicking x->x
rethrow foo = Panic.throw foo
rethrown_panic = Panic.catch Any (rethrow caught_panic) x->x
(rethrown_panic.stack_trace.length > 0).should_be_true
(rethrown_panic.stack_trace.map .name) . should_equal (caught_panic.stack_trace.map .name)

Test.specify "should allow the pattern for handling selected exceptions" <|
group_builder.specify "should allow the pattern for handling selected exceptions" <|
perform_operation ~action =
Panic.catch Any action caught_panic->
if caught_panic.payload == "bar" then 42 else
@ -174,7 +174,7 @@ spec =
error.catch . should_equal "foo"
error.stack_trace.first.name . should_equal "Error_Spec.throw_a_foo_panicking"

Test.specify "should work as in the examples" <|
group_builder.specify "should work as in the examples" <|
fun ~act =
Panic.catch Any act caught_panic-> case caught_panic.payload of
Illegal_Argument.Error message _ -> "Illegal arguments were provided: "+message
@ -183,7 +183,7 @@ spec =
Panic.recover Any (fun (Panic.throw "foo")) . catch . should_equal "foo"
Panic.recover Any (fun (Panic.throw (Illegal_Argument.Error "msg" Nothing))) . should_equal "Illegal arguments were provided: msg"

Test.specify "should allow catching Java exceptions easily" <|
group_builder.specify "should allow catching Java exceptions easily" <|
parse str =
Panic.catch NumberFormatException (Long.parseLong str) caught_panic->
Error.throw (Illegal_Argument.Error "The provided string is not a valid number: "+caught_panic.payload.getMessage)
@ -193,7 +193,7 @@ spec =
dataflow_error.catch . should_equal (Illegal_Argument.Error 'The provided string is not a valid number: For input string: "foo"')
Test.expect_panic_with (parse 0.0) Unsupported_Argument_Types

Test.specify "should allow to throw raw Java exceptions" <|
group_builder.specify "should allow to throw raw Java exceptions" <|
exception = Panic.catch NumberFormatException (throw_raw_java "foo") (p -> p)
exception.payload.getMessage . should_equal "foo"
Panic.get_attached_stack_trace exception . first . name . should_equal "Error_Spec.throw_raw_java"
@ -202,7 +202,7 @@ spec =
caught_panic.stack_trace.first.name . should_equal "Error_Spec.throw_raw_java"
caught_panic.payload . should_be_a JException

Test.specify "should allow to re-throw raw Java exceptions" <|
group_builder.specify "should allow to re-throw raw Java exceptions" <|
message_1 = Ref.new ""
caught_1 = Panic.recover Any <|
Panic.catch NumberFormatException (do_a_parse "foo") caught_panic->
@ -221,7 +221,7 @@ spec =
caught_2.catch . should_be_a JException
caught_2.stack_trace.first.name . should_equal "Error_Spec.throw_raw_java"

Test.specify "should allow to catch a specific panic type easily" <|
group_builder.specify "should allow to catch a specific panic type easily" <|
message_1 = Panic.catch Illegal_Argument (Panic.throw (Illegal_Argument.Error "msg" Nothing)) caught_panic->
caught_panic.payload.message
message_1 . should_equal "msg"
@ -247,7 +247,7 @@ spec =
Test.expect_panic_with (Panic.catch Illegal_Argument (Long.parseLong "foo") (_->"polyglot3")) JException
Test.expect_panic_with (Panic.catch Nothing (Long.parseLong 0) (_->"polyglot4")) Unsupported_Argument_Types

Test.specify "should be able to be recovered selectively" <|
group_builder.specify "should be able to be recovered selectively" <|
Panic.recover Illegal_Argument (Panic.throw (Illegal_Argument.Error "msg" Nothing)) . catch . should_be_a Illegal_Argument.Error
Panic.recover Any (Panic.throw (Illegal_Argument.Error "msg" Nothing)) . catch . should_be_a Illegal_Argument.Error
Panic.recover [Illegal_Argument] (Panic.throw (Illegal_Argument.Error "msg" Nothing)) . catch . should_be_a Illegal_Argument.Error
@ -262,14 +262,14 @@ spec =
Panic.recover Any throw_a_bar_panicking . catch . should_equal "bar"
Panic.recover Text throw_a_bar_panicking . stack_trace . first . name . should_equal "Error_Spec.throw_a_bar_panicking"

Test.specify "Unsupported_Argument_Types message should be readable" <|
group_builder.specify "Unsupported_Argument_Types message should be readable" <|
check err =
(err.payload.message) . should_equal "Cannot convert '42'(language: Java, type: java.lang.Long) to Java type 'java.lang.String': Invalid or lossy primitive coercion."
(err.payload.message) . should_equal (err.payload.to_display_text)

Panic.catch Unsupported_Argument_Types (Long.decode 42) handler=check

Test.specify "should be able to throw and catch polyglot arrays" <|
group_builder.specify "should be able to throw and catch polyglot arrays" <|
handle_panic caught prefix="" =
if prefix == "" then caught.payload.to_text else prefix+": "+caught.payload.to_text

@ -291,7 +291,7 @@ spec =
(Panic.throw (ArrayList.new))
catch_array . should_equal "[]"

Test.specify "should be able to throw and catch polyglot values" <|
group_builder.specify "should be able to throw and catch polyglot values" <|
caught_js_int_panic = Panic.catch Any handler=err->"Any:"+err.payload.to_text <|
Panic.catch Integer handler=err->"JS integer:"+err.payload.to_text <|
throw_js_int
@ -310,7 +310,7 @@ spec =

caught_js_arr_panic . should_equal "JS array:[1, 2, 3]"

Test.specify "should allow to use `with_finalizer`" <|
group_builder.specify "should allow to use `with_finalizer`" <|
ref1 = Ref.new ""
r1 = Panic.with_finalizer (ref1.put "finalized") <|
42
@ -332,7 +332,7 @@ spec =
r3.should_fail_with Illegal_Argument
ref3.get . should_equal "finalized"

Test.specify "should propagate any panics raised in `with_finalizer` finalization" <|
group_builder.specify "should propagate any panics raised in `with_finalizer` finalization" <|
v1 = Vector.new_builder
c1 = Panic.catch Any handler=(.payload) <|
do_finalize =
@ -357,7 +357,7 @@ spec =
c2 . should_equal "finalizer"
v2.to_vector . should_equal [1, 2]

Test.group "Type Errors" <|
suite_builder.group "Type Errors" group_builder->
my_func x y =
x + y

@ -368,100 +368,100 @@ spec =

extract x:My_Type = x.foo

Test.specify "everything is ok" <|
group_builder.specify "everything is ok" <|
neg (my_func -5 -2) . should_equal 7

Test.specify "try to apply one argument" <|
group_builder.specify "try to apply one argument" <|
r = Panic.recover Type_Error <| neg (my_func -5)
r . should_fail_with Type_Error
r.to_display_text.should_contain "Try to apply y argument."

Test.specify "try to apply two arguments" <|
group_builder.specify "try to apply two arguments" <|
r = Panic.recover Type_Error <| neg my_func
r . should_fail_with Type_Error
r.to_display_text.should_contain "Try to apply x, y arguments."

Test.specify "apply two arguments with one defaulted" <|
group_builder.specify "apply two arguments with one defaulted" <|
r = Panic.recover Type_Error <| neg my_defaulted_func
r . should_fail_with Type_Error
r.to_display_text.should_contain "Try to apply y argument."

Test.specify "printed non-defaulted argument" <|
group_builder.specify "printed non-defaulted argument" <|
r = Panic.recover Type_Error <| neg (my_defaulted_func 33)
r . should_fail_with Type_Error
r.to_display_text.should_contain "Try to apply y argument."
r.to_display_text.should_contain "x=33"

Test.specify "report unapplied constructor nicely" <|
group_builder.specify "report unapplied constructor nicely" <|
r = Panic.recover Type_Error <| extract My_Type.Value
r . should_fail_with Type_Error
r.to_display_text.should_contain "Try to apply foo argument."

Test.specify "report unapplied constructor with default value nicely" <|
group_builder.specify "report unapplied constructor with default value nicely" <|
r = Panic.recover Type_Error <| extract My_Type.Default_Value
r . should_fail_with Type_Error
r.to_display_text.should_contain "Try to apply bar argument."

Test.specify "report non-defaulted constructor argument" <|
group_builder.specify "report non-defaulted constructor argument" <|
r = Panic.recover Type_Error <| extract (My_Type.Default_Value foo=33)
r . should_fail_with Type_Error
r.to_display_text.should_contain "Try to apply bar argument."
r.to_display_text.should_contain "foo=33"

Test.specify "report partially applied constructor nicely" <|
group_builder.specify "report partially applied constructor nicely" <|
r = Panic.recover Type_Error <| extract (My_Type.Multi_Value 42)
r . should_fail_with Type_Error
r.to_display_text.should_contain "Try to apply bar argument."

Test.specify "try to apply two arguments with over-saturated" <|
group_builder.specify "try to apply two arguments with over-saturated" <|
r = Panic.recover Type_Error <| neg (my_func z=10)
r . should_fail_with Type_Error
r.to_display_text . should_contain "Try to apply x, y arguments"

Test.specify "types and unapplied arguments" <|
group_builder.specify "types and unapplied arguments" <|
c = C.Baz C.to_text
r = Panic.recover Type_Error <| neg (c.to_num c=3)
r . should_fail_with Type_Error
r.to_display_text . should_contain "Try to apply a, b arguments"

Test.group "Wrapped errors" <|
Test.specify ".catch unwraps an error when the inner error is explicitly specified" <|
suite_builder.group "Wrapped errors" group_builder->
group_builder.specify ".catch unwraps an error when the inner error is explicitly specified" <|
e = Error.throw (My_Error_Wrapper.Error (My_Error.Error 12)) . catch My_Error
e . should_equal (My_Error.Error 12)

Test.specify ".catch does not unwrap an error when the wrapper is explicitly specified" <|
group_builder.specify ".catch does not unwrap an error when the wrapper is explicitly specified" <|
e = Error.throw (My_Error_Wrapper.Error (My_Error.Error 12)) . catch My_Error_Wrapper
e . should_equal (My_Error_Wrapper.Error (My_Error.Error 12))

Test.specify ".catch does not unwrap an error when the wrapper is not specified" <|
group_builder.specify ".catch does not unwrap an error when the wrapper is not specified" <|
e = Error.throw (My_Error_Wrapper.Error (My_Error.Error 12)) . catch
e . should_equal (My_Error_Wrapper.Error (My_Error.Error 12))

Test.specify "When unwrapping, .catch unwraps an error fully when it is wrapped multiple times" <|
group_builder.specify "When unwrapping, .catch unwraps an error fully when it is wrapped multiple times" <|
wrap = My_Error_Wrapper.Error
error = My_Error.Error 12
[error, wrap error, wrap (wrap error), wrap (wrap (wrap error))].map err->
e = Error.throw err . catch My_Error
e . should_equal (My_Error.Error 12)

Test.specify "works with Nothing as an error" <|
group_builder.specify "works with Nothing as an error" <|
e = Error.throw (My_Error_Wrapper.Error Nothing) . catch Nothing
e . should_equal Nothing

Test.specify "Can unwrap a wrapped error" <|
group_builder.specify "Can unwrap a wrapped error" <|
wrapped_error = My_Error_Wrapper.Error "error"
wrapped_error2 = My_Error_Wrapper.Error (My_Error_Wrapper.Error "error")
Error.unwrap wrapped_error . should_equal "error"
Error.unwrap wrapped_error2 . should_equal "error"

Test.specify "Can unwrap errors in test utils" <|
group_builder.specify "Can unwrap errors in test utils" <|
wrapped_error = My_Error_Wrapper.Error "error"
x = Warning.attach wrapped_error "12"
Problems.expect_warning "error" x
Problems.expect_only_warning "error" x
Error.throw wrapped_error . should_fail_with "error"
|
||||
|
||||
Test.specify "Unwrapping a non-error-wraper is the identity" <|
|
||||
group_builder.specify "Unwrapping a non-error-wraper is the identity" <|
|
||||
not_wrapped = Illegal_Argument.Error "abc"
|
||||
Error.unwrap not_wrapped . should_equal not_wrapped
|
||||
|
||||
@ -470,4 +470,8 @@ type C
|
||||
|
||||
C.to_num self a b c = a+b+c
|
||||
|
||||
main = Test_Suite.run_main spec
|
||||
main =
|
||||
suite = Test.build suite_builder->
|
||||
add_specs suite_builder
|
||||
suite.run_with_filter
|
||||
|
||||
|
@ -2,9 +2,9 @@ from Standard.Base import all

import project.Semantic.Import_Loop.B

from Standard.Test import Test
import Standard.Test.Extensions
from Standard.Test_New import all

spec = Test.group "Looping Imports" <|
    Test.specify "should behave correctly and not loop the compiler" <|

add_specs suite_builder = suite_builder.group "Looping Imports" group_builder->
    group_builder.specify "should behave correctly and not loop the compiler" <|
        B.My_Type.baz . should_equal 11

@ -1,5 +1,5 @@
from Standard.Base import all
from Standard.Test import all
from Standard.Test_New import all

fib n b=1 = if n <= 1 then b else
    a = fib n-1
@ -12,11 +12,11 @@ fib2 n f=1 s=1 =

    acc f s 1

spec =
    Test.group "Instrument fibonacci" <|
add_specs suite_builder =
    suite_builder.group "Instrument fibonacci" group_builder->
        a_plus_b_uuid = "00000000-aaaa-bbbb-0000-000000000000" # UUID for a+b

        Test.specify "collect and filter on return updates" <|
        group_builder.specify "collect and filter on return updates" <|
            b = Vector.new_builder

            collect uuid:Text result =
@ -45,7 +45,7 @@ spec =
            # no more instrumenting after finalize
            b.to_vector.length . should_equal 1

        Test.specify "access local variables " <|
        group_builder.specify "access local variables " <|
            b = Vector.new_builder

            collect uuid:Text ~result =
@ -77,7 +77,7 @@ spec =
            # no more instrumenting after finalize
            b.to_vector.length . should_equal 3

        Test.specify "replay with caches and specify different result" <|
        group_builder.specify "replay with caches and specify different result" <|
            replay uuid:Text = case uuid of
                "00000000-ffff-bbbb-0000-000000000000" -> 42
                _ -> Nothing
@ -93,13 +93,13 @@ spec =
            result = fib 10
            result . should_equal 89

    Test.group "Instrument @Tail_Call fibonacci" <|
        Test.specify "check fib & fib2 are the same" <|
    suite_builder.group "Instrument @Tail_Call fibonacci" group_builder->
        group_builder.specify "check fib & fib2 are the same" <|
            r1 = -10.up_to 10 . map fib
            r2 = -10.up_to 10 . map fib2
            r1 . should_equal r2

        Test.specify "call and collect on return updates" <|
        group_builder.specify "call and collect on return updates" <|
            n1_plus_n2 = "00000000-eeee-bbbb-2222-000000000000" # UUID for n1+n2
            b = Vector.new_builder

@ -136,7 +136,7 @@ spec =
            # no more instrumenting after finalize
            b.to_vector.length . should_equal 10

        Test.specify "* instead of + on return updates" <|
        group_builder.specify "* instead of + on return updates" <|
            n1_plus_n2 = "00000000-eeee-bbbb-2222-000000000000" # UUID for n1+n2

            call_fn uuid:Text fn:Function (args : Vector Any) =
@ -162,7 +162,7 @@ spec =
            result3 = fib2 10
            result3 . should_equal 89

        Test.specify "Caching in on_enter prevents on_call" <|
        group_builder.specify "Caching in on_enter prevents on_call" <|
            n1_plus_n2 = "00000000-eeee-bbbb-2222-000000000000" # UUID for n1+n2

            ninety uuid:Text =
@ -188,7 +188,7 @@ spec =
            result3 . should_equal 89

        Test.specify "Access (even default) arguments of calls" <|
        group_builder.specify "Access (even default) arguments of calls" <|
            b = Vector.new_builder

            call_fn uuid:Text fn:Function args:Any =
@ -225,7 +225,7 @@ spec =
            # no change to the value
            b.to_vector.to_text . should_equal "[8]"

        Test.specify "GC disables Instrumentor" <|
        group_builder.specify "GC disables Instrumentor" <|
            n1_plus_n2 = "00000000-eeee-bbbb-2222-000000000000" # UUID for n1+n2

            enable_instrumentor value:Integer =
@ -254,9 +254,13 @@ spec =
            result3 = fib2 10
            result3 . should_equal 89

main = Test_Suite.run_main spec
main =
    suite = Test.build suite_builder->
        add_specs suite_builder
    suite.run_with_filter


#### METADATA ####
[[{"index":{"value":131},"size":{"value":3}},"00000000-aaaa-bbbb-0000-000000000000"],[{"index":{"value":73},"size":{"value":61}},"00000000-ffff-bbbb-0000-000000000000"],[{"index":{"value":226},"size":{"value":5}},"00000000-eeee-bbbb-2222-000000000000"],[{"index":{"value":119},"size":{"value":7}},"00000000-ffff-1111-bbbb-000000000000"]]
[[{"index":{"value":135},"size":{"value":3}},"00000000-aaaa-bbbb-0000-000000000000"],[{"index":{"value":77},"size":{"value":61}},"00000000-ffff-bbbb-0000-000000000000"],[{"index":{"value":230},"size":{"value":5}},"00000000-eeee-bbbb-2222-000000000000"],[{"index":{"value":123},"size":{"value":7}},"00000000-ffff-1111-bbbb-000000000000"]]

@ -2,8 +2,8 @@ from Standard.Base import all
import Standard.Base.Errors.Common.No_Such_Method
import Standard.Base.Errors.Common.Compile_Error

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

polyglot java import java.lang.Float as Java_Float
polyglot java import java.lang.Integer as Java_Integer
@ -18,48 +18,48 @@ polyglot java import org.enso.base.statistics.Moments

Any.test_me self x = x.is_nothing

spec =
    Test.group "Java FFI" <|
        Test.specify "should call methods imported from Java" <|
add_specs suite_builder =
    suite_builder.group "Java FFI" group_builder->
        group_builder.specify "should call methods imported from Java" <|
            Long.sum 1 2 . should_equal 3

        Test.specify "should call constructors imported from Java" <|
        group_builder.specify "should call constructors imported from Java" <|
            list = ArrayList.new
            list.add 432
            list.get 0 . should_equal 432
        Test.specify "should report missing method error on Java Arrays" pending="Failing due to #6609" <|
        group_builder.specify "should report missing method error on Java Arrays" pending="Failing due to #6609" <|
            list = ArrayList.new
            list.add 432
            Test.expect_panic_with (list.asList) No_Such_Method
        Test.specify "should auto-convert numeric types across the polyglot boundary" <|
        group_builder.specify "should auto-convert numeric types across the polyglot boundary" <|
            (Java_Float.valueOf "123.3" + 5).should_equal 128.3 epsilon=0.0001
            (Java_Integer.sum 1 2 + 3) . should_equal 6
        Test.specify "should auto-convert strings across the polyglot boundary" <|
        group_builder.specify "should auto-convert strings across the polyglot boundary" <|
            (String.format "%s bar %s" "baz" "quux" + " foo").should_equal "baz bar quux foo"
        Test.specify "should support Java import renaming" <|
        group_builder.specify "should support Java import renaming" <|
            builder = Java_String_Builder.new
            builder.append "foo"
            builder.append "bar"
            str = builder.toString
            str.should_equal "foobar"
        Test.specify "should invoke static methods" <|
        group_builder.specify "should invoke static methods" <|
            x = Java_Integer.valueOf 1
            x.test_me x . should_equal False

    Test.group "Numeric values" <|
        Test.specify "can be passed in host calls without lossy coercion exception" <|
    suite_builder.group "Numeric values" group_builder->
        group_builder.specify "can be passed in host calls without lossy coercion exception" <|
            large_long = 6907338656278321365
            moments = Moments.new 1
            moments.add large_long

    Test.group "Java/Enso Date" <|
        Test.specify "Java date has Enso properties" <|
    suite_builder.group "Java/Enso Date" group_builder->
        group_builder.specify "Java date has Enso properties" <|
            april1st = LocalDate.of 2022 04 01
            april1st.year.should_equal 2022
            april1st.month.should_equal 4
            april1st.day.should_equal 1

        Test.specify "send Enso date into Java" <|
        group_builder.specify "send Enso date into Java" <|
            ensodate = Date.new 2022 04 01
            javatime = LocalTime.of 10 26
            javatimedate = javatime . to_date_time ensodate
@ -68,21 +68,21 @@ spec =
            april1st.month.should_equal 4
            april1st.day.should_equal 1

    Test.group "Java case of" <|
        Test.specify "case on Thread.State enum" <|
    suite_builder.group "Java case of" group_builder->
        group_builder.specify "case on Thread.State enum" <|
            match x = case x of
                State.NEW -> "new"
                _ -> "unknown"
            match State.NEW . should_equal "new"
            match State.BLOCKED . should_equal "unknown"

        Test.specify "case on String static field" <|
        group_builder.specify "case on String static field" <|
            match x = case x of
                String.CASE_INSENSITIVE_ORDER -> "match"
                _ -> "unknown"
            match String.CASE_INSENSITIVE_ORDER . should_equal "match"

        Test.specify "case on non-existing field yields Compile_Error" <|
        group_builder.specify "case on non-existing field yields Compile_Error" <|
            match x = case x of
                State.NON_EXISTING -> "match"
                _ -> "unknown"
@ -91,4 +91,8 @@ spec =
            err.to_text . contains "NON_EXISTING" . should_be_true
            err.to_text . contains "is not visible in this scope" . should_be_true

main = Test_Suite.run_main spec
main =
    suite = Test.build suite_builder->
        add_specs suite_builder
    suite.run_with_filter

@ -1,7 +1,7 @@
from Standard.Base import all

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

foreign js my_method a b = """
    return a + b;
@ -79,24 +79,24 @@ foreign js test_array_properties = """
    my_array.index = 0
    return my_array

spec = Test.group "Polyglot JS" <|
add_specs suite_builder = suite_builder.group "Polyglot JS" group_builder->

    Test.specify "should allow declaring module-level methods in JS" <|
    group_builder.specify "should allow declaring module-level methods in JS" <|
        my_method 1 2 . should_equal 3
        m = my_method 1
        m 2 . should_equal 3

    Test.specify "should allow mutual calling of instance-level methods" <|
    group_builder.specify "should allow mutual calling of instance-level methods" <|
        My_Type.Value 3 4 . my_method_3 5 . should_equal 36

    Test.specify "should expose methods and fields of JS objects" <|
    group_builder.specify "should expose methods and fields of JS objects" <|
        obj = make_object
        obj.x . should_equal 10
        obj.y . should_be_false
        obj.compare 5 . should_be_false
        obj.compare 11 . should_be_true

    Test.specify "should expose array interfaces for JS arrays" <|
    group_builder.specify "should expose array interfaces for JS arrays" <|
        vec = Vector.from_polyglot_array make_array
        vec.map .x . should_equal [10, 20, 30]
        vec2 = Vector.from_polyglot_array make_simple_array
@ -111,11 +111,11 @@ spec = Test.group "Polyglot JS" <|
        arr_2 . should_equal [30, 10, 20]
        sorted_2 . should_equal [10, 20, 30]

    Test.specify "should correctly marshall strings" <|
    group_builder.specify "should correctly marshall strings" <|
        str = make_str "x" + " baz"
        str.should_equal "foo x bar baz"

    Test.specify "should make JS strings type pattern-matchable" <|
    group_builder.specify "should make JS strings type pattern-matchable" <|
        str = make_str "x"
        t = case str of
            Text -> False
@ -123,7 +123,7 @@ spec = Test.group "Polyglot JS" <|
            _ -> False
        t.should_be_true

    Test.specify "should make JS booleans type pattern-matchable" <|
    group_builder.specify "should make JS booleans type pattern-matchable" <|
        bool = make_true
        t = case bool of
            True -> True
@ -143,7 +143,7 @@ spec = Test.group "Polyglot JS" <|
            _ -> False
        c_2.should_be_true

    Test.specify "should make JS arrays type pattern-matchable as arrays" <|
    group_builder.specify "should make JS arrays type pattern-matchable as arrays" <|
        arr = make_array
        r = case arr of
            Array -> False
@ -151,11 +151,11 @@ spec = Test.group "Polyglot JS" <|
            _ -> False
        r.should_be_true

    Test.specify "should make JS null values equal to Nothing" <|
    group_builder.specify "should make JS null values equal to Nothing" <|
        js_null = make_null
        js_null . should_equal Nothing

    Test.specify "should make JS numbers type pattern-matchable" <|
    group_builder.specify "should make JS numbers type pattern-matchable" <|
        int_match = case make_int of
            _ : Integer -> True
            _ -> False
@ -173,46 +173,50 @@ spec = Test.group "Polyglot JS" <|
            _ -> False
        num_double_match.should_be_true

    Test.specify "should allow Enso to catch JS exceptions" <|
    group_builder.specify "should allow Enso to catch JS exceptions" <|
        value = My_Type.Value 1 2
        result = Panic.recover Any <| value.my_throw
        err = result.catch
        err.message . should_equal "JS Exc"
        err.name . should_equal "Error"

    Test.specify "should allow JS to catch Enso exceptions" <|
    group_builder.specify "should allow JS to catch Enso exceptions" <|
        value = My_Type.Value 7 2
        result = value.do_catch
        result . should_equal 7

    Test.specify "should properly handle parse errors" <|
    group_builder.specify "should properly handle parse errors" <|
        err = Panic.recover Any does_not_parse . catch
        err.message.should_contain "Expected }"

    Test.specify "allow access to properties of nested objects" <|
    group_builder.specify "allow access to properties of nested objects" <|
        value = test_multilevel
        value.b.x . should_equal 1

    Test.specify "allow access to properties of array objects in JS" <|
    group_builder.specify "allow access to properties of array objects in JS" <|
        array = test_array_properties
        array.groups.a . should_equal 1
        array.index . should_equal 0

    Test.specify "allow access to the length property of a JS array in Enso" <|
    group_builder.specify "allow access to the length property of a JS array in Enso" <|
        array = make_array
        array.length . should_equal 3

    Test.specify "should perform maths with mixed numbers" <|
    group_builder.specify "should perform maths with mixed numbers" <|
        js_num = make_int
        enso_num = 10
        (enso_num + js_num) . should_equal 20
        (js_num - enso_num) . should_equal 0

    Test.specify "should propagate dataflow errors" <|
    group_builder.specify "should propagate dataflow errors" <|
        error = Error.throw 42
        my_method error 0 . should_fail_with Integer

    Test.specify "allow use of JavaScript debugger statement" <|
    group_builder.specify "allow use of JavaScript debugger statement" <|
        debug

main = Test_Suite.run_main spec
main =
    suite = Test.build suite_builder->
        add_specs suite_builder
    suite.run_with_filter

@ -1,23 +1,23 @@
from Standard.Base import all
import Standard.Base

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


type My_Type
    Value foo bar baz

spec = Test.group "Meta-Value Inspection" <|
add_specs suite_builder = suite_builder.group "Meta-Value Inspection" group_builder->
    location_pending = case Platform.os of
        Platform.OS.Windows -> "This test is disabled on Windows until issue 1561 is fixed."
        _ -> Nothing

    Test.specify "should allow to get the source location of a frame" pending=location_pending <|
    group_builder.specify "should allow to get the source location of a frame" pending=location_pending <|
        src = Meta.get_source_location 0
        loc = "Meta_Location_Spec.enso:16:15-40"
        src.take (Last loc.length) . should_equal loc

    Test.specify "should allow to get qualified type names of values" <|
    group_builder.specify "should allow to get qualified type names of values" <|
        x = 42
        y = My_Type.Value 1 2 3
        Meta.get_qualified_type_name x . should_equal "Standard.Base.Data.Numbers.Integer"
@ -25,11 +25,15 @@ spec = Test.group "Meta-Value Inspection" <|
        Meta.get_qualified_type_name y . should_equal "enso_dev.Base_Tests.Semantic.Meta_Location_Spec.My_Type"
        Meta.get_simple_type_name y . should_equal "My_Type"

    Test.specify "should allow access to package names" <|
    group_builder.specify "should allow access to package names" <|
        enso_project.name.should_equal 'Base_Tests'
        (Project_Description.new Base).name.should_equal 'Base'

    Test.specify "should allow to fetch enso project description from a module" <|
    group_builder.specify "should allow to fetch enso project description from a module" <|
        (Project_Description.new Standard.Base.Data.Vector).name.should_equal "Base"

main = Test_Suite.run_main spec
main =
    suite = Test.build suite_builder->
        add_specs suite_builder
    suite.run_with_filter

@ -11,9 +11,10 @@ polyglot java import java.util.ArrayList
polyglot java import java.util.Random as Java_Random
polyglot java import java.util.Locale as JavaLocale

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
import Standard.Test.Test_Result.Test_Result
from Standard.Test_New import all
import Standard.Test_New.Spec_Result.Spec_Result


type My_Type
    @foo (test_method)
@ -51,41 +52,41 @@ type Sum_Type
    Variant_A x
    Variant_B y

spec =
    Test.group "Meta-Value Manipulation" <|
        Test.specify "should allow manipulating unresolved symbols" <|
add_specs suite_builder =
    suite_builder.group "Meta-Value Manipulation" group_builder->
        group_builder.specify "should allow manipulating unresolved symbols" <|
            sym = .does_not_exist
            meta_sym = Meta.meta sym
            meta_sym.name.should_equal "does_not_exist"
            new_sym = meta_sym . rename "my_method"
            object = My_Type.Value 1 2 3
            new_sym object . should_equal 6
        Test.specify "should allow manipulating atoms" <|
        group_builder.specify "should allow manipulating atoms" <|
            atom = My_Type.Value 1 "foo" Nothing
            meta_atom = Meta.meta atom
            meta_atom.constructor.value.should_equal My_Type.Value
            meta_atom.fields.should_equal [1, "foo", Nothing]
            Meta.meta (meta_atom.constructor.value) . new [1, "foo", Nothing] . should_equal atom
        Test.specify "should allow getting a value's constructor's name" <|
        group_builder.specify "should allow getting a value's constructor's name" <|
            Meta.meta List.Nil . constructor . name . should_equal "Nil"
            Meta.meta (List.Cons 1 List.Nil) . constructor . name . should_equal "Cons"
        Test.specify "should allow getting a value's constructor's fields" <|
        group_builder.specify "should allow getting a value's constructor's fields" <|
            Meta.meta List.Nil . constructor . fields . should_equal []
            Meta.meta (List.Cons 1 List.Nil) . constructor . fields . should_equal ["x", "xs"]
        Test.specify "should allow creating atoms from atom constructors" <|
        group_builder.specify "should allow creating atoms from atom constructors" <|
            atom_1 = Meta.new_atom My_Type.Value [1,"foo", Nothing]
            (Meta.meta atom_1).constructor.value . should_equal My_Type.Value
            atom_2 = Meta.new_atom My_Type.Value [1,"foo", Nothing].to_array
            (Meta.meta atom_2).constructor.value . should_equal My_Type.Value
        Test.specify "should correctly return representations of different classes of objects" <|
        group_builder.specify "should correctly return representations of different classes of objects" <|
            Meta.meta 1 . should_equal (Meta.Primitive.Value 1)
            Meta.meta "foo" . should_equal (Meta.Primitive.Value "foo")
        Test.specify "should allow manipulation of error values" <|
        group_builder.specify "should allow manipulation of error values" <|
            err = Error.throw "My Error"
            meta_err = Meta.meta err
            meta_err.is_a Meta.Error . should_be_true
            meta_err.value . should_equal "My Error"
        Test.specify "should allow checking if a value is of a certain type" <|
        group_builder.specify "should allow checking if a value is of a certain type" <|
            1.is_a Any . should_be_true
            1.2.is_a Any . should_be_true
            (My_Type.Value 1 "foo" Nothing).is_a Any . should_be_true
@ -138,30 +139,30 @@ spec =
            Meta.is_a Date_Time.now.zone Time_Zone . should_be_true
            Meta.is_a Date_Time.now.zone Date . should_be_false

        Test.specify "constructor variants check" <|
        group_builder.specify "constructor variants check" <|
            (Sum_Type.Variant_A 42).is_a Sum_Type . should_be_true
            (Sum_Type.Variant_A 42).is_a Sum_Type.Variant_A . should_be_false
            (Sum_Type.Variant_A 42).is_a Sum_Type.Variant_B . should_be_false

        Test.specify "constructor variants case of" <|
        group_builder.specify "constructor variants case of" <|
            case (Sum_Type.Variant_A 42) of
                _ : Sum_Type -> Nothing
                e -> Test.fail "Expected "+e.to_text+" to be a Sum_Type"

        Test.specify "constructor True check" <|
        group_builder.specify "constructor True check" <|
            True . should_be_a True
            True . should_be_a Boolean
            Panic.recover Any (True . should_be_a False) . should_fail_with Test_Result
            Panic.recover Any (True . should_be_a False) . should_fail_with Spec_Result

        Test.specify "constructor Case.Lower check" <|
        group_builder.specify "constructor Case.Lower check" <|
            Case.Lower . should_be_a Case.Lower
            Case.Lower . should_be_a Case
            Panic.recover Any (Case.Lower . should_be_a Case.Upper) . should_fail_with Test_Result
            Panic.recover Any (Case.Lower . should_be_a Case.Upper) . should_fail_with Spec_Result

        Test.specify "5 isn't a constructor" <|
            Panic.recover Any (5.should_be_a File_Error.Not_Found) . should_fail_with Test_Result
        group_builder.specify "5 isn't a constructor" <|
            Panic.recover Any (5.should_be_a File_Error.Not_Found) . should_fail_with Spec_Result

        Test.specify "should allow for returning the type of a value" <|
        group_builder.specify "should allow for returning the type of a value" <|
            n_1 = Meta.type_of 42
            n_1 . should_equal_type Integer
            n_1 . should_not_equal_type Float
@ -208,7 +209,7 @@ spec =
            e_tpe . should_equal_type IOException
            e_tpe . should_not_equal_type JException

        Test.specify "constructors of Boolean" <|
        group_builder.specify "constructors of Boolean" <|
            typ = Boolean

            Meta.is_atom typ . should_be_false
@ -224,7 +225,7 @@ spec =
            cons.at 1 . should_be_a Meta.Constructor
            cons . map (x -> x.name) . sort . should_equal [ "False", "True" ]

        Test.specify "constructors of MyType" <|
        group_builder.specify "constructors of MyType" <|
            typ = My_Type

            Meta.is_atom typ . should_be_false
@ -241,7 +242,7 @@ spec =
            cons.each ctor->
                ctor.declaring_type . should_equal meta_typ

        Test.specify "methods of MyType" <|
        group_builder.specify "methods of MyType" <|
            typ = My_Type

            Meta.is_atom typ . should_be_false
@ -253,25 +254,25 @@ spec =

            methods.sort . should_equal ['bar', 'baz', 'first_method', 'foo', 'my_method', 'other_method', 'second_method']

        Test.specify "static methods of MyType" <|
        group_builder.specify "static methods of MyType" <|
            methods = Meta.meta (Meta.type_of My_Type) . methods
            methods.sort . should_equal ['Value', 'create', 'factory', 'first_method', 'my_method', 'other_method', 'second_method']

        Test.specify "methods of Integer" <|
        group_builder.specify "methods of Integer" <|
            Meta.meta Integer . methods . sort . should_equal ['%', '*', '+', '-', '/', '<', '<=', '>', '>=', '^', 'abs', 'bit_and', 'bit_not', 'bit_or', 'bit_shift', 'bit_shift_l', 'bit_shift_r', 'bit_xor', 'ceil', 'div', 'floor', 'negate', 'round', 'to_float', 'truncate']

        Test.specify "static methods of Integer" <|
        group_builder.specify "static methods of Integer" <|
            Meta.meta (Meta.type_of Integer) . methods . sort . should_equal ['%', '*', '+', '-', '/', '<', '<=', '>', '>=', '^', 'abs', 'bit_and', 'bit_not', 'bit_or', 'bit_shift', 'bit_shift_l', 'bit_shift_r', 'bit_xor', 'ceil', 'div', 'floor', 'negate', 'parse', 'round', 'to_float', 'truncate']

        Test.specify "methods of Any" <|
        group_builder.specify "methods of Any" <|
            Meta.meta Any . methods . should_contain "to_text"

        Test.specify "should correctly handle Java values" <|
        group_builder.specify "should correctly handle Java values" <|
            java_meta = Meta.meta Java_Random.new
            java_meta . should_be_a Meta.Polyglot
            java_meta . get_language . should_equal Meta.Language.Java

        Test.specify "should correctly handle equality of Java values" <|
        group_builder.specify "should correctly handle equality of Java values" <|
            a = JavaLocale.new "en"
            b = JavaLocale.new "en"
            c = JavaLocale.new "pl"
@ -284,7 +285,7 @@ spec =
            (Test_Type.Value a)==(Test_Type.Value b) . should_be_true
            (Test_Type.Value a)==(Test_Type.Value c) . should_be_false

        Test.specify "get annotations" <|
        group_builder.specify "get annotations" <|
            Meta.get_annotation Meta_Spec .test_method "a" . should_equal 7
            Meta.get_annotation Meta_Spec .test_method "b" . should_equal (Test_Type.Value 49)
            Meta.get_annotation Meta_Spec .test_method "c" . should_fail_with Text
@ -301,31 +302,31 @@ spec =

            Meta.get_annotation value .my_method "self" . should_equal "self"

        Test.specify "no constructor annotations on value" <|
        group_builder.specify "no constructor annotations on value" <|
            value = My_Type.Value 99 "bar" True
            Meta.get_annotation value .Value "foo" . should_equal Nothing
            Meta.get_annotation value .Value "bar" . should_equal Nothing
            Meta.get_annotation value .Value "baz" . should_equal Nothing

        Test.specify "get annotations on constructor" <|
        group_builder.specify "get annotations on constructor" <|
            Meta.get_annotation My_Type .Value "foo" 7 8 . should_equal 15
            Meta.get_annotation My_Type .Value "bar" . should_equal Nothing
            Meta.get_annotation My_Type .Value "baz" . should_equal (My_Type.Value 1 2 3)

    Test.group "Check Nothing and NaN" <|
        Test.specify "Nothing.is_a Nothing" <|
    suite_builder.group "Check Nothing and NaN" group_builder->
        group_builder.specify "Nothing.is_a Nothing" <|
            Nothing.is_a Nothing . should_be_true
            Meta.is_same_object Nothing Nothing . should_be_true

        Test.specify "type_of Nothing is Nothing" <|
        group_builder.specify "type_of Nothing is Nothing" <|
            Meta.type_of Nothing . should_equal Nothing

        Test.specify "NaN and NaN should be the same object" <|
        group_builder.specify "NaN and NaN should be the same object" <|
            Meta.is_same_object Number.nan Number.nan . should_be_true
            (Number.nan == Number.nan) . should_be_false

    Test.group "Atom with holes" <|
        Test.specify "construct and fill" <|
    suite_builder.group "Atom with holes" group_builder->
        group_builder.specify "construct and fill" <|
            pair = Meta.atom_with_hole (e -> My_Type.Value 1 e 3)

            atom = pair.value
@ -350,7 +351,7 @@ spec =
            fill 10 # no change
            atom.bar . should_equal 2

        Test.specify "construct and fill with type" <|
        group_builder.specify "construct and fill with type" <|
            pair = Meta.atom_with_hole (e -> My_Ascribed_Type.Value_With_Type 1 e 3)

            atom = pair.value
@ -375,17 +376,17 @@ spec =
            fill 10 # no change
            atom.bar . should_equal 2

        Test.specify "reject wrong type" <|
        group_builder.specify "reject wrong type" <|
            pair = Meta.atom_with_hole (e -> My_Ascribed_Type.Value_With_Type 1 e 3)
            fill = pair.fill
            Panic.catch Any (fill "Two") err->err.convert_to_dataflow_error . should_fail_with Type_Error

        Test.specify "direct use of pair.fill" <|
        group_builder.specify "direct use of pair.fill" <|
            pair = Meta.atom_with_hole (e -> My_Ascribed_Type.Value_With_Type 1 e 3)
            pair.fill 2
            pair.value.bar . should_equal 2

        Test.specify "fail if atom_with_hole isn't used" <|
        group_builder.specify "fail if atom_with_hole isn't used" <|
            key = Panic.catch Uninitialized_State handler=(caught_panic-> caught_panic.payload.key) <|
                Meta.atom_with_hole (_ -> My_Type.Value 1 2 3)
            case key of
@ -394,14 +395,14 @@ spec =
                    t.bar . should_equal 2
                    t.baz . should_equal 3

        Test.specify "fail if non-atom is created" <|
        group_builder.specify "fail if non-atom is created" <|
            key = Panic.catch Uninitialized_State handler=(caught_panic-> caught_panic.payload.key) <|
                Meta.atom_with_hole (_ -> 2)
            case key of
                t : Number ->
                    t . should_equal 2

        Test.specify "only one atom_with_hole is used" <|
        group_builder.specify "only one atom_with_hole is used" <|
            pair = Meta.atom_with_hole (e -> My_Type.Value e e e)
            atom = pair.value
            fill = pair.fill
@ -430,22 +431,22 @@ spec =
            case atom.bar of
                n : Number -> Test.fail "Not changed to number: "+n
                _ -> Nothing
    Test.group "Unresolved symbol"
        Test.specify "should be treated as a Function" <|
    suite_builder.group "Unresolved symbol" group_builder->
        group_builder.specify "should be treated as a Function" <|
            (_.is_nothing) . is_a Function . should_equal True
            (.is_nothing) . is_a Function . should_equal True
            Meta.type_of (_.is_nothing) . should_equal Function
            Meta.type_of (.is_nothing) . should_equal Function

    Test.group "Meta.Type.find"
        Test.specify "find boolean and use it" <|
    suite_builder.group "Meta.Type.find" group_builder->
        group_builder.specify "find boolean and use it" <|
            fqn = "Standard.Base.Data.Boolean.Boolean"
            typ = Meta.Type.find fqn
            typ . is_a Meta.Type . should_be_true
            typ.name . should_equal "Boolean"
            Meta.Type.find typ.qualified_name . should_equal typ

        Test.specify "find boolean via associated type and use it" <|
        group_builder.specify "find boolean via associated type and use it" <|
            fqn = "Standard.Base.Data.Boolean"
            typ = Meta.Type.find fqn
            typ . is_a Meta.Type . should_be_true
@ -453,16 +454,20 @@ spec =
            typ.qualified_name . should_equal fqn
            Meta.Type.find typ.qualified_name . should_equal typ

        Test.specify "unknown type" <|
        group_builder.specify "unknown type" <|
            Meta.Type.find "Unknown.Type" . should_fail_with Not_Found
            Meta.Type.find "UnknownType" . should_fail_with Not_Found
            Meta.Type.find "" . should_fail_with Not_Found

        Test.specify "My ascribed type" <|
        group_builder.specify "My ascribed type" <|
            meta_type = Meta.meta My_Ascribed_Type
            fqn = meta_type . qualified_name
            fqn.should_contain "Meta_Spec.My_Ascribed_Type"
            typ = Meta.Type.find fqn
            typ . should_equal meta_type

main = Test_Suite.run_main spec
main =
    suite = Test.build suite_builder->
        add_specs suite_builder
    suite.run_with_filter

@ -3,8 +3,8 @@ from Standard.Base import all
from project.Semantic.Definitions.Names import another_method, another_constant, method_with_local_vars, Bar
import project.Semantic.Definitions.Names

from Standard.Test import Test
import Standard.Test.Extensions
from Standard.Test_New import all


Names.Foo.my_method self = case self of
    Names.Foo.Value x y z -> x * y * z
@ -15,43 +15,43 @@ constant = 1

add_one (x = 0) = x + 1

spec =
    Test.group "Qualified Names" <|
        Test.specify "should allow to call constructors in a qualified manner" <|
add_specs suite_builder =
    suite_builder.group "Qualified Names" group_builder->
        group_builder.specify "should allow to call constructors in a qualified manner" <|
            Names.Foo.Value 1 2 3 . sum . should_equal 6
        Test.specify "should allow pattern matching in a qualified manner" <|
        group_builder.specify "should allow pattern matching in a qualified manner" <|
            v = Names.Foo.Value 1 2 3
            res = case v of
                Names.Foo.Value a b c -> a + b + c
            res.should_equal 6
        Test.specify "should allow defining methods on qualified names" <|
        group_builder.specify "should allow defining methods on qualified names" <|
            v = Names.Foo.Value 2 3 5
            v.my_method.should_equal 30
    Test.group "Lowercase Methods" <|
        Test.specify "should allow calling methods without a target" <|
    suite_builder.group "Lowercase Methods" group_builder->
        group_builder.specify "should allow calling methods without a target" <|
            v = constant
            v.should_equal 1
        Test.specify "should allow calling this module's methods" <|
        group_builder.specify "should allow calling this module's methods" <|
            add_one.should_equal 1
            add_one 100 . should_equal 101
        Test.specify "should allow calling methods imported from another module without name mangling" <|
        group_builder.specify "should allow calling methods imported from another module without name mangling" <|
            another_method 10 . should_equal 10
            another_constant . should_equal 10
        Test.specify "should allow calling methods with fully qualified module name" <|
        group_builder.specify "should allow calling methods with fully qualified module name" <|
            (Names.another_method 10).should_equal 10
            v = Names.another_method
            v 10 . should_equal 10
        Test.specify "should be resolved correctly in the presence of variables with the same name" <|
        group_builder.specify "should be resolved correctly in the presence of variables with the same name" <|
            method_with_local_vars 1 . should_equal 13
    Test.group "Methods" <|
        Test.specify "should be correctly resolved on instances" <|
    suite_builder.group "Methods" group_builder->
        group_builder.specify "should be correctly resolved on instances" <|
            b = Bar.Value 1
            b.meh 2 . should_equal 3
        Test.specify "should be allowed to be called statically" pending="Needs changes to method dispatch logic" <|
        group_builder.specify "should be allowed to be called statically" pending="Needs changes to method dispatch logic" <|
            b = Bar.Value 1
            Bar.meh b 2 . should_equal 3
    Test.group "Fully Qualified Names" <|
        Test.specify "should be correctly resolved" <|
    suite_builder.group "Fully Qualified Names" group_builder->
        group_builder.specify "should be correctly resolved" <|
            a = Standard.Base.Data.Vector.Vector.new 10 _->Nothing
            a.length . should_equal 10
            Standard.Base.Errors.Problem_Behavior.Problem_Behavior.Report_Error.to_text . should_equal "Report_Error"
@ -1,7 +1,7 @@
from Standard.Base import all

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


foreign python my_method a b = """
    return a + b
@ -77,24 +77,24 @@ foreign python make_null = """
foreign python does_not_parse = """
    if? cxcc 531 6

spec =
add_specs suite_builder =
    pending = if Polyglot.is_language_installed "python" then Nothing else """
        Can't run Python tests, Python is not installed.
    Test.group "Polyglot Python" pending=pending <|
        Test.specify "should allow declaring module-level methods in Python" <|
    suite_builder.group "Polyglot Python" pending=pending group_builder->
        group_builder.specify "should allow declaring module-level methods in Python" <|
            my_method 1 2 . should_equal 3

        Test.specify "should allow mutual calling of instance-level methods" <|
        group_builder.specify "should allow mutual calling of instance-level methods" <|
            My_Type.Value 3 4 . my_method_3 5 . should_equal 36

        Test.specify "should expose methods and fields of Python objects" <|
        group_builder.specify "should expose methods and fields of Python objects" <|
            obj = make_object
            obj.x . should_equal 10
            obj.y . should_be_false
            obj.compare 5 . should_be_false
            obj.compare 11 . should_be_true

        Test.specify "should expose array interfaces for Python arrays" <|
        group_builder.specify "should expose array interfaces for Python arrays" <|
            vec = Vector.from_polyglot_array make_array
            vec.map .x . should_equal [30, 10, 20]

@ -109,17 +109,17 @@ spec =
            sorted_2 . should_equal [10, 20, 30]


        Test.specify "should recognize Text as Python string" <|
        group_builder.specify "should recognize Text as Python string" <|
            py_is_str "Hello" . should_be_true
            py_is_str 10 . should_be_false
            py_is_str Nothing . should_be_false


        Test.specify "should correctly marshall strings" <|
        group_builder.specify "should correctly marshall strings" <|
            str = make_str "x" + " baz"
            str.should_equal "foo x bar baz"

        Test.specify "should make Python strings type pattern-matchable" <|
        group_builder.specify "should make Python strings type pattern-matchable" <|
            str = make_str "x"
            t = case str of
                Text -> False
@ -127,7 +127,7 @@ spec =
                _ -> False
            t.should_be_true

        Test.specify "should make Python booleans type pattern-matchable" <|
        group_builder.specify "should make Python booleans type pattern-matchable" <|
            bool = make_true
            t = case bool of
                True -> True
@ -147,7 +147,7 @@ spec =
                _ -> False
            c_2.should_be_true

        Test.specify "should make Python lists type pattern-matchable as arrays" <|
        group_builder.specify "should make Python lists type pattern-matchable as arrays" <|
            arr = make_array
            r = case arr of
                Array -> False
@ -155,7 +155,7 @@ spec =
                _ -> False
            r.should_be_true

        Test.specify "should make Python numbers type pattern-matchable" <|
        group_builder.specify "should make Python numbers type pattern-matchable" <|
            int_match = case make_int of
                _ : Integer -> True
                _ -> False
@ -173,43 +173,47 @@ spec =
                _ -> False
            num_double_match.should_be_true

        Test.specify "should make Python number values equal to Enso ints" <|
        group_builder.specify "should make Python number values equal to Enso ints" <|
            py_10 = make_int
            py_10 . should_equal 10

        Test.specify "should make Python number values equal to Enso doubles" <|
        group_builder.specify "should make Python number values equal to Enso doubles" <|
            py_d = make_double
            py_d . should_equal 10.5

        Test.specify "should make Python None values equal to Nothing" <|
        group_builder.specify "should make Python None values equal to Nothing" <|
            py_null = make_null
            py_null . should_equal Nothing

        Test.specify "should allow Enso to catch Python exceptions" <|
        group_builder.specify "should allow Enso to catch Python exceptions" <|
            value = My_Type.Value 1 2
            result = Panic.recover Any <| value.my_throw
            err = result.catch
            err.args.at 0 . should_equal 'Error!'
            err.to_text . should_equal "RuntimeError('Error!')"

        Test.specify "should allow Python to catch Enso exceptions" <|
        group_builder.specify "should allow Python to catch Enso exceptions" <|
            value = My_Type.Value 7 2
            result = value.do_catch
            result . should_equal 7

        Test.specify "should properly handle parse errors" <|
        group_builder.specify "should properly handle parse errors" <|
            err = Panic.recover Any does_not_parse . catch
            err.args.at 0 . should_contain 'invalid syntax'

        Test.specify "should perform maths with mixed numbers" <|
        group_builder.specify "should perform maths with mixed numbers" <|
            py_num = make_int
            enso_num = 10
            (enso_num + py_num) . should_equal 20
            (py_num - enso_num) . should_equal 0

        Test.specify "should propagate dataflow errors" <|
        group_builder.specify "should propagate dataflow errors" <|
            error = Error.throw 42
            my_method error 0 . should_fail_with Integer

main = Test_Suite.run_main spec
main =
    suite = Test.build suite_builder->
        add_specs suite_builder
    suite.run_with_filter

@ -1,7 +1,7 @@
from Standard.Base import all

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


foreign js my_method a b = """
    return a + b;
@ -57,24 +57,24 @@ foreign r make_false = """
foreign r make_null = """
    NULL

spec =
add_specs suite_builder =
    pending = if Polyglot.is_language_installed "R" then Nothing else """
        Can't run R tests, R is not installed.
    Test.group "Polyglot R" pending=pending <|
        Test.specify "should allow declaring module-level methods in R" <|
    suite_builder.group "Polyglot R" pending=pending group_builder->
        group_builder.specify "should allow declaring module-level methods in R" <|
            my_method 1 2 . should_equal 3

        Test.specify "should allow mutual calling of instance-level methods" <|
        group_builder.specify "should allow mutual calling of instance-level methods" <|
            My_Type.Value 3 4 . my_method_3 5 . should_equal 36

        Test.specify "should expose methods and fields of R objects" <|
        group_builder.specify "should expose methods and fields of R objects" <|
            obj = make_object
            obj.x . should_equal 10
            obj.y . should_be_false
            obj.compare 5 . should_be_false
            obj.compare 11 . should_be_true

        Test.specify "should expose array interfaces for R arrays" <|
        group_builder.specify "should expose array interfaces for R arrays" <|
            vec = Vector.from_polyglot_array make_array
            vec.map .x . should_equal [30, 10, 20]

@ -88,11 +88,11 @@ spec =
            arr_2 . should_equal [30, 10, 20]
            sorted_2 . should_equal [10, 20, 30]

        Test.specify "should correctly marshall strings" <|
        group_builder.specify "should correctly marshall strings" <|
            str = make_str "x" + " baz"
            str.should_equal "foo x bar baz"

        Test.specify "should make R strings type pattern-matchable" <|
        group_builder.specify "should make R strings type pattern-matchable" <|
            str = make_str "x"
            t = case str of
                Text -> False
@ -100,7 +100,7 @@ spec =
                _ -> False
            t.should_be_true

        Test.specify "should make R booleans type pattern-matchable" <|
        group_builder.specify "should make R booleans type pattern-matchable" <|
            bool = make_true
            t = case bool of
                True -> True
@ -120,7 +120,7 @@ spec =
                _ -> False
            c_2.should_be_true

        Test.specify "should make R arrays type pattern-matchable as arrays" <|
        group_builder.specify "should make R arrays type pattern-matchable as arrays" <|
            arr = make_array
            r = case arr of
                Array -> False
@ -128,7 +128,7 @@ spec =
                _ -> False
            r.should_be_true

        Test.specify "should make R numbers type pattern-matchable" <|
        group_builder.specify "should make R numbers type pattern-matchable" <|
            int_match = case make_int of
                _ : Integer -> True
                _ -> False
@ -146,11 +146,11 @@ spec =
                _ -> False
            num_double_match.should_be_true

        Test.specify "should make R null objects equal to Nothing" <|
        group_builder.specify "should make R null objects equal to Nothing" <|
            r_null = make_null
            r_null . should_equal Nothing

        Test.specify "should allow Enso to catch R exceptions" <|
        group_builder.specify "should allow Enso to catch R exceptions" <|
            value = My_Type.Value 1 2
            result = Panic.recover Any <| value.my_throw
            err = result.catch
@ -158,24 +158,28 @@ spec =
            err.to_display_text.should_equal "Error: error in R code!"

        pending="R does not support catching polyglot exceptions"
        Test.specify "should allow R to catch Enso exceptions" pending=pending <|
        group_builder.specify "should allow R to catch Enso exceptions" pending=pending <|
            value = My_Type.Value 7 2
            result = value.do_catch
            result . should_equal 7

        Test.specify "should properly report parse errors" <|
        group_builder.specify "should properly report parse errors" <|
            err = Panic.recover Any does_not_parse . catch
            err.to_text.should_contain 'parse exception'
            err.to_display_text.should_contain 'parse exception'

        Test.specify "should perform maths with mixed numbers" <|
        group_builder.specify "should perform maths with mixed numbers" <|
            r_num = make_int
            enso_num = 10
            (enso_num + r_num) . should_equal 20
            (r_num - enso_num) . should_equal 0

        Test.specify "should propagate dataflow errors" <|
        group_builder.specify "should propagate dataflow errors" <|
            error = Error.throw 42
            my_method error 0 . should_fail_with Integer

main = Test_Suite.run_main spec
main =
    suite = Test.build suite_builder->
        add_specs suite_builder
    suite.run_with_filter

@ -5,8 +5,8 @@ import Standard.Base.Panic.Panic

from Standard.Base.Runtime.Context import Input, Output

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all


in_fn : Integer -> Integer
in_fn a = Input.if_enabled (a * 2)
@ -14,22 +14,22 @@ in_fn a = Input.if_enabled (a * 2)
out_fn : Integer -> Integer
out_fn a = Output.if_enabled (a + 1)

spec =
    Test.group "Inlining Helpers" <|
        Test.specify "should allow to run an action" <|
add_specs suite_builder =
    suite_builder.group "Inlining Helpers" group_builder->
        group_builder.specify "should allow to run an action" <|
            x = Runtime.no_inline (2 + 3)
            x . should_equal 5
        Test.specify "should allow to call a function" <|
        group_builder.specify "should allow to call a function" <|
            x = Runtime.no_inline_with_arg (x -> x + 4) 3
            x . should_equal 7
    Test.group "Contexts and Execution Environment" <|
        Test.specify "should not prevent execution in the default live environment" <|
    suite_builder.group "Contexts and Execution Environment" group_builder->
        group_builder.specify "should not prevent execution in the default live environment" <|
            res = Panic.catch Any (in_fn 1) p-> p.payload.to_text
            res . should_equal 2
        Test.specify "should prevent execution with explicitly disabled context" <|
        group_builder.specify "should prevent execution with explicitly disabled context" <|
            res = Panic.catch Any (Runtime.with_disabled_context Input environment=Runtime.current_execution_environment (in_fn 1)) p-> p.payload.to_text
            res . should_equal "(Forbidden_Operation.Error 'Input')"
        Test.specify "should be configurable" <|
        group_builder.specify "should be configurable" <|
            r1 = Runtime.with_enabled_context Input environment=Runtime.current_execution_environment <|
                Runtime.with_enabled_context Output environment=Runtime.current_execution_environment <|
                    in_fn (out_fn 10)
@ -38,4 +38,8 @@ spec =
            r2 = Panic.catch Any (Runtime.with_disabled_context Input environment=Runtime.current_execution_environment <| in_fn (out_fn 10)) p-> p.payload.to_text
            r2 . should_equal "(Forbidden_Operation.Error 'Input')"

main = Test_Suite.run_main spec
main =
    suite = Test.build suite_builder->
        add_specs suite_builder
    suite.run_with_filter

@ -1,6 +1,6 @@
from Standard.Base import all
from Standard.Test import Test
import Standard.Test.Extensions
from Standard.Test_New import all


type My_Type
    Cons_A x
@ -23,18 +23,18 @@ type My_Type
            _ : Self -> "it's a Self"
            _ -> "it's a something else"

spec = Test.group "the Self construct" <|
    Test.specify "should allow calling statics and constructors from static methods" <|
add_specs suite_builder = suite_builder.group "the Self construct" group_builder->
    group_builder.specify "should allow calling statics and constructors from static methods" <|
        My_Type.static_use.should_equal 128
    Test.specify "should allow calling statics and constructors from instance methods" <|
    group_builder.specify "should allow calling statics and constructors from instance methods" <|
        My_Type.Cons_A 10 . instance_use . should_equal 138
    Test.specify "should work in instance pattern matches" <|
    group_builder.specify "should work in instance pattern matches" <|
        My_Type.Cons_A 10 . matching_method . should_equal 12
    Test.specify "should work in type pattern matches" <|
    group_builder.specify "should work in type pattern matches" <|
        My_Type.static_match My_Type . should_equal "it matched"
        My_Type.static_match Boolean . should_equal "it didn't match"
    Test.specify "should work in by-type pattern matches" <|
    group_builder.specify "should work in by-type pattern matches" <|
        My_Type.match_by_type (My_Type.Cons_A 10) . should_equal "it's a Self"
        My_Type.match_by_type 123 . should_equal "it's a something else"
    Test.specify "should work in constructor defaulted arguments" <|
    group_builder.specify "should work in constructor defaulted arguments" <|
        My_Type.Cons_B . y . x . should_equal 10
@ -7,8 +7,8 @@ polyglot java import java.lang.Long
polyglot java import java.util.function.Function as Java_Function
polyglot java import org.enso.base_test_helpers.CallbackHelper

from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all


type My_Warning
    Value reason
@ -95,14 +95,14 @@ do_fold_non_tail v =
    res = v.fold 0 (+)
    res

spec = Test.group "Dataflow Warnings" <|
    Test.specify "should allow to attach multiple warnings and read them back" <|
add_specs suite_builder = suite_builder.group "Dataflow Warnings" group_builder->
    group_builder.specify "should allow to attach multiple warnings and read them back" <|
        x = 1233
        y = Warning.attach "don't do this" x
        z = Warning.attach "I'm serious" y
        Warning.get_all z . map .value . should_equal ["I'm serious", "don't do this"]

    Test.specify "should thread warnings through constructor calls" <|
    group_builder.specify "should thread warnings through constructor calls" <|
        z = Warning.attach (My_Warning.Value "warn!!!") 3
        y = Warning.attach (My_Warning.Value "warn!!") 2
        x = Warning.attach (My_Warning.Value "warn!") 1
@ -110,21 +110,21 @@ spec = Test.group "Dataflow Warnings" <|
        mtp.should_equal (My_Type.Value 1 2 3)
        Warning.get_all mtp . map .value . should_equal [My_Warning.Value "warn!", My_Warning.Value "warn!!", My_Warning.Value "warn!!!"]

    Test.specify "should thread warnings through method calls" <|
    group_builder.specify "should thread warnings through method calls" <|
        mtp = My_Type.Value 1 2 3
        warned = Warning.attach "omgggg" mtp
        r = warned.my_method
        r.should_equal 6
        Warning.get_all r . map .value . should_equal ["omgggg"]

    Test.specify "should thread warnings through polyglot calls" <|
    group_builder.specify "should thread warnings through polyglot calls" <|
        y = Warning.attach "warn!!" 2
        x = Warning.attach "warn!" 1
        r = Long.sum x y
        r.should_equal 3
        Warning.get_all r . map .value . should_equal ['warn!', 'warn!!']

    Test.specify "should be passed correctly when combined with warnings added in branches" <|
    group_builder.specify "should be passed correctly when combined with warnings added in branches" <|
        one = Warning.attach "first" "1"

        b = case one of
@ -138,7 +138,7 @@ spec = Test.group "Dataflow Warnings" <|
        v . should_equal 0
        Warning.get_all v . map .value . should_equal ["a", "first"]

    Test.specify "should thread warnings through case expressions" <|
    group_builder.specify "should thread warnings through case expressions" <|
        z = Warning.attach (My_Warning.Value "warn!!!") 3
        y = Warning.attach (My_Warning.Value "warn!!") 2
        x = Warning.attach (My_Warning.Value "warn!") 1
@ -148,19 +148,19 @@ spec = Test.group "Dataflow Warnings" <|
        r.should_equal 6
        Warning.get_all r . map .value . should_equal [My_Warning.Value "warn!", My_Warning.Value "warn!!", My_Warning.Value "warn!!!"]

    Test.specify "should thread warnings through conversions" <|
    group_builder.specify "should thread warnings through conversions" <|
        z = Wrap.Value (Warning.attach 'warn!' 1)
        i = Integer.from z
        Warning.get_all i . map .value . should_equal ['warn!']

    Test.specify "should attach correct stacktraces" <|
    group_builder.specify "should attach correct stacktraces" <|
        current = Runtime.get_stack_trace
        warned = foo "value"
        warning_stack = Warning.get_all warned . first . origin
        relevant = warning_stack . drop (Last current.length)
||||
relevant.map .name . should_equal (['baz', 'bar', 'foo'].map ('Warnings_Spec.'+))
|
||||
|
||||
Test.specify "should attach reassignment info in the last-reassigned-first order" <|
|
||||
group_builder.specify "should attach reassignment info in the last-reassigned-first order" <|
|
||||
x = Warning.attach "warn!" 1
|
||||
r = reassign_test x
|
||||
warn = Warning.get_all r . first
|
||||
@ -175,7 +175,7 @@ spec = Test.group "Dataflow Warnings" <|
|
||||
+ [ 'Wrap.Value' ]
|
||||
reassignments.should_equal expected_stack.to_vector
|
||||
|
||||
Test.specify "should allow to set all warnings" <|
|
||||
group_builder.specify "should allow to set all warnings" <|
|
||||
warned = Warning.attach 1 <| Warning.attach 2 <| Warning.attach 3 <| Warning.attach 4 "foo"
|
||||
warnings = Warning.get_all warned
|
||||
filtered = warnings.filter x-> x.value % 2 == 0
|
||||
@ -183,7 +183,7 @@ spec = Test.group "Dataflow Warnings" <|
|
||||
rewarned.should_equal 'foo'
|
||||
Warning.get_all rewarned . map .value . should_contain_the_same_elements_as [2,4]
|
||||
|
||||
Test.specify "should allow checking for any warnings" <|
|
||||
group_builder.specify "should allow checking for any warnings" <|
|
||||
Warning.has_warnings "foo" . should_be_false
|
||||
"foo".has_warnings.should_be_false
|
||||
|
||||
@ -191,7 +191,7 @@ spec = Test.group "Dataflow Warnings" <|
|
||||
warned.has_warnings.should_be_true
|
||||
Warning.has_warnings warned . should_be_true
|
||||
|
||||
Test.specify "should allow to clear warnings" <|
|
||||
group_builder.specify "should allow to clear warnings" <|
|
||||
warned = Warning.attach 1 <| Warning.attach 2 <| Warning.attach 3 <| Warning.attach 4 "foo"
|
||||
cleared = Warning.clear warned
|
||||
cleared.should_equal 'foo'
|
||||
@ -201,7 +201,7 @@ spec = Test.group "Dataflow Warnings" <|
|
||||
clear_2.should_equal 'foo'
|
||||
Warning.get_all clear_2 . map .value . should_equal []
|
||||
|
||||
Test.specify "should allow to run a function suspending warnings attached to an argument and reattach them to the result" <|
|
||||
group_builder.specify "should allow to run a function suspending warnings attached to an argument and reattach them to the result" <|
|
||||
x = Warning.attach 1 <| Warning.attach 2 <| Warning.attach 3 <| Warning.attach 4 "foo"
|
||||
y = Warning.with_suspended x x->
|
||||
count = Warning.get_all x . length
|
||||
@ -209,13 +209,13 @@ spec = Test.group "Dataflow Warnings" <|
|
||||
y . should_equal 0
|
||||
Warning.get_all y . map .value . should_contain_the_same_elements_as [4, 3, 2, 1, "BAZ"]
|
||||
|
||||
Test.specify "should allow to map the warnings, selectively" <|
|
||||
group_builder.specify "should allow to map the warnings, selectively" <|
|
||||
warned = attach_four_warnings "foo"
|
||||
mapped = map_odd_warnings warned
|
||||
mapped . should_equal 'foo'
|
||||
Warning.get_all mapped . map .value . should_contain_the_same_elements_as [11, 2, 13, 4]
|
||||
|
||||
Test.specify "should allow to map warnings and errors, selectively" <|
|
||||
group_builder.specify "should allow to map warnings and errors, selectively" <|
|
||||
warned = attach_four_warnings "foo"
|
||||
mapped = map_odd_warnings_and_errors warned
|
||||
mapped . should_equal 'foo'
|
||||
@ -233,14 +233,14 @@ spec = Test.group "Dataflow Warnings" <|
|
||||
mapped_3.stack_trace.first.name . should_equal "Warnings_Spec.throw_a_bar"
|
||||
Warning.get_all mapped_3 . catch . should_equal "bar"
|
||||
|
||||
Test.specify "should allow to detach warnings, selectively" <|
|
||||
group_builder.specify "should allow to detach warnings, selectively" <|
|
||||
warned = attach_four_warnings "foo"
|
||||
detached_pair = Warning.detach_selected_warnings warned odd_warning_filter
|
||||
detached_pair . first . should_equal "foo"
|
||||
Warning.get_all (detached_pair . first) . map .value . should_equal [2,4]
|
||||
detached_pair.second . map .value . should_equal [1,3]
|
||||
|
||||
Test.specify "should allow to checking for warnings, by type" <|
|
||||
group_builder.specify "should allow to checking for warnings, by type" <|
|
||||
warned = Warning.attach 1 <| Warning.attach "Alpha" <| Warning.attach Nothing <| Warning.attach (Unimplemented.Error "An Error Here") "foo"
|
||||
|
||||
warned.has_warnings.should_be_true
|
||||
@ -251,7 +251,7 @@ spec = Test.group "Dataflow Warnings" <|
|
||||
warned.has_warnings warning_type=Nothing . should_be_true
|
||||
warned.has_warnings warning_type=Unimplemented . should_be_true
|
||||
|
||||
Test.specify "should allow to remove warnings, by type" <|
|
||||
group_builder.specify "should allow to remove warnings, by type" <|
|
||||
warned = Warning.attach 1 <| Warning.attach "Alpha" <| Warning.attach Nothing <| Warning.attach (Unimplemented.Error "An Error Here") "foo"
|
||||
|
||||
no_int = warned.remove_warnings warning_type=Integer . first
|
||||
@ -266,7 +266,7 @@ spec = Test.group "Dataflow Warnings" <|
|
||||
no_error = warned.remove_warnings Unimplemented
|
||||
Warning.get_all no_error . map .value . should_equal [1, "Alpha", Nothing]
|
||||
|
||||
Test.specify "should allow to throwing warnings, by type" <|
|
||||
group_builder.specify "should allow to throwing warnings, by type" <|
|
||||
warned = Warning.attach 1 <| Warning.attach "Alpha" <| Warning.attach Nothing <| Warning.attach (Unimplemented.Error "An Error Here") "foo"
|
||||
|
||||
warned.throw_on_warning . should_fail_with Integer
|
||||
@ -274,12 +274,12 @@ spec = Test.group "Dataflow Warnings" <|
|
||||
warned.throw_on_warning warning_type=Nothing . should_fail_with Nothing
|
||||
warned.throw_on_warning warning_type=Unimplemented . should_fail_with Unimplemented
|
||||
|
||||
Test.specify "should allow to map the warnings, selectively" <|
|
||||
group_builder.specify "should allow to map the warnings, selectively" <|
|
||||
x = Warning.attach "foo" 1
|
||||
result = x.is_static_nothing x
|
||||
result . should_equal False
|
||||
|
||||
Test.specify "should be allowed in Vector" <|
|
||||
group_builder.specify "should be allowed in Vector" <|
|
||||
a = Warning.attach "a" 1
|
||||
b = Warning.attach "b" 2
|
||||
c = [a, b]
|
||||
@ -288,7 +288,7 @@ spec = Test.group "Dataflow Warnings" <|
|
||||
Warning.get_all c wrap_errors=True . map .value . should_equal [(Map_Error.Error 1 'b'), (Map_Error.Error 0 'a')]
|
||||
Warning.get_all d wrap_errors=True . map .value . should_equal [(Map_Error.Error 1 'b'), (Map_Error.Error 0 'a'), 'd']
|
||||
|
||||
Test.specify "should be preserved after operations on Vector" <|
|
||||
group_builder.specify "should be preserved after operations on Vector" <|
|
||||
even x = (Warning.attach "warn"+x.to_text x) % 2
|
||||
res = [1,2,3,4].map even
|
||||
|
||||
@ -308,7 +308,7 @@ spec = Test.group "Dataflow Warnings" <|
|
||||
r.should_equal [0, 1, 20, 3, 40, 5, 60, 7, 80, 9]
|
||||
Warning.get_all r wrap_errors=True . map .value . should_contain_the_same_elements_as [Map_Error.Error 1 100, Map_Error.Error 3 300, Map_Error.Error 5 500, Map_Error.Error 7 700, Map_Error.Error 9 900]
|
||||
|
||||
Test.specify "should be preserved after operations on multi-dimensional Vector" <|
|
||||
group_builder.specify "should be preserved after operations on multi-dimensional Vector" pending="https://github.com/enso-org/enso/issues/8910" <|
|
||||
even x = (Warning.attach x x) % 2
|
||||
nested_range_even x =
|
||||
(0.up_to x).to_vector.map even
|
||||
@ -319,7 +319,7 @@ spec = Test.group "Dataflow Warnings" <|
|
||||
Warning.get_all res wrap_errors=True . map .value . should_equal [(Map_Error.Error 3 (Map_Error.Error 3 3)), (Map_Error.Error 3 (Map_Error.Error 2 2)), (Map_Error.Error 3 (Map_Error.Error 1 1)), (Map_Error.Error 3 (Map_Error.Error 0 0)), (Map_Error.Error 2 (Map_Error.Error 2 2)), (Map_Error.Error 2 (Map_Error.Error 1 1)), (Map_Error.Error 2 (Map_Error.Error 0 0)), (Map_Error.Error 1 (Map_Error.Error 1 1)), (Map_Error.Error 1 (Map_Error.Error 0 0)), (Map_Error.Error 0 (Map_Error.Error 0 0))]
|
||||
Warning.get_all (res.at 2) . map .value . should_equal [3, 2, 1, 0, 2, 1, 0, 1, 0, 0]
|
||||
|
||||
Test.specify "should be preserved over arbitrary dataflow dependencies between values introduced using `if_not_error`" <|
|
||||
group_builder.specify "should be preserved over arbitrary dataflow dependencies between values introduced using `if_not_error`" <|
|
||||
a = Warning.attach "a" 1
|
||||
b = Warning.attach "b" 2
|
||||
|
||||
@ -329,7 +329,7 @@ spec = Test.group "Dataflow Warnings" <|
|
||||
Warning.get_all a . map .value . should_contain_the_same_elements_as ["a"]
|
||||
Warning.get_all b . map .value . should_contain_the_same_elements_as ["b"]
|
||||
|
||||
Test.specify "should be preserved around polyglot calls" <|
|
||||
group_builder.specify "should be preserved around polyglot calls" <|
|
||||
x = Warning.attach "x" 1
|
||||
|
||||
java_id = Java_Function.identity
|
||||
@ -355,7 +355,7 @@ spec = Test.group "Dataflow Warnings" <|
|
||||
#h x = Warning.attach "h("+x.to_text+")" "{x="+x.to_text+"}"
|
||||
#i x = Warning.attach "i("+x.to_text+")" Nothing
|
||||
|
||||
Test.specify "should be better preserved around polyglot calls expecting a Value" <|
|
||||
group_builder.specify "should be better preserved around polyglot calls expecting a Value" <|
|
||||
x = Warning.attach "x" 1
|
||||
|
||||
f x = Warning.attach "f("+x.to_text+")" <| Pair.new "A" x+10
|
||||
@ -379,7 +379,7 @@ spec = Test.group "Dataflow Warnings" <|
|
||||
r4.should_equal Nothing
|
||||
Warning.get_all r4 . map .value . should_contain_the_same_elements_as ["i(1)", "x"]
|
||||
|
||||
Test.specify "should not affect method dispatch" <|
|
||||
group_builder.specify "should not affect method dispatch" <|
|
||||
a = My_Fancy_Collection.Value 42
|
||||
b = Warning.attach "WARN" <| My_Fancy_Collection.Value 23
|
||||
|
||||
@ -390,7 +390,7 @@ spec = Test.group "Dataflow Warnings" <|
|
||||
a.remove_warnings . should_equal 42
|
||||
b.remove_warnings . should_equal 42
|
||||
|
||||
Test.specify "should not automatically propagate from suspended arguments" <|
|
||||
group_builder.specify "should not automatically propagate from suspended arguments" <|
|
||||
x1 = 33
|
||||
x2 = Warning.attach "WARN" 44
|
||||
x3 = Error.throw (Illegal_State.Error "ERR")
|
||||
@ -407,7 +407,7 @@ spec = Test.group "Dataflow Warnings" <|
|
||||
Problems.assume_no_problems <| condition_1 False x3
|
||||
Problems.assume_no_problems <| condition_2 False x3
|
||||
|
||||
Test.specify "should only report unique warnings" <|
|
||||
group_builder.specify "should only report unique warnings" <|
|
||||
a = 1
|
||||
b = Warning.attach "Foo!" a
|
||||
c = Warning.attach "Bar!" b
|
||||
@ -425,7 +425,7 @@ spec = Test.group "Dataflow Warnings" <|
|
||||
result_4 = f a 1 + f a 2 + f a 3
|
||||
Warning.get_all result_4 . map (x-> x.value.to_text) . should_equal ["Baz!", "Baz!", "Baz!"]
|
||||
|
||||
Test.specify "should only report the first 100 unique warnings" <|
|
||||
group_builder.specify "should only report the first 100 unique warnings" <|
|
||||
vec = (0.up_to 500).map(e -> Warning.attach "Foo!" e)
|
||||
vec_plus_1 = vec.map(e -> e+1)
|
||||
Warning.get_all vec_plus_1 . length . should_equal 100
|
||||
@ -436,7 +436,7 @@ spec = Test.group "Dataflow Warnings" <|
|
||||
Warning.get_all vec_2 . length . should_equal 31
|
||||
Warning.limit_reached vec_2 . should_equal False
|
||||
|
||||
Test.specify "should preserve warnings in tail calls" <|
|
||||
group_builder.specify "should preserve warnings in tail calls" <|
|
||||
v = Warning.attach "Foo" [1, 2, 3]
|
||||
|
||||
result_tail = do_fold_tail v
|
||||
@ -447,7 +447,7 @@ spec = Test.group "Dataflow Warnings" <|
|
||||
result_non_tail . should_equal 6
|
||||
Warning.get_all result_non_tail . map .value . should_equal ["Foo"]
|
||||
|
||||
Test.specify "should not break TCO when warnings are attached to arguments" <|
|
||||
group_builder.specify "should not break TCO when warnings are attached to arguments" <|
|
||||
vec = Vector.new 10000 (i-> i+1)
|
||||
elem1 = Warning.attach "WARNING1" 998
|
||||
vec.contains 998 . should_equal True
|
||||
@ -463,4 +463,8 @@ spec = Test.group "Dataflow Warnings" <|
|
||||
res2 . should_equal True
|
||||
Warning.get_all res2 . map .value . should_equal ["WARNING2"]
|
||||
|
||||
main = Test_Suite.run_main spec
|
||||
main =
|
||||
suite = Test.build suite_builder->
|
||||
add_specs suite_builder
|
||||
suite.run_with_filter
|
||||
|
||||
|
@ -1,11 +1,12 @@
from Standard.Base import all

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
import Standard.Test.Test_Environment
from Standard.Test_New import all
import Standard.Test_New.Test_Environment

spec = Test.group "Environment" <|
Test.specify "should allow to internally override environment variables for testing purposes" <|

add_specs suite_builder = suite_builder.group "Environment" group_builder->
group_builder.specify "should allow to internally override environment variables for testing purposes" <|
old = Environment.get "foobar"

result_0 = Test_Environment.unsafe_with_environment_override "foobar" "value1" 23
@ -32,4 +33,8 @@ spec = Test.group "Environment" <|
x
result_3 . should_equal ["2", "3", "4"]

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

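The Environment spec above now pulls Test_Environment from Standard.Test_New. For illustration, a sketch of how the override is exercised, assuming (as the spec above suggests) that the third argument is the code evaluated while the variable is overridden; the variable name and value are taken from the test above:

    result = Test_Environment.unsafe_with_environment_override "foobar" "value1" <|
        Environment.get "foobar"
    result . should_equal "value1"
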
@ -2,53 +2,53 @@ from Standard.Base import all
import Standard.Base.Errors.Encoding_Error.Encoding_Error
import Standard.Base.Errors.File_Error.File_Error

from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all

spec =

add_specs suite_builder =
sample_xxx = enso_project.data / "sample.xxx"
sample_txt = enso_project.data / "helloworld.txt"
windows_log = enso_project.data / "windows.log"

Test.group "Auto_Detect" <|
Test.specify "should raise an error when reading an unknown file" <|
suite_builder.group "Auto_Detect" group_builder->
group_builder.specify "should raise an error when reading an unknown file" <|
bytes = sample_xxx.read
bytes.should_fail_with File_Error
bytes.catch.should_be_a File_Error.Unsupported_Type

Test.specify "should be able to read a text file" <|
group_builder.specify "should be able to read a text file" <|
content = sample_txt.read
content.should_equal "Hello World!"

Test.specify "should raise a not-found error when reading a nonexistent file even of unknown format" <|
group_builder.specify "should raise a not-found error when reading a nonexistent file even of unknown format" <|
r1 = (File.new "nonexistent.file.of.weird-format").read
r1.should_fail_with File_Error
r1.catch.should_be_a File_Error.Not_Found

Test.group "Bytes" <|
Test.specify "should be able to read a file as Bytes" <|
suite_builder.group "Bytes" group_builder->
group_builder.specify "should be able to read a file as Bytes" <|
bytes = sample_xxx.read Bytes
bytes.should_equal [72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33]

Test.specify "should be able to read a file as Bytes by method" <|
group_builder.specify "should be able to read a file as Bytes by method" <|
bytes = Data.read (sample_xxx) Bytes
bytes.should_equal [72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33]

Test.specify "should be able to read a file as Bytes by path" <|
group_builder.specify "should be able to read a file as Bytes by path" <|
path = sample_xxx.path
bytes = Data.read path Bytes
bytes.should_equal [72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33]

Test.group "Plain_Text" <|
Test.specify "should be able to read a file as Text" <|
suite_builder.group "Plain_Text" group_builder->
group_builder.specify "should be able to read a file as Text" <|
text = sample_xxx.read Plain_Text
text.should_equal "Hello World!"

Test.specify "should be able to read a file as Text with Encoding" <|
group_builder.specify "should be able to read a file as Text with Encoding" <|
text = windows_log.read (Plain_Text Encoding.windows_1252)
text.should_equal "Hello World! $¢¤¥"

Test.specify "should raise a warning when invalid encoding in a Text file" <|
group_builder.specify "should raise a warning when invalid encoding in a Text file" <|
action = windows_log.read (Plain_Text Encoding.ascii) on_problems=_
tester result = result . should_equal 'Hello World! $\uFFFD\uFFFD\uFFFD'
problems = [Encoding_Error.Error "Encoding issues at 14, 15, 16."]
@ -58,8 +58,8 @@ spec =
Problems.expect_only_warning Encoding_Error <|
windows_log.read (Plain_Text Encoding.ascii)

Test.group "JSON_Format" <|
Test.specify "should be able to read a file as Json" <|
suite_builder.group "JSON_Format" group_builder->
group_builder.specify "should be able to read a file as Json" <|
f1 = enso_project.data / "sample.json"
j1 = f1.read
j1.at "arr" . should_equal [1, 2, 3]
@ -73,10 +73,14 @@ spec =
j2.at 2 . should_equal Nothing
j2.at 3 . should_equal "baz"

Test.specify "should gracefully handle malformed files" <|
group_builder.specify "should gracefully handle malformed files" <|
f1 = enso_project.data / "sample-malformed.json"
r1 = f1.read
r1.should_fail_with File_Error
r1.catch.should_be_a File_Error.Corrupted_Format

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

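The read specs above exercise the same sample files through three entry points: `File.read` with format auto-detection, an explicit format argument such as `Bytes` or `Plain_Text`, and the path-based `Data.read`. A condensed sketch of that pattern, reusing the expectations from the tests above:

    sample_xxx = enso_project.data / "sample.xxx"
    sample_xxx.read Bytes . should_equal [72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33]
    sample_xxx.read Plain_Text . should_equal "Hello World!"
    Data.read sample_xxx.path Bytes . should_equal [72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33]
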
@ -9,28 +9,28 @@ import Standard.Base.Runtime.Context
|
||||
|
||||
polyglot java import org.enso.base_test_helpers.FileSystemHelper
|
||||
|
||||
from Standard.Test import Test, Test_Suite, Problems
|
||||
import Standard.Test.Extensions
|
||||
from Standard.Test_New import all
|
||||
|
||||
|
||||
set_writable file writable =
|
||||
path = file.absolute.path
|
||||
FileSystemHelper.setWritable path writable
|
||||
|
||||
spec =
|
||||
add_specs suite_builder =
|
||||
sample_file = enso_project.data / "sample.txt"
|
||||
windows_file = enso_project.data / "windows.txt"
|
||||
non_existent_file = File.new "does_not_exist.txt"
|
||||
|
||||
Test.group "File Operations" <|
|
||||
Test.specify "should allow creating a new file" <|
|
||||
suite_builder.group "File Operations" group_builder->
|
||||
group_builder.specify "should allow creating a new file" <|
|
||||
path = sample_file.path
|
||||
File.new path
|
||||
|
||||
Test.specify "should have `new` be a no-op on a file" <|
|
||||
group_builder.specify "should have `new` be a no-op on a file" <|
|
||||
file = File.new sample_file
|
||||
file . should_equal sample_file
|
||||
|
||||
Test.specify "should allow joining sections" <|
|
||||
group_builder.specify "should allow joining sections" <|
|
||||
f_1 = File.new "foo/bar"
|
||||
(File.new "foo" / "bar") . normalize . should_equal f_1
|
||||
File.new "foo" . join "bar" . should_equal f_1
|
||||
@ -39,11 +39,11 @@ spec =
|
||||
File.new "foo" . join "a" . join "b" . join "c" . join "d" . join "e" . should_equal f_2
|
||||
File.new "foo" . join ["a", "b", "c", "d", "e"] . should_equal f_2
|
||||
|
||||
Test.specify "should check if file exists" <|
|
||||
group_builder.specify "should check if file exists" <|
|
||||
non_existent_file.exists.should_be_false
|
||||
sample_file.exists.should_be_true
|
||||
|
||||
Test.specify "should get file size" <|
|
||||
group_builder.specify "should get file size" <|
|
||||
file_size = sample_file.size
|
||||
## Allows for having line endings replaced.
|
||||
valid_size = file_size==403 || file_size==406
|
||||
@ -57,35 +57,35 @@ spec =
|
||||
folder.should_fail_with File_Error
|
||||
folder.catch.should_be_a File_Error.IO_Error
|
||||
|
||||
Test.specify "should get file name" <|
|
||||
group_builder.specify "should get file name" <|
|
||||
sample_file.name.should_equal "sample.txt"
|
||||
|
||||
Test.specify "should get file extension" <|
|
||||
group_builder.specify "should get file extension" <|
|
||||
sample_file.extension.should_equal ".txt"
|
||||
(File.new "this.is.a.silly.name.txt").extension.should_equal ".txt"
|
||||
(File.new "no_extension").extension.should_equal ""
|
||||
(File.new "file.").extension.should_equal ""
|
||||
|
||||
Test.specify "should convert a file to absolute" <|
|
||||
group_builder.specify "should convert a file to absolute" <|
|
||||
abs = File.new "foo.txt" . absolute
|
||||
through_cwd = (File.current_directory / "foo.txt")
|
||||
abs.should_equal through_cwd
|
||||
|
||||
Test.specify "should normalize file" <|
|
||||
group_builder.specify "should normalize file" <|
|
||||
f_1 = File.new "foo"
|
||||
File.new "bar/../baz/../foo" . normalize . should_equal f_1
|
||||
(File.new "bar" / ".." / "baz" / ".." / "foo") . normalize . should_equal f_1
|
||||
File.new "bar" . join ["..", "baz", "..", "foo"] . should_equal f_1
|
||||
|
||||
Test.specify "should handle `==` on files" <|
|
||||
group_builder.specify "should handle `==` on files" <|
|
||||
(File.new "foo").should_equal (File.new "foo")
|
||||
(File.new "bar").should_not_equal (File.new "foo")
|
||||
|
||||
Test.specify "should allow checking in parent path with `starts_with`" <|
|
||||
group_builder.specify "should allow checking in parent path with `starts_with`" <|
|
||||
(File.new "foo/bar").starts_with (File.new "foo") . should_be_true
|
||||
(File.new "foo/bar").starts_with (File.new "f") . should_be_false
|
||||
|
||||
Test.specify "should allow creating a directory" <|
|
||||
group_builder.specify "should allow creating a directory" <|
|
||||
f = enso_project.data / "good_dir"
|
||||
f.delete_if_exists
|
||||
f.exists.should_be_false
|
||||
@ -104,7 +104,7 @@ spec =
|
||||
g.parent.delete_if_exists
|
||||
f.delete_if_exists
|
||||
|
||||
Test.specify "should only allow creating a directory if Output is enabled" <|
|
||||
group_builder.specify "should only allow creating a directory if Output is enabled" <|
|
||||
f = enso_project.data / "bad_dir"
|
||||
f.delete_if_exists
|
||||
f.exists.should_be_false
|
||||
@ -113,7 +113,7 @@ spec =
|
||||
f.create_directory . should_fail_with Forbidden_Operation
|
||||
f.exists.should_be_false
|
||||
|
||||
Test.specify "should allow reading a file byte by byte" <|
|
||||
group_builder.specify "should allow reading a file byte by byte" <|
|
||||
f = enso_project.data / "short.txt"
|
||||
f.delete_if_exists
|
||||
f.exists.should_be_false
|
||||
@ -126,7 +126,7 @@ spec =
|
||||
f.delete
|
||||
f.exists.should_be_false
|
||||
|
||||
Test.specify "should only allow with_output_stream if Output is enabled" <|
|
||||
group_builder.specify "should only allow with_output_stream if Output is enabled" <|
|
||||
f = enso_project.data / "short.txt"
|
||||
f.delete_if_exists
|
||||
|
||||
@ -145,7 +145,7 @@ spec =
|
||||
|
||||
f.delete_if_exists
|
||||
|
||||
Test.specify "should only allow deleting a file if Output is enabled" <|
|
||||
group_builder.specify "should only allow deleting a file if Output is enabled" <|
|
||||
f = enso_project.data / "short.txt"
|
||||
f.delete_if_exists
|
||||
"Cup".write f on_existing_file=Existing_File_Behavior.Overwrite
|
||||
@ -157,7 +157,7 @@ spec =
|
||||
f.delete.should_succeed
|
||||
f.exists.should_be_false
|
||||
|
||||
Test.specify "should only allow copying a file if Output is enabled" <|
|
||||
group_builder.specify "should only allow copying a file if Output is enabled" <|
|
||||
f = enso_project.data / "short.txt"
|
||||
f.delete_if_exists
|
||||
"Cup".write f on_existing_file=Existing_File_Behavior.Overwrite
|
||||
@ -189,7 +189,7 @@ spec =
|
||||
f.delete_if_exists
|
||||
g.delete_if_exists
|
||||
|
||||
Test.specify "should only allow moving a file if Output is enabled" <|
|
||||
group_builder.specify "should only allow moving a file if Output is enabled" <|
|
||||
f = enso_project.data / "short.txt"
|
||||
f.delete_if_exists
|
||||
"Cup".write f on_existing_file=Existing_File_Behavior.Overwrite
|
||||
@ -223,14 +223,14 @@ spec =
|
||||
f.delete_if_exists
|
||||
g.delete_if_exists
|
||||
|
||||
Test.specify "should handle exceptions when deleting a missing file" <|
|
||||
group_builder.specify "should handle exceptions when deleting a missing file" <|
|
||||
file = File.new "does_not_exist.txt"
|
||||
result = file.delete
|
||||
result.should_fail_with File_Error
|
||||
result.catch.should_be_a File_Error.Not_Found
|
||||
|
||||
if Platform.is_unix then
|
||||
Test.specify "should allow to check file permissions" <|
|
||||
group_builder.specify "should allow to check file permissions" <|
|
||||
f = enso_project.data / "transient" / "permissions.txt"
|
||||
f.delete_if_exists
|
||||
"foobar".write f
|
||||
@ -247,7 +247,7 @@ spec =
|
||||
|
||||
f.delete
|
||||
|
||||
Test.specify "should gracefully handle permission errors" <|
|
||||
group_builder.specify "should gracefully handle permission errors" <|
|
||||
f = enso_project.data / "transient" / "permissions2.txt"
|
||||
f.delete_if_exists
|
||||
"foobar".write f
|
||||
@ -265,17 +265,17 @@ spec =
|
||||
Process.run "chmod" ["0744", f.absolute.path] . exit_code . should_equal Exit_Code.Success
|
||||
f.delete
|
||||
|
||||
Test.group "read_bytes" <|
|
||||
Test.specify "should allow reading a file to byte vector" <|
|
||||
suite_builder.group "read_bytes" group_builder->
|
||||
group_builder.specify "should allow reading a file to byte vector" <|
|
||||
contents = sample_file.read_bytes
|
||||
contents.take (First 6) . should_equal [67, 117, 112, 99, 97, 107]
|
||||
|
||||
Test.specify "should allow reading a file to byte vector via path" <|
|
||||
group_builder.specify "should allow reading a file to byte vector via path" <|
|
||||
full_path = sample_file . path
|
||||
contents = File.new full_path . read_bytes
|
||||
contents.take (First 6) . should_equal [67, 117, 112, 99, 97, 107]
|
||||
|
||||
Test.specify "should allow to read last n bytes from a file" <|
|
||||
group_builder.specify "should allow to read last n bytes from a file" <|
|
||||
file = enso_project.data / "transient" / "bytes.txt"
|
||||
data = [1, 0, 0, 1, 2, 100, 20]
|
||||
Context.Output.with_enabled <|
|
||||
@ -287,14 +287,14 @@ spec =
|
||||
file.read_last_bytes 1000 . should_equal data
|
||||
file.delete
|
||||
|
||||
Test.specify "should handle exceptions when reading a non-existent file" <|
|
||||
group_builder.specify "should handle exceptions when reading a non-existent file" <|
|
||||
file = File.new "does_not_exist.txt"
|
||||
file.read_bytes . should_fail_with File_Error
|
||||
file.read_bytes.catch.should_be_a File_Error.Not_Found
|
||||
non_existent_file.read_bytes . should_fail_with File_Error
|
||||
non_existent_file.read_bytes.catch.should_be_a File_Error.Not_Found
|
||||
|
||||
Test.specify "should open and read the file in one shot" <|
|
||||
group_builder.specify "should open and read the file in one shot" <|
|
||||
path_name = sample_file.path
|
||||
contents = File.new path_name . read_bytes
|
||||
contents.take (First 6) . should_equal [67, 117, 112, 99, 97, 107]
|
||||
@ -302,25 +302,25 @@ spec =
|
||||
contents_2 = File.new file . read_bytes
|
||||
contents_2.take (First 6) . should_equal [67, 117, 112, 99, 97, 107]
|
||||
|
||||
Test.specify "no parent of dot implies nothing" <|
|
||||
group_builder.specify "no parent of dot implies nothing" <|
|
||||
no_parent = (File.new '.').parent
|
||||
no_parent.is_nothing . should_be_true
|
||||
|
||||
Test.group "read_text" <|
|
||||
Test.specify "should allow reading a UTF-8 file" <|
|
||||
suite_builder.group "read_text" group_builder->
|
||||
group_builder.specify "should allow reading a UTF-8 file" <|
|
||||
contents = sample_file.read_text
|
||||
contents.should_start_with "Cupcake ipsum dolor sit amet."
|
||||
|
||||
Test.specify "should allow reading a UTF-8 file via path" <|
|
||||
group_builder.specify "should allow reading a UTF-8 file via path" <|
|
||||
full_path = sample_file . path
|
||||
contents = Data.read_text full_path
|
||||
contents.should_start_with "Cupcake ipsum dolor sit amet."
|
||||
|
||||
Test.specify "should allow reading a Windows file via path" <|
|
||||
group_builder.specify "should allow reading a Windows file via path" <|
|
||||
contents = windows_file.read_text Encoding.windows_1252
|
||||
contents.should_equal "Hello World! $¢¤¥"
|
||||
|
||||
Test.specify "should raise warnings when reading invalid characters" <|
|
||||
group_builder.specify "should raise warnings when reading invalid characters" <|
|
||||
action = windows_file.read_text Encoding.ascii on_problems=_
|
||||
tester result = result.should_equal 'Hello World! $\uFFFD\uFFFD\uFFFD'
|
||||
problems = [Encoding_Error.Error "Encoding issues at 14, 15, 16."]
|
||||
@ -330,7 +330,7 @@ spec =
|
||||
Problems.expect_only_warning Encoding_Error <|
|
||||
windows_file.read_text Encoding.ascii
|
||||
|
||||
Test.specify "should handle exceptions when reading a non-existent file" <|
|
||||
group_builder.specify "should handle exceptions when reading a non-existent file" <|
|
||||
file = File.new "does_not_exist.txt"
|
||||
r1 = Data.read_text "does_not_exist.txt"
|
||||
r1.should_fail_with File_Error
|
||||
@ -342,7 +342,7 @@ spec =
|
||||
r3.should_fail_with File_Error
|
||||
r3.catch.should_be_a File_Error.Not_Found
|
||||
|
||||
Test.specify "should open and read the file in one shot" <|
|
||||
group_builder.specify "should open and read the file in one shot" <|
|
||||
path_name = sample_file.path
|
||||
contents = Data.read_text path_name
|
||||
contents.should_start_with "Cupcake ipsum dolor sit amet."
|
||||
@ -350,12 +350,12 @@ spec =
|
||||
contents_2 = Data.read_text file
|
||||
contents_2.should_start_with "Cupcake ipsum dolor sit amet."
|
||||
|
||||
Test.group "write operations" <|
|
||||
suite_builder.group "write operations" group_builder->
|
||||
data = [32, 127, -128, 0]
|
||||
data_2 = [10, 15, 20, 30]
|
||||
|
||||
transient = enso_project.data / "transient"
|
||||
Test.specify "should allow to writing bytes to a new file and return that file's descriptor on success" <|
|
||||
group_builder.specify "should allow to writing bytes to a new file and return that file's descriptor on success" <|
|
||||
f = transient / "new_file.dat"
|
||||
f.delete_if_exists
|
||||
f.exists.should_be_false
|
||||
@ -366,7 +366,7 @@ spec =
|
||||
f.read_bytes.should_equal data
|
||||
f.delete_if_exists
|
||||
|
||||
Test.specify "should backup a file when overwriting with new bytes" <|
|
||||
group_builder.specify "should backup a file when overwriting with new bytes" <|
|
||||
f = transient / "work.txt"
|
||||
f.delete_if_exists
|
||||
f_bak = transient / "work.txt.bak"
|
||||
@ -379,7 +379,7 @@ spec =
|
||||
f.delete_if_exists
|
||||
f_bak.delete_if_exists
|
||||
|
||||
Test.specify "should allow overwriting a file with new bytes" <|
|
||||
group_builder.specify "should allow overwriting a file with new bytes" <|
|
||||
f = transient / "work.txt"
|
||||
f.delete_if_exists
|
||||
f_bak = transient / "work.txt.bak"
|
||||
@ -393,7 +393,7 @@ spec =
|
||||
f_bak.exists.should_be_false
|
||||
f.delete_if_exists
|
||||
|
||||
Test.specify "should allow appending bytes to a new file" <|
|
||||
group_builder.specify "should allow appending bytes to a new file" <|
|
||||
f = transient / "new_file.dat"
|
||||
f.delete_if_exists
|
||||
f.exists.should_be_false
|
||||
@ -404,14 +404,14 @@ spec =
|
||||
f.read_bytes.should_equal (data + data_2)
|
||||
f.delete_if_exists
|
||||
|
||||
Test.specify "should fail with Illegal_Argument when trying to write invalid byte vector" <|
|
||||
group_builder.specify "should fail with Illegal_Argument when trying to write invalid byte vector" <|
|
||||
f = transient / "work.txt"
|
||||
f.delete_if_exists
|
||||
f.exists.should_be_false
|
||||
[0, 1, 256].write_bytes f . should_fail_with Illegal_Argument
|
||||
[0, 1, Nothing].write_bytes f . should_fail_with Illegal_Argument
|
||||
|
||||
Test.specify "should not change the file when trying to write an invalid byte vector" <|
|
||||
group_builder.specify "should not change the file when trying to write an invalid byte vector" <|
|
||||
f = transient / "work.txt"
|
||||
f.delete_if_exists
|
||||
f_bak = transient / "work.txt.bak"
|
||||
@ -426,7 +426,7 @@ spec =
|
||||
f.read_bytes.should_equal data
|
||||
f.delete_if_exists
|
||||
|
||||
Test.specify "should allow writing text to a new file and return this file's descriptor on success" <|
|
||||
group_builder.specify "should allow writing text to a new file and return this file's descriptor on success" <|
|
||||
f = transient / "work.txt"
|
||||
f.delete_if_exists
|
||||
f.exists.should_be_false
|
||||
@ -438,7 +438,7 @@ spec =
|
||||
f.delete
|
||||
f.exists.should_be_false
|
||||
|
||||
Test.specify "should perform a dry run writing text to a new file if Context.Output is disabled" <|
|
||||
group_builder.specify "should perform a dry run writing text to a new file if Context.Output is disabled" <|
|
||||
f = transient / "work.txt"
|
||||
f.delete_if_exists
|
||||
f.exists.should_be_false
|
||||
@ -452,7 +452,7 @@ spec =
|
||||
|
||||
r.delete_if_exists
|
||||
|
||||
Test.specify "should allow appending text to a file" <|
|
||||
group_builder.specify "should allow appending text to a file" <|
|
||||
f = transient / "work.txt"
|
||||
f.delete_if_exists
|
||||
"line 1!".write f on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed . should_equal f
|
||||
@ -461,7 +461,7 @@ spec =
|
||||
f.delete
|
||||
f.exists.should_be_false
|
||||
|
||||
Test.specify "should perform a dry run appending text to a file if Context.Output is disabled" <|
|
||||
group_builder.specify "should perform a dry run appending text to a file if Context.Output is disabled" <|
|
||||
f = transient / "work.txt"
|
||||
f.delete_if_exists
|
||||
"line 1!".write f on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed . should_equal f
|
||||
@ -475,7 +475,7 @@ spec =
|
||||
|
||||
Context.Output.with_enabled <| r.delete_if_exists
|
||||
|
||||
Test.specify "if Context.Output is disabled, will always start from the file given - so the effects of previous dry run are not visible" <|
|
||||
group_builder.specify "if Context.Output is disabled, will always start from the file given - so the effects of previous dry run are not visible" <|
|
||||
f = transient / "dry_append.txt"
|
||||
f.delete_if_exists
|
||||
|
||||
@ -498,7 +498,7 @@ spec =
|
||||
|
||||
Context.Output.with_enabled r.delete_if_exists
|
||||
|
||||
Test.specify "if Context.Output is disabled, will append to the dry run file if the dry run file descriptor is passed as the write target" <|
|
||||
group_builder.specify "if Context.Output is disabled, will append to the dry run file if the dry run file descriptor is passed as the write target" <|
|
||||
f = transient / "dry_append.txt"
|
||||
f.delete_if_exists
|
||||
|
||||
@ -521,7 +521,7 @@ spec =
|
||||
|
||||
Context.Output.with_enabled dry_run_file.delete_if_exists
|
||||
|
||||
Test.specify "should allow to overwrite files" <|
|
||||
group_builder.specify "should allow to overwrite files" <|
|
||||
f = transient / "work.txt"
|
||||
f.delete_if_exists
|
||||
f.exists.should_be_false
|
||||
@ -533,7 +533,7 @@ spec =
|
||||
f.delete
|
||||
f.exists.should_be_false
|
||||
|
||||
Test.specify "should not overwrite original file if Context.Output is disabled" <|
|
||||
group_builder.specify "should not overwrite original file if Context.Output is disabled" <|
|
||||
f = transient / "work.txt"
|
||||
f.delete_if_exists
|
||||
f.exists.should_be_false
|
||||
@ -549,7 +549,7 @@ spec =
|
||||
f.read_text.should_equal "line 1!"
|
||||
f.delete_if_exists
|
||||
|
||||
Test.specify "should fail if a file already exists, depending on the settings" <|
|
||||
group_builder.specify "should fail if a file already exists, depending on the settings" <|
|
||||
f = transient / "work.txt"
|
||||
f.delete_if_exists
|
||||
f.exists.should_be_false
|
||||
@ -571,7 +571,7 @@ spec =
|
||||
f.delete
|
||||
f.exists.should_be_false
|
||||
|
||||
Test.specify "should create a backup when writing a file" <|
|
||||
group_builder.specify "should create a backup when writing a file" <|
|
||||
f = transient / "work.txt"
|
||||
f.delete_if_exists
|
||||
f.exists.should_be_false
|
||||
@ -602,7 +602,7 @@ spec =
|
||||
n.read_text . should_equal "new content"
|
||||
[f, bak, n0, n1, n2, n4].each .delete
|
||||
|
||||
Test.specify "should not create a backup when writing a dry run file with Context.Output disabled" <|
|
||||
group_builder.specify "should not create a backup when writing a dry run file with Context.Output disabled" <|
|
||||
f = transient / "work.txt"
|
||||
f.delete_if_exists
|
||||
f.exists.should_be_false
|
||||
@ -627,7 +627,7 @@ spec =
|
||||
bak.delete_if_exists
|
||||
r.delete_if_exists
|
||||
|
||||
Test.specify "should correctly handle failure of the write operation when working with the backup" <|
|
||||
group_builder.specify "should correctly handle failure of the write operation when working with the backup" <|
|
||||
f = transient / "work.txt"
|
||||
"OLD".write f on_existing_file=Existing_File_Behavior.Overwrite
|
||||
bak_file = transient / "work.txt.bak"
|
||||
@ -691,7 +691,7 @@ spec =
|
||||
Test.fail "The temporary file should have been cleaned up."
|
||||
f.delete
|
||||
|
||||
Test.specify "should respect file access permissions and fail with an error if denied, in all modes" <|
|
||||
group_builder.specify "should respect file access permissions and fail with an error if denied, in all modes" <|
|
||||
f = transient / "permissions3.txt"
|
||||
if f.exists then set_writable f True
|
||||
f.delete_if_exists
|
||||
@ -722,7 +722,7 @@ spec =
|
||||
set_writable f True
|
||||
f.delete
|
||||
|
||||
Test.specify "should fail if the parent directory does not exist" <|
|
||||
group_builder.specify "should fail if the parent directory does not exist" <|
|
||||
parent = transient / "nonexistent"
|
||||
parent.exists.should_be_false
|
||||
|
||||
@ -737,7 +737,7 @@ spec =
|
||||
r2.should_fail_with File_Error
|
||||
r2.catch.should_be_a File_Error.Not_Found
|
||||
|
||||
Test.specify "Should handle parent-less file paths" <|
|
||||
group_builder.specify "Should handle parent-less file paths" <|
|
||||
transient = enso_project.data / "transient"
|
||||
f1 = transient / "./test1.txt"
|
||||
f2_filename = "parentlesstest" + Date_Time.now.to_unix_epoch_milliseconds.to_text + ".txt"
|
||||
@ -763,7 +763,7 @@ spec =
|
||||
f1.read_text . should_equal txt2
|
||||
f2.read_text . should_equal txt2
|
||||
|
||||
Test.specify "should warn about not-encodable characters according to the problem behaviour" <|
|
||||
group_builder.specify "should warn about not-encodable characters according to the problem behaviour" <|
|
||||
f = transient / "encoding-errors.txt"
|
||||
|
||||
encoding = Encoding.ascii
|
||||
@ -786,17 +786,17 @@ spec =
|
||||
f.read . should_equal "Initial Content"
|
||||
f.delete
|
||||
|
||||
Test.group "folder operations" <|
|
||||
suite_builder.group "folder operations" group_builder->
|
||||
resolve files =
|
||||
base = enso_project.data
|
||||
files.map str->
|
||||
(base / str) . to_text
|
||||
|
||||
Test.specify "should check if file is a directory" <|
|
||||
group_builder.specify "should check if file is a directory" <|
|
||||
sample_file.is_directory.should_be_false
|
||||
enso_project.root.is_directory.should_be_true
|
||||
|
||||
Test.specify "should list files in a directory" <|
|
||||
group_builder.specify "should list files in a directory" <|
|
||||
immediate = enso_project.data.list . map .to_text
|
||||
immediate.sort.should_equal (resolve ["books.json", "helloworld.txt", "sample-json.weird-extension", "sample-malformed.json", "sample.json", "sample.png", "sample.txt", "sample.xxx", "transient", "tree", "windows.log", "windows.txt", 'xml'])
|
||||
|
||||
@ -806,7 +806,7 @@ spec =
|
||||
filtered2 = Data.list_directory enso_project.data name_filter="*re*" . map .to_text
|
||||
filtered2.should_equal (resolve ["tree"])
|
||||
|
||||
Test.specify "should list files in a directory recursively" <|
|
||||
group_builder.specify "should list files in a directory recursively" <|
|
||||
root = enso_project.data / "tree"
|
||||
resolve files = files.map str-> (root / str) . to_text
|
||||
|
||||
@ -828,4 +828,8 @@ spec =
|
||||
filtered4 = root.list name_filter="nested/*.txt" recursive=True . map .to_text
|
||||
filtered4.sort.should_equal (resolve ["subdirectory/nested/b.txt"])
|
||||
|
||||
main = Test_Suite.run_main spec
|
||||
main =
|
||||
suite = Test.build suite_builder->
|
||||
add_specs suite_builder
|
||||
suite.run_with_filter
|
||||
|
||||
|
@ -1,18 +1,18 @@
from Standard.Base import all

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

spec =
Test.group "Process" <|
Test.specify "should call simple command" <|

add_specs suite_builder =
suite_builder.group "Process" group_builder->
group_builder.specify "should call simple command" <|
result = case Platform.os of
Platform.OS.Windows ->
Process.run "PowerShell" ["-Command", "exit 0"]
_ ->
Process.run "bash" ["-c", "exit 0"]
result.exit_code.should_equal Exit_Code.Success
Test.specify "should return exit code" <|
group_builder.specify "should return exit code" <|
case Platform.os of
Platform.OS.Unknown ->
Test.fail "Unsupported platform."
@ -28,7 +28,7 @@ spec =

s = Process.run "bash" ["-c", "exit 0"]
s.exit_code.should_equal <| Exit_Code.Success
Test.specify "should return stdout" <|
group_builder.specify "should return stdout" <|
case Platform.os of
Platform.OS.Unknown ->
Test.fail "Unsupported platform."
@ -54,7 +54,7 @@ spec =
run_result.exit_code.to_number . should_equal 0
run_result.stdout . should_equal "Hello"
run_result.stderr . should_equal ""
Test.specify "should return stderr" <|
group_builder.specify "should return stderr" <|
case Platform.os of
Platform.OS.Unknown ->
Test.fail "Unsupported platform."
@ -80,7 +80,7 @@ spec =
run_result.exit_code.to_number . should_equal 0
run_result.stdout . should_equal ""
run_result.stderr . should_equal "Error"
Test.specify "should feed stdin" <|
group_builder.specify "should feed stdin" <|
case Platform.os of
Platform.OS.Unknown ->
Test.fail "Unsupported platform."
@ -107,4 +107,8 @@ spec =
run_result.stdout . should_equal 'sample'
run_result.stderr . should_equal ""

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

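The Process group above keeps its cross-platform shape: pick a shell per Platform.os, run it, and assert on the exit code (and, in the later specs, on stdout and stderr). A condensed sketch of the success case drawn from the tests above:

    result = case Platform.os of
        Platform.OS.Windows ->
            Process.run "PowerShell" ["-Command", "exit 0"]
        _ ->
            Process.run "bash" ["-c", "exit 0"]
    result.exit_code.should_equal Exit_Code.Success
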
@ -3,14 +3,14 @@ import Standard.Base.Errors.Encoding_Error.Encoding_Error

polyglot java import java.nio.CharBuffer

from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all

spec =

add_specs suite_builder =
windows_file = enso_project.data / "windows.txt"

Test.group "ReportingStreamDecoder" <|
Test.specify "should allow reading a file character by character" <|
suite_builder.group "ReportingStreamDecoder" group_builder->
group_builder.specify "should allow reading a file character by character" <|
f = enso_project.data / "short.txt"
f.delete_if_exists
f.exists.should_be_false
@ -24,7 +24,7 @@ spec =
f.delete
f.exists.should_be_false

Test.specify "should work correctly when reading chunks of varying sizes" <|
group_builder.specify "should work correctly when reading chunks of varying sizes" <|
f = enso_project.data / "transient" / "varying_chunks.txt"
fragment = 'Hello 😎🚀🚧!'
contents = 1.up_to 1000 . map _->fragment . join '\n'
@ -73,7 +73,7 @@ spec =
result.should_succeed
f.delete

Test.specify "should allow reading a UTF-8 file" <|
group_builder.specify "should allow reading a UTF-8 file" <|
f = enso_project.data / "transient" / "utf8.txt"
encoding = Encoding.utf_8
((0.up_to 100).map _->'Hello World!' . join '\n').write f . should_succeed
@ -81,13 +81,13 @@ spec =
contents = read_file_one_by_one f encoding expected_contents.length
contents.should_equal expected_contents

Test.specify "should allow reading a Windows file" <|
group_builder.specify "should allow reading a Windows file" <|
encoding = Encoding.windows_1252
expected_contents = "Hello World! $¢¤¥"
contents = read_file_one_by_one windows_file encoding expected_contents.length
contents.should_equal expected_contents

Test.specify "should raise warnings when reading invalid characters" <|
group_builder.specify "should raise warnings when reading invalid characters" <|
encoding = Encoding.ascii
expected_contents = 'Hello World! $\uFFFD\uFFFD\uFFFD'
expected_problems = [Encoding_Error.Error "Encoding issues at bytes 14, 15, 16."]
@ -106,7 +106,7 @@ spec =
contents_2.should_equal expected_contents
Problems.get_attached_warnings contents_2 . should_equal expected_problems

Test.specify "should work correctly if no data is read from it" <|
group_builder.specify "should work correctly if no data is read from it" <|
result = windows_file.with_input_stream [File_Access.Read] stream->
stream.with_stream_decoder Encoding.ascii Problem_Behavior.Report_Error _->Nothing
result.should_succeed
@ -134,4 +134,8 @@ read_characters decoder n =
transfer_codepoints Nothing
v.to_vector

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -5,12 +5,12 @@ import Standard.Base.Errors.Illegal_State.Illegal_State
polyglot java import org.enso.base.Encoding_Utils
polyglot java import java.nio.CharBuffer

from Standard.Test import Test, Test_Suite, Problems
import Standard.Test.Extensions
from Standard.Test_New import all

spec =
Test.group "ReportingStreamEncoder" <|
Test.specify "should allow writing a file codepoint by codepoint" <|

add_specs suite_builder =
suite_builder.group "ReportingStreamEncoder" group_builder->
group_builder.specify "should allow writing a file codepoint by codepoint" <|
f = enso_project.data / "transient" / "char-by-char.txt"
f.delete_if_exists
f.exists.should_be_false
@ -21,7 +21,7 @@ spec =
reporting_stream_encoder.write char
f.read_text.should_equal contents

Test.specify "should work correctly when writing chunks of varying sizes" <|
group_builder.specify "should work correctly when writing chunks of varying sizes" <|
f = enso_project.data / "transient" / "varying-utf16.txt"
f.delete_if_exists
f.exists.should_be_false
@ -41,7 +41,7 @@ spec =
contents = 'AAbcDefghiO\u0301X' + big + "YŹ"
f.read_text encoding . should_equal contents

Test.specify "should allow writing a Windows file" <|
group_builder.specify "should allow writing a Windows file" <|
f = enso_project.data / "transient" / "windows.txt"
encoding = Encoding.windows_1252
contents = "Hello World! $¢¤¥"
@ -53,7 +53,7 @@ spec =

f.read_text encoding . should_equal contents

Test.specify "should raise warnings when writing characters that cannot be encoded and replace them with the Unicode replacement character or a question mark" <|
group_builder.specify "should raise warnings when writing characters that cannot be encoded and replace them with the Unicode replacement character or a question mark" <|
f = enso_project.data / "transient" / "ascii.txt"
encoding = Encoding.ascii
contents = 'Sło\u0301wka!'
@ -78,7 +78,7 @@ spec =
Problems.get_attached_warnings result_2 . should_equal [Encoding_Error.Error "Encoding issues at codepoints 3, 9."]
f.read_text encoding . should_equal "ABC?foo -?- bar"

Test.specify "should work correctly if no data is written to it" <|
group_builder.specify "should work correctly if no data is written to it" <|
f = enso_project.data / "transient" / "empty.txt"
encoding = Encoding.ascii
f.delete_if_exists
@ -87,7 +87,7 @@ spec =
result.should_succeed
f.read_text encoding . should_equal ""

Test.specify "should forward any dataflow errors raised in the inner action" <|
group_builder.specify "should forward any dataflow errors raised in the inner action" <|
f = enso_project.data / "transient" / "error.txt"
encoding = Encoding.ascii
f.delete_if_exists
@ -99,7 +99,7 @@ spec =
result.catch.message.should_equal "FOO"
f.delete_if_exists

Test.specify "should forward any warnings attached to the result of the inner action (1)" <|
group_builder.specify "should forward any warnings attached to the result of the inner action (1)" <|
f = enso_project.data / "transient" / "warning.txt"
encoding = Encoding.ascii
f.delete_if_exists
@ -114,7 +114,7 @@ spec =
f.read_text encoding . should_equal "BAZ23"
f.delete_if_exists

Test.specify "should forward any warnings attached to the result of the inner action (2)" <|
group_builder.specify "should forward any warnings attached to the result of the inner action (2)" <|
f = enso_project.data / "transient" / "warning.txt"
encoding = Encoding.ascii
f.delete_if_exists
@ -129,4 +129,8 @@ spec =
f.read_text encoding . should_equal "BAZ23"
f.delete_if_exists

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -1,16 +1,20 @@
from Standard.Base import all

from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
from Standard.Test_New import all

spec = Test.group "System" <|
Test.specify "should provide nanosecond timer" <|

add_specs suite_builder = suite_builder.group "System" group_builder->
group_builder.specify "should provide nanosecond timer" <|
result = System.nano_time
(result > 0).should_equal True

if Platform.is_unix then
Test.specify "should be able to create a process, returning an exit code" <|
group_builder.specify "should be able to create a process, returning an exit code" <|
result = System.create_process "echo" ["foo", "bar"] "" False False False
result.exit_code . should_equal 0

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

@ -4,20 +4,24 @@ import Standard.Base.Errors.Illegal_State.Illegal_State
import Standard.Base.System.File.Advanced.Temporary_File.Temporary_File
import Standard.Base.System.Input_Stream.Input_Stream

from Standard.Test import Test, Test_Suite
from Standard.Test.Execution_Context_Helpers import run_with_and_without_output
import Standard.Test.Extensions
from Standard.Test_New import all
from Standard.Test_New.Execution_Context_Helpers import run_with_and_without_output

polyglot java import java.io.File as Java_File
polyglot java import java.io.ByteArrayInputStream
polyglot java import java.io.FileInputStream
polyglot java import java.io.InputStream

main = Test_Suite.run_main spec
main =
suite = Test.build suite_builder->
add_specs suite_builder
suite.run_with_filter

spec =
Test.group "Temporary_File facility" <|
Test.specify "should allow to create a new file and allow to dispose it manually" <|

add_specs suite_builder =
suite_builder.group "Temporary_File facility" group_builder->
group_builder.specify "should allow to create a new file and allow to dispose it manually" <|
tmp = Temporary_File.new
tmp.with_file f->
"test".write f
@ -33,7 +37,7 @@ spec =
Test.expect_panic Illegal_State <| tmp.with_file (f->f.read Plain_Text)
raw_file.exists . should_be_false

Test.specify "should allow to create a new file and allow to dispose it once the reference is dropped" <|
group_builder.specify "should allow to create a new file and allow to dispose it once the reference is dropped" <|
f foo =
# The tmp file is limited to the scope of the function.
tmp = Temporary_File.new
@ -63,20 +67,20 @@ spec =
repeat_gc 999
raw_file.exists . should_be_false

Test.specify "should allow to materialize an input stream, regardless of Output Context settings" <|
group_builder.specify "should allow to materialize an input stream, regardless of Output Context settings" <|
run_with_and_without_output <|
stream = make_stream "test payload 1"
tmp = Temporary_File.from_stream stream
tmp.with_file f->
f.read Plain_Text . should_equal "test payload 1"

Test.specify "will fail if materializing an already closed input stream" <|
group_builder.specify "will fail if materializing an already closed input stream" <|
stream = Input_Stream.new (InputStream.nullInputStream) (File_Error.handle_java_exceptions Nothing)
stream.close

Test.expect_panic File_Error <| Temporary_File.from_stream stream

Test.specify "should be able to be converted to text, and indicate if it was disposed" <|
group_builder.specify "should be able to be converted to text, and indicate if it was disposed" <|
tmp = Temporary_File.new "pref" ".suf"
tmp.to_text.should_contain "Temporary_File"
tmp.to_text.should_contain "pref"
@ -88,7 +92,7 @@ spec =
tmp.to_text . should_not_contain "pref"
tmp.to_text . should_not_contain "suf"

Test.specify "should allow to materialize an input stream that is already associated with a temporary file without copying it" <|
group_builder.specify "should allow to materialize an input stream that is already associated with a temporary file without copying it" <|
tmp = Temporary_File.new
tmp.with_file f->
"test payload 3" . write f
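
The Temporary_File specs above cover both creation paths: `Temporary_File.new` followed by `with_file`, and `Temporary_File.from_stream` for materializing an input stream. A brief sketch combining them, where `make_stream` stands for the helper used by the spec module above and is not defined in this hunk:

    tmp = Temporary_File.new
    tmp.with_file f->
        "test".write f

    materialized = Temporary_File.from_stream (make_stream "test payload 1")
    materialized.with_file f->
        f.read Plain_Text . should_equal "test payload 1"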