from Standard.Base import all
from Standard.Base.Runtime import assert
import Standard.Base.Enso_Cloud.Data_Link.Data_Link
import Standard.Base.Errors.Common.Forbidden_Operation
import Standard.Base.Errors.File_Error.File_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Errors.Illegal_State.Illegal_State
import Standard.Base.Runtime.Context

from Standard.AWS import S3, S3_File, AWS_Credential
from Standard.AWS.Errors import AWS_SDK_Error, More_Records_Available, S3_Error, S3_Bucket_Not_Found, S3_Key_Not_Found
import Standard.AWS.Internal.S3_Path.S3_Path

# Needed for custom formats test
from Standard.Table import Table, Excel_Format, Delimited_Format
# Needed for correct `Table.should_equal`
import enso_dev.Table_Tests.Util

from Standard.Test import all

import enso_dev.Base_Tests.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup
import enso_dev.Base_Tests.System.File_Spec as Local_File_Spec

test_credentials -> AWS_Credential ! Illegal_State =
    access_key_id = Environment.get "ENSO_LIB_S3_AWS_ACCESS_KEY_ID" ""
    secret_access_key = Environment.get "ENSO_LIB_S3_AWS_SECRET_ACCESS_KEY" ""
    credentials_set = access_key_id.not_empty && secret_access_key.not_empty
    if credentials_set then AWS_Credential.Key access_key_id secret_access_key else
        Error.throw (Illegal_State.Error "No AWS credentials found in the environment.")

## Runs the action, overriding `AWS_Credential.Default` to point to `test_credentials`.
with_default_credentials ~action =
    AWS_Credential.with_default_override test_credentials action

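# If no credentials are configured, `api_pending` holds the error message and is
# passed as `pending=` to the API-dependent groups below, so those tests are
# skipped (with that reason) rather than failed.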
api_pending = if test_credentials.is_error then test_credentials.catch.message else Nothing

bucket_name = "enso-data-samples"
writable_bucket_name = "enso-ci-s3-test-stage"
writable_root = S3_File.new "s3://"+writable_bucket_name+"/" test_credentials
not_a_bucket_name = "not_a_bucket_enso"

delete_on_panic file ~action =
    handler caught_panic =
        file.delete
        Panic.throw caught_panic
    Panic.catch Any action handler

delete_afterwards file ~action =
    Panic.with_finalizer file.delete action

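# A minimal usage sketch of the cleanup helpers above (illustrative only, not part
# of the suite): `delete_on_panic` removes the file only if the wrapped action
# panics, while `delete_afterwards` always removes it once the action completes.
#
#     delete_afterwards tmp_file <|
#         tmp_file.read . should_equal "expected contents"
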
## Reads the datalink as plain text and replaces the placeholder username with
   the actual one. It then writes the new contents to a temporary file and
   returns it.
replace_username_in_data_link base_file =
    content = Data_Link.read_raw_config base_file
    new_content = content.replace "USERNAME" Enso_User.current.name
    temp_file = File.create_temporary_file prefix=base_file.name suffix=base_file.extension
    Data_Link.write_raw_config temp_file new_content replace_existing=True . if_not_error temp_file

## To run these tests you need to set the following environment variables:
   - ENSO_LIB_S3_AWS_ACCESS_KEY_ID
   - ENSO_LIB_S3_AWS_SECRET_ACCESS_KEY
   - ENSO_LIB_S3_AWS_REGION

   We add the `ENSO_LIB_S3` prefix to ensure that our tests do not rely on some
   default behaviour and correctly test customizing credentials. In fact, in the
   past we had a bug precisely due to relying on the defaults: https://github.com/enso-org/enso/issues/9284
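   For example (placeholder values only, not real credentials):
       ENSO_LIB_S3_AWS_ACCESS_KEY_ID=AKIAXXXXXXXXXXXXXXXX
       ENSO_LIB_S3_AWS_SECRET_ACCESS_KEY=xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx
       ENSO_LIB_S3_AWS_REGION=eu-west-1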
add_specs suite_builder =
    cloud_setup = Cloud_Tests_Setup.prepare
    suite_builder.group "S3 Path handling" group_builder->
        # Testing just path handling can be done on instances without any credentials:
        root = S3_File.new "s3://"+bucket_name+"/"
        hello_txt = S3_File.new "s3://"+bucket_name+"/examples/folder 2/hello.txt"

        group_builder.specify "parse bucket only uris" <|
            S3_Path.parse "s3://" . should_equal (S3_Path.Value "" "")
            S3_Path.parse "s3://asda" . should_equal (S3_Path.Value "asda" "")
            S3_Path.parse "s3://banana/" . should_equal (S3_Path.Value "banana" "")

        group_builder.specify "parse full paths uris" <|
            S3_Path.parse "s3://banana/apple" . should_equal (S3_Path.Value "banana" "apple")
            S3_Path.parse "s3://banana/apple/orange" . should_equal (S3_Path.Value "banana" "apple/orange")

group_builder.specify "reject invalid urils" <|
|
|
S3_Path.parse "asda" . should_fail_with Illegal_Argument
|
|
S3_Path.parse "s3:///" . should_fail_with Illegal_Argument
|
|
S3_Path.parse "s3:///apple/orange" . should_fail_with Illegal_Argument
|
|
|
|
group_builder.specify "should support path traversal using `/` and `parent`" <|
|
|
root.name . should_equal "/"
|
|
(root / "foo") . path . should_equal "s3://"+bucket_name+"/foo"
|
|
(root / "foo") . name . should_equal "foo"
|
|
|
|
(root / "foo/").is_directory . should_be_true
|
|
(root / "foo/" / "bar") . path . should_equal "s3://"+bucket_name+"/foo/bar"
|
|
# It also works if the trailing slash is missing:
|
|
(root / "foo" / "bar") . path . should_equal "s3://"+bucket_name+"/foo/bar"
|
|
# Even though it is not normally treated as directory:
|
|
(root / "foo").is_directory . should_be_false
|
|
|
|
(root / "foo/../././bar") . should_equal (root / "bar")
|
|
err = (root / "..")
|
|
err.should_fail_with Illegal_Argument
|
|
err.catch.to_display_text . should_contain "Cannot move above root"
|
|
|
|
hello_txt.parent.parent.parent . should_equal root
|
|
(hello_txt / "../../..") . should_equal root
|
|
hello_txt.parent . should_equal (root / "examples/" / "folder 2/")
|
|
hello_txt.parent.is_directory . should_be_true
|
|
|
|
# Leading slash will mean starting back from bucket root:
|
|
(hello_txt / "/foo/bar") . should_equal (root / "foo/bar")
|
|
(hello_txt / "/") . should_equal root
|
|
(hello_txt / "////") . should_equal root
|
|
|
|
group_builder.specify "should support path traversal using `join`" <|
|
|
root.join ["foo", "bar"] . path . should_equal "s3://"+bucket_name+"/foo/bar"
|
|
root.join ["a/b/", "c/d", "e"] . path . should_equal "s3://"+bucket_name+"/a/b/c/d/e"
|
|
hello_txt.join ["../..", ".."] . should_equal root
|
|
hello_txt.join ["..", "a", ".."] . should_equal hello_txt.parent
|
|
|
|
group_builder.specify "will normalize paths upon parsing" <|
|
|
S3_File.new "s3://bucketA/a/b/c/../././d/../e" . path . should_equal "s3://bucketA/a/b/e"
|
|
|
|
group_builder.specify "should allow to check if one path is inside of another" <|
|
|
hello_txt.is_descendant_of root . should_be_true
|
|
root.is_descendant_of hello_txt . should_be_false
|
|
|
|
# A file or directory is not a descendant of itself:
|
|
hello_txt.is_descendant_of hello_txt . should_be_false
|
|
root.is_descendant_of root . should_be_false
|
|
hello_txt.parent.is_descendant_of hello_txt.parent . should_be_false
|
|
|
|
hello_txt.is_descendant_of hello_txt.parent . should_be_true
|
|
(root / "foo").is_descendant_of root . should_be_true
|
|
(root / "foo/bar").is_descendant_of (root / "foo/") . should_be_true
|
|
(root / "foo/bar").is_descendant_of (root / "fo/") . should_be_false
|
|
(root / "foo/bar").is_descendant_of (root / "fo") . should_be_false
|
|
|
|
# Correct path but different bucket will yield false:
|
|
(S3_File.new "s3://bucketA/foo/bar").is_descendant_of (S3_File.new "s3://bucketB/foo/") . should_be_false
|
|
|
|
    object_name = "Bus_Stop_Benches.geojson"
    root = S3_File.new "s3://"+bucket_name+"/" test_credentials
    hello_txt = S3_File.new "s3://"+bucket_name+"/examples/folder 2/hello.txt" test_credentials

suite_builder.group "S3.list_buckets" pending=api_pending group_builder->
|
|
group_builder.specify "should be able to list buckets" <|
|
|
bucket_list = S3.list_buckets credentials=test_credentials . should_succeed
|
|
if bucket_name != Nothing then bucket_list . should_contain bucket_name
|
|
|
|
group_builder.specify "should handle auth issues" <|
|
|
S3.list_buckets (AWS_Credential.Profile "NoSuchProfile") . should_fail_with AWS_SDK_Error
|
|
|
|
group_builder.specify "should not work with invalid credentials" <|
|
|
S3.list_buckets (AWS_Credential.Key "foo" "bar") . should_fail_with S3_Error
|
|
|
|
group_builder.specify "should allow to use Enso secrets within credentials" pending=cloud_setup.pending <| cloud_setup.with_prepared_environment <|
|
|
secret_key_id = Enso_Secret.create "my_test_secret-AWS-keyid" test_credentials.access_key_id
|
|
secret_key_id.should_succeed
|
|
Panic.with_finalizer secret_key_id.delete <|
|
|
secret_key_value = Enso_Secret.create "my_test_secret-AWS-secretkey" test_credentials.secret_access_key
|
|
secret_key_value.should_succeed
|
|
Panic.with_finalizer secret_key_value.delete <| Test.with_retries <|
|
|
r2 = S3.list_buckets (AWS_Credential.Key secret_key_id secret_key_value)
|
|
r2.should_succeed
|
|
r2.should_be_a Vector
|
|
|
|
suite_builder.group "S3.head (bucket)" pending=api_pending group_builder->
|
|
group_builder.specify "should be able to head a bucket" <|
|
|
S3.head bucket_name credentials=test_credentials . should_be_a Dictionary
|
|
S3.head not_a_bucket_name credentials=test_credentials . should_fail_with S3_Bucket_Not_Found
|
|
|
|
suite_builder.group "S3.read_bucket" pending=api_pending group_builder->
|
|
group_builder.specify "should be able to read bucket" <|
|
|
objects_and_folders = S3.read_bucket bucket_name credentials=test_credentials
|
|
folders = objects_and_folders.first
|
|
folders . should_contain "examples/"
|
|
|
|
objects = objects_and_folders.second
|
|
objects . should_contain object_name
|
|
|
|
group_builder.specify "should be able to read sub folder" <|
|
|
objects_and_folders = S3.read_bucket bucket_name "examples/" credentials=test_credentials
|
|
folders = objects_and_folders.first
|
|
folders . should_contain "examples/folder 1/"
|
|
|
|
group_builder.specify "should attach a warning if not a complete list" <|
|
|
objects = S3.read_bucket bucket_name max_count=1 credentials=test_credentials
|
|
|
|
warnings = Warning.get_all objects
|
|
warnings.length . should_equal 1
|
|
|
|
warning = warnings.first
|
|
warning.value.should_be_a More_Records_Available
|
|
|
|
group_builder.specify "should handle missing bucket gracefully" <|
|
|
S3.read_bucket not_a_bucket_name credentials=test_credentials . should_fail_with S3_Bucket_Not_Found
|
|
|
|
group_builder.specify "should handle auth issues" <|
|
|
S3.read_bucket bucket_name credentials=(AWS_Credential.Profile "NoSuchProfile") . should_fail_with AWS_SDK_Error
|
|
|
|
suite_builder.group "S3.list_objects" pending=api_pending group_builder->
|
|
group_builder.specify "should be able to list objects" <|
|
|
objects = S3.list_objects bucket_name credentials=test_credentials
|
|
objects . map .name . should_contain object_name
|
|
|
|
group_builder.specify "should attach a warning if not a complete list" <|
|
|
objects = S3.list_objects bucket_name max_count=1 credentials=test_credentials
|
|
|
|
warnings = Warning.get_all objects
|
|
warnings.length . should_equal 1
|
|
|
|
warning = warnings.first
|
|
warning.value.should_be_a More_Records_Available
|
|
|
|
group_builder.specify "should handle missing bucket gracefully" <|
|
|
S3.list_objects not_a_bucket_name credentials=test_credentials . should_fail_with S3_Bucket_Not_Found
|
|
|
|
group_builder.specify "should handle auth issues" <|
|
|
S3.list_objects bucket_name credentials=(AWS_Credential.Profile "NoSuchProfile") . should_fail_with AWS_SDK_Error
|
|
|
|
suite_builder.group "S3.head (object)" pending=api_pending group_builder->
|
|
group_builder.specify "should be able to head an object" <|
|
|
S3.head bucket_name object_name credentials=test_credentials . should_succeed
|
|
S3.head not_a_bucket_name object_name credentials=test_credentials . should_fail_with S3_Key_Not_Found
|
|
S3.head bucket_name "not_an_object" credentials=test_credentials . should_fail_with S3_Key_Not_Found
|
|
|
|
group_builder.specify "should handle auth issues" <|
|
|
S3.list_objects bucket_name object_name credentials=(AWS_Credential.Profile "NoSuchProfile") . should_fail_with AWS_SDK_Error
|
|
|
|
suite_builder.group "S3.get_object" pending=api_pending group_builder->
|
|
group_builder.specify "should be able to get an object" <|
|
|
response = S3.get_object bucket_name object_name credentials=test_credentials
|
|
response.should_succeed
|
|
response.decode_as_json.should_succeed
|
|
|
|
S3.get_object not_a_bucket_name object_name credentials=test_credentials . should_fail_with S3_Bucket_Not_Found
|
|
S3.get_object bucket_name "not_an_object" credentials=test_credentials . should_fail_with S3_Key_Not_Found
|
|
|
|
group_builder.specify "should handle auth issues" <|
|
|
S3.get_object bucket_name object_name credentials=(AWS_Credential.Profile "NoSuchProfile") . should_fail_with AWS_SDK_Error
|
|
|
|
suite_builder.group "S3_File reading" pending=api_pending group_builder->
|
|
group_builder.specify "should be able to list the bucket's root directory" <|
|
|
r = root.list
|
|
r.should_succeed
|
|
|
|
r.map .name . should_contain object_name
|
|
|
|
group_builder.specify "will fail if no credentials are provided and no Default credentials are available" pending=(if AWS_Credential.is_default_credential_available then "Default AWS credentials are defined in the environment and this test has no way of overriding them, so it is impossible to test this scenario in such environment.") <|
|
|
root_without_credentials = S3_File.new "s3://"+bucket_name+"/"
|
|
r = root_without_credentials.list
|
|
r.should_fail_with AWS_SDK_Error
|
|
|
|
group_builder.specify "should be able to read a file" <|
|
|
f = root / "locations.json"
|
|
r = f.read
|
|
r.should_be_a Vector
|
|
r.at 0 . get "name" . should_equal "Green St Green"
|
|
|
|
group_builder.specify "should work with Data.read" <|
|
|
with_default_credentials <|
|
|
r = Data.read "s3://"+bucket_name+"/examples/folder 2/hello.txt"
|
|
r.should_equal "Hello WORLD!"
|
|
|
|
group_builder.specify "should be able to read a file as bytes or stream" <|
|
|
bytes = hello_txt.read_bytes
|
|
bytes.should_equal "Hello WORLD!".utf_8
|
|
|
|
bytes2 = hello_txt.with_input_stream [File_Access.Read] stream->
|
|
stream.read_all_bytes
|
|
|
|
bytes2.should_equal bytes
|
|
|
|
group_builder.specify "returns a valid name for files and 'directories'" <|
|
|
hello_txt.name . should_equal "hello.txt"
|
|
root.name . should_equal "/"
|
|
|
|
(root / "foo/" / "bar") . name . should_equal "bar"
|
|
|
|
# The trailing slash is removed for 'directories':
|
|
(root / "foo/") . name . should_equal "foo"
|
|
|
|
group_builder.specify "should be able to read file metadata" <|
|
|
root.exists . should_be_true
|
|
root.is_directory . should_be_true
|
|
root.is_regular_file . should_be_false
|
|
|
|
hello_txt.exists . should_be_true
|
|
hello_txt.is_directory . should_be_false
|
|
hello_txt.is_regular_file . should_be_true
|
|
|
|
parent = hello_txt.parent
|
|
parent.exists . should_be_true
|
|
parent.is_directory . should_be_true
|
|
parent.is_regular_file . should_be_false
|
|
|
|
root.extension . should_fail_with S3_Error
|
|
hello_txt.extension . should_equal ".txt"
|
|
|
|
root.size.should_fail_with S3_Error
|
|
|
|
hello_txt.size.should_equal 12
|
|
hello_txt.last_modified_time . should_be_a Date_Time
|
|
|
|
# AWS S3 does not record creation time, only last modified time.
|
|
hello_txt.creation_time . should_fail_with S3_Error
|
|
|
|
    my_writable_dir = writable_root / ("test-run-"+(Date_Time.now.format "yyyy-MM-dd_HHmmss.fV" . replace "/" "|")+"/")
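    # The timestamp-based name gives each test run its own directory under the
    # writable bucket, so repeated or concurrent runs should not clash over the
    # same S3 keys (assumption based on the naming scheme above).
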
suite_builder.group "S3_File writing" pending=api_pending group_builder->
|
|
assert my_writable_dir.is_directory
|
|
group_builder.teardown (my_writable_dir.delete_if_exists recursive=True)
|
|
|
|
group_builder.specify "should be able to write and delete a new file" <|
|
|
new_file = my_writable_dir / "new_file1.txt"
|
|
"Hello".write new_file on_existing_file=Existing_File_Behavior.Overwrite . should_succeed
|
|
|
|
delete_on_panic new_file <|
|
|
new_file.exists . should_be_true
|
|
new_file.read . should_equal "Hello"
|
|
new_file.size.should_equal 5
|
|
|
|
my_writable_dir.list . should_contain new_file
|
|
|
|
# But the file cannot be listed:
|
|
err = new_file.list
|
|
err.should_fail_with Illegal_Argument
|
|
err.catch.to_display_text . should_contain "Cannot `list` a non-directory."
|
|
|
|
new_file.delete . should_succeed
|
|
|
|
Test.with_retries <|
|
|
new_file.exists . should_be_false
|
|
my_writable_dir.list . should_not_contain new_file
|
|
|
|
group_builder.specify "should be able to overwrite a file" <|
|
|
new_file = my_writable_dir / "new_file-overwrite.txt"
|
|
"Hello".write new_file . should_succeed
|
|
delete_afterwards new_file <|
|
|
new_file.read . should_equal "Hello"
|
|
|
|
"World".write new_file on_existing_file=Existing_File_Behavior.Overwrite . should_succeed
|
|
Test.with_retries <|
|
|
new_file.read . should_equal "World"
|
|
|
|
group_builder.specify "should not be able to append to a file" <|
|
|
new_file = my_writable_dir / "new_file-append.txt"
|
|
"Hello".write new_file . should_succeed
|
|
delete_afterwards new_file <|
|
|
r = "World".write new_file on_existing_file=Existing_File_Behavior.Append
|
|
r.should_fail_with S3_Error
|
|
r.catch.to_display_text . should_contain "you may read it, modify and then write the new contents"
|
|
|
|
group_builder.specify "will error if the file existed prior to writing, if Existing_File_Behavior.Error" <|
|
|
new_file = my_writable_dir / "new_file-exists.txt"
|
|
"Hello".write new_file . should_succeed
|
|
delete_afterwards new_file <|
|
|
r = "World".write new_file on_existing_file=Existing_File_Behavior.Error
|
|
r.should_fail_with File_Error
|
|
r.catch.should_be_a File_Error.Already_Exists
|
|
r.catch.to_display_text . should_contain "already exists"
|
|
r.catch.to_display_text . should_contain "new_file-exists.txt"
|
|
|
|
group_builder.specify "should be able to write a raw stream" <|
|
|
new_file = my_writable_dir / "new_file-stream.txt"
|
|
r = new_file.with_output_stream [File_Access.Write] stream->
|
|
stream.write_bytes [1, 2, 3]
|
|
r.should_succeed
|
|
|
|
delete_afterwards new_file <|
|
|
new_file.read_bytes.should_equal [1, 2, 3]
|
|
|
|
group_builder.specify "fails if unsupported File_Access options are selected" <|
|
|
new_file = my_writable_dir / "new_file-stream-file-access.txt"
|
|
r = new_file.with_output_stream [File_Access.Write, File_Access.Append] _->Nothing
|
|
r.should_fail_with S3_Error
|
|
r.catch.to_display_text . should_contain "read it, modify and then write the new contents"
|
|
|
|
r2 = new_file.with_output_stream [File_Access.Read] _->Nothing
|
|
r2.should_fail_with Illegal_Argument
|
|
r2.catch.to_display_text . should_contain "Invalid open options for `with_output_stream`"
|
|
|
|
group_builder.specify "will respect the File_Access.Create_New option and fail if the file already exists" <|
|
|
new_file = my_writable_dir / "new_file-stream-create-new.txt"
|
|
r = new_file.with_output_stream [File_Access.Write] stream->
|
|
stream.write_bytes [1, 2, 3]
|
|
r.should_succeed
|
|
|
|
delete_afterwards new_file <|
|
|
r2 = new_file.with_output_stream [File_Access.Write, File_Access.Create_New] _->Nothing
|
|
r2.should_fail_with File_Error
|
|
r2.catch.should_be_a File_Error.Already_Exists
|
|
r2.catch.to_display_text . should_contain "already exists"
|
|
|
|
group_builder.specify "should be able to write a vector of bytes" <|
|
|
new_file = my_writable_dir / "new_file-bytes.txt"
|
|
[4, 5, 6].write_bytes new_file . should_succeed
|
|
delete_afterwards new_file <|
|
|
new_file.read_bytes.should_equal [4, 5, 6]
|
|
|
|
group_builder.specify "should support .bak logic" <|
|
|
my_file = my_writable_dir / "bak-test/my_file.txt"
|
|
bak_file = my_writable_dir / "bak-test/my_file.txt.bak"
|
|
my_file.exists.should_be_false
|
|
bak_file.exists.should_be_false
|
|
|
|
"version1".write my_file . should_succeed
|
|
delete_on_panic my_file <|
|
|
my_file.read . should_equal "version1"
|
|
bak_file.exists . should_be_false
|
|
|
|
# Backup is the default behavior:
|
|
"version2".write my_file . should_succeed
|
|
|
|
bak_file.exists . should_be_true
|
|
delete_afterwards bak_file <|
|
|
my_file.read . should_equal "version2"
|
|
|
|
# ensure correct format is used for reading the .bak file - Auto_Detect fails because it does not know the `.bak` extension
|
|
bak_file.read ..Plain_Text . should_equal "version1"
|
|
|
|
"version3".write my_file . should_succeed
|
|
|
|
my_file.read . should_equal "version3"
|
|
bak_file.read ..Plain_Text . should_equal "version2"
|
|
|
|
# No new file was created
|
|
parent_dir = my_file.parent
|
|
parent_dir.list . should_equal_ignoring_order [my_file, bak_file]
|
|
|
|
# If the original file is deleted and the backup file remains, the original file should _not_ count as existing (this used to fail).
|
|
my_file.delete
|
|
bak_file.exists . should_be_true
|
|
my_file.exists . should_be_false
|
|
files = my_file.parent.list
|
|
files . should_contain bak_file
|
|
files . should_not_contain my_file
|
|
|
|
group_builder.specify "should fail cleanly if Auto_Detect fails to detect a format" <|
|
|
weird_ext = my_writable_dir / "weird-ext.unknown"
|
|
"Hello".write weird_ext . should_succeed
|
|
|
|
delete_afterwards weird_ext <|
|
|
r = weird_ext.read
|
|
r.should_fail_with File_Error
|
|
r.catch.should_be_a File_Error.Unsupported_Type
|
|
r.catch.to_display_text . should_contain ".unknown"
|
|
|
|
group_builder.specify "should be able to write and then read custom types, like Table as Excel" <|
|
|
file = my_writable_dir / "table.xlsx"
|
|
table = Table.new [["X", [1, 2, 3, 4]], ["Y", ["a", "b", "c", "d"]]]
|
|
table.write file . should_equal file
|
|
delete_afterwards file <|
|
|
workbook1 = file.read
|
|
workbook1.sheet_names . should_equal ['EnsoSheet']
|
|
workbook1.read "EnsoSheet" . should_equal table
|
|
|
|
# And should be able to add another sheet
|
|
table2 = Table.new [["A", [9, 10, 11, 12]], ["B", ["i", "j", "k", "l"]]]
|
|
table2.write file (..Sheet "MySheet2") . should_succeed
|
|
|
|
workbook2 = file.read
|
|
workbook2.sheet_names . should_equal ['EnsoSheet', 'MySheet2']
|
|
workbook2.read "MySheet2" . should_equal table2
|
|
# The old table is kept intact
|
|
workbook2.read "EnsoSheet" . should_equal table
|
|
|
|
group_builder.specify "should be able to write and then read custom types, like Table as CSV" <|
|
|
file = my_writable_dir / "table.csv"
|
|
table = Table.new [["X", [1, 2, 3, 4]], ["Y", ["a", "b", "c", "d"]]]
|
|
table.write file . should_succeed
|
|
delete_afterwards file <|
|
|
file.read . should_equal table
|
|
|
|
# Append works through download and re-upload:
|
|
table2 = Table.new [["X", [5, 6, 7, 8]], ["Y", ["e", "f", "g", "h"]]]
|
|
r = table2.write file on_existing_file=Existing_File_Behavior.Append
|
|
r.should_fail_with S3_Error
|
|
r.catch.to_display_text . should_contain "you may read it, modify and then write the new contents"
|
|
|
|
group_builder.specify "should fail to write a file if permissions are lacking" <|
|
|
new_file = S3_File.new "s3://"+bucket_name+"/examples/no-write-permissions-here.txt" credentials=test_credentials
|
|
r = "Hello".write new_file
|
|
r.should_fail_with S3_Error
|
|
r.catch.to_display_text . should_contain "AccessDenied"
|
|
|
|
group_builder.specify "should fail to open an output stream if Output context is not enabled" <|
|
|
Context.Output.with_disabled <|
|
|
new_file = my_writable_dir / "new_file-ctx.txt"
|
|
new_file.with_output_stream [File_Access.Write] _->Nothing . should_fail_with Forbidden_Operation
|
|
|
|
group_builder.specify "should fail to write if Output context is not enabled" <|
|
|
Context.Output.with_disabled <|
|
|
new_file = my_writable_dir / "new_file-ctx.txt"
|
|
"Hello".write new_file . should_fail_with Forbidden_Operation
|
|
new_file.exists . should_be_false
|
|
|
|
group_builder.specify "should fail to delete a file if the Output context is not enabled" <|
|
|
Context.Output.with_disabled <|
|
|
hello_txt = S3_File.new "s3://"+bucket_name+"/examples/folder 2/hello.txt" credentials=test_credentials
|
|
hello_txt.delete . should_fail_with Forbidden_Operation
|
|
|
|
group_builder.specify "may fail with Not_Found if the file to delete does not exist, even if the Output Context is disabled" <|
|
|
Context.Output.with_disabled <|
|
|
new_file = my_writable_dir / "nonexistent-file.txt"
|
|
r = new_file.delete
|
|
r.should_fail_with File_Error
|
|
r.catch.should_be_a File_Error.Not_Found
|
|
|
|
group_builder.specify "does not raise an exception if the file being `delete_if_exists` did not exist in the first place" <|
|
|
new_file = my_writable_dir / "nonexistent-file.txt"
|
|
new_file.delete_if_exists . should_succeed
|
|
|
|
group_builder.specify "fails if the file being deleted did not exist" <|
|
|
new_file = my_writable_dir / "nonexistent-file2.txt"
|
|
r = new_file.delete
|
|
r.should_fail_with File_Error
|
|
r.catch.should_be_a File_Error.Not_Found
|
|
|
|
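        # Shared copy-related edge-case specs from Base_Tests are reused here,
        # parameterized to construct `S3_File` instances with the test credentials.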
        Local_File_Spec.add_copy_edge_cases_specs group_builder my_writable_dir file_from_path=(S3_File.new _ test_credentials)

suite_builder.group "DataLinks to S3_File" pending=api_pending group_builder->
|
|
group_builder.specify "should be able to read a data link of an S3 File" <| with_default_credentials <|
|
|
# It reads the datalink description and then reads the actual S3 file contents:
|
|
(enso_project.data / "simple.datalink") . read . should_equal "Hello WORLD!"
|
|
|
|
group_builder.specify "should be able to read a data link with custom credentials and secrets" pending=cloud_setup.pending <| cloud_setup.with_prepared_environment <|
|
|
transformed_data_link_file = replace_username_in_data_link (enso_project.data / "credentials-with-secrets.datalink")
|
|
|
|
secret_key_id = Enso_Secret.create "datalink-secret-AWS-keyid" test_credentials.access_key_id
|
|
secret_key_id.should_succeed
|
|
Panic.with_finalizer secret_key_id.delete <|
|
|
secret_key_value = Enso_Secret.create "datalink-secret-AWS-secretkey" test_credentials.secret_access_key
|
|
secret_key_value.should_succeed
|
|
Panic.with_finalizer secret_key_value.delete <| Test.with_retries <|
|
|
transformed_data_link_file.read . should_equal "Hello WORLD!"
|
|
|
|
group_builder.specify "should be able to read a data link with a custom file format set" <| with_default_credentials <|
|
|
r = (enso_project.data / "format-delimited.datalink") . read
|
|
# The datalink itself is configured to interpret the data as a Table delimited by space
|
|
r.should_be_a Table
|
|
r.column_names . should_equal ["Column 1", "Column 2"]
|
|
r.rows.at 0 . to_vector . should_equal ["Hello", "WORLD!"]
|
|
|
|
group_builder.specify "should be able to read a data link stored on S3" <| with_default_credentials <|
|
|
s3_link = my_writable_dir / "my-simple.datalink"
|
|
raw_content = Data_Link.read_raw_config (enso_project.data / "simple.datalink")
|
|
Data_Link.write_raw_config s3_link raw_content replace_existing=True . should_succeed
|
|
Panic.with_finalizer s3_link.delete <|
|
|
s3_link.read . should_equal "Hello WORLD!"
|
|
|
|
group_builder.specify "should be able to read an S3 data link overriding the format" <| with_default_credentials <|
|
|
s3_link = my_writable_dir / "my-simple.datalink"
|
|
raw_content = Data_Link.read_raw_config (enso_project.data / "simple.datalink")
|
|
Data_Link.write_raw_config s3_link raw_content replace_existing=True . should_succeed
|
|
Panic.with_finalizer s3_link.delete <|
|
|
r = s3_link.read (..Delimited " " headers=False)
|
|
r.should_be_a Table
|
|
r.column_names . should_equal ["Column 1", "Column 2"]
|
|
r.rows.at 0 . to_vector . should_equal ["Hello", "WORLD!"]
|
|
|
|
        table = Table.new [["X", [1, 2, 3]], ["Y", ["a", "b", "c"]]]
        test_write_table_to_datalink extension check_read_back = group_builder.specify "should be able to write a Table to an S3 data link ("+extension+")" <| with_default_credentials <|
            link_target = my_writable_dir / ("linked-sheet."+extension)
            s3_link = my_writable_dir / ("my-simple-write-"+extension+".datalink")
            Problems.assume_no_problems <| Data_Link.write_config s3_link <|
                JS_Object.from_pairs <|
                    [["type", "S3"]]
                        + [["libraryName", "Standard.AWS"]]
                        + [["uri", link_target.path]]
                        + [["auth", JS_Object.from_pairs [["type", "aws_auth"], ["subType", "default"]]]]

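            # The generated datalink config is expected to look roughly like this (sketch):
            #   {"type": "S3", "libraryName": "Standard.AWS", "uri": "s3://.../linked-sheet.<extension>", "auth": {"type": "aws_auth", "subType": "default"}}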
            # The write operation should return the data link file, not the target linked file:
            table.write s3_link . should_equal s3_link
            check_read_back s3_link.read

            # Deleting the datalink should not delete the linked file:
            s3_link.delete . should_equal Nothing
            s3_link.exists . should_be_false
            link_target.exists . should_be_true

        test_write_table_to_datalink "xlsx" got_workbook->
            got_workbook.read "EnsoSheet" . should_equal table
        test_write_table_to_datalink "csv" got_table->
            got_table.should_equal table
        test_write_table_to_datalink "json" got_json->
            Table.from got_json . should_equal table

main filter=Nothing =
    suite = Test.build suite_builder->
        add_specs suite_builder
    suite.run_with_filter filter
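# A usage sketch (assumed invocation): running `main` builds and executes the whole
# suite; a non-Nothing `filter` narrows the run to matching spec names, e.g.
# `main filter="S3 Path handling"`.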