S3 Read Access, Input Stream based reading (#7776)

- Added a `FileSystemSPI` allowing protocol resolution to a target type.
- Separated `Input_Stream` and `Output_Stream` from `File` so they can be used elsewhere.
- `File_Format` types' `read_web` changed to `read_stream`, working with an `InputStream`.
- Added directory listing to `Auto_Detect`, allowing `Data.read` to list a folder.
- Adjusted HTTP to return an `InputStream` rather than a `byte[]`:
  - `Response_Body` adjusted to wrap an `InputStream`.
  - Added the ability to materialize to either an in-memory vector (<4KB) or a temporary file.
  - `Data.fetch` will materialize the body if the mime-type is not recognized.
  - Added `HTTP_Error` to handle IO exceptions from the stream.
- `Excel_Format` now supports mime-types and reading from a stream.
- `Excel_Workbook` can now get an `Excel_Section` using `read_section`.
- Added S3 APIs (usage sketched below):
  - `parse_uri`: splits an S3 URI into bucket and key.
  - `list_objects`: lists the items in an S3 bucket with the specified prefix.
  - `read_bucket`: lists prefixes and keys, using a delimiter, in an S3 bucket with the specified prefix.
  - `head`: calls either the head_bucket API (tests existence) or the head_object API (reads object metadata).
  - `get_object`: gets an object from S3, returning it as a `Response_Body`.
- Added `S3_File` type acting like a `File`:
  - No support for writing in this PR.
  - **ToDo:** recursive listing, glob filtering, exists, size.
- Fixed a few invalid type signature lines.
- Moved `create` methods for `Postgres_Connection` and `SQLite_Connection` into type instead of module.
- Renamed `Column_Fetcher.Builder` to `Column_Fetcher_Builder`.
- Fixed bug with `select_into` in Dry Run mode creating permanent tables.
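
A minimal usage sketch of the new S3 surface, assuming the library is imported as `Standard.AWS`; the bucket, prefix and object names below are hypothetical:

```
from Standard.Base import all
from Standard.AWS import all

example_s3 =
    # Split an S3 URI into bucket and key: Pair "my-bucket" "reports/2023.csv".
    parts = S3.parse_uri "s3://my-bucket/reports/2023.csv"

    # List up to 100 object keys under a prefix.
    keys = S3.list_objects parts.first prefix="reports/" max_count=100

    # Read one object through the File-like wrapper; Auto_Detect picks the format.
    table = S3_File.new "s3://my-bucket/reports/2023.csv" . read
    [keys, table]
```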

**ToDo:** Unit tests.
Commit 74d1d0861c (parent 2e1791b63e) by James Dunkerley, 2023-09-20 16:09:11 +01:00.
54 changed files with 1285 additions and 572 deletions

View File

@ -574,6 +574,7 @@
- [Added `delete_rows` method to Database Table, changed the
`update_database_table` API into `update_rows`.][7709]
- [Added `Data.post` method to write to HTTP endpoints.][7700]
- [Added support for S3, using `Input_Stream` more for reading.][7776]
- [Renamed `Decimal` to `Float`.][7807]
[debug-shortcuts]:
@ -814,6 +815,7 @@
[7637]: https://github.com/enso-org/enso/pull/7637
[7700]: https://github.com/enso-org/enso/pull/7700
[7709]: https://github.com/enso-org/enso/pull/7709
[7776]: https://github.com/enso-org/enso/pull/7776
[7807]: https://github.com/enso-org/enso/pull/7807
#### Enso Compiler

View File

@ -33,7 +33,7 @@ ide_ci::define_env_var! {
pub const EDITION_FILE_ARTIFACT_NAME: &str = "Edition File";
pub const LIBRARIES_TO_TEST: [&str; 6] = [
pub const LIBRARIES_TO_TEST: [&str; 7] = [
"Examples_Tests",
"Geo_Tests",
"Image_Tests",
@ -41,6 +41,7 @@ pub const LIBRARIES_TO_TEST: [&str; 6] = [
// "Meta_Test_Suite_Tests",
"Table_Tests",
"Tests",
"AWS_Tests",
"Visualization_Tests",
];

View File

@ -60,7 +60,7 @@ type Redshift_Details
## PRIVATE
Provides the properties for the connection.
jdbc_properties : [Pair Text Text]
jdbc_properties : Vector (Pair Text Text)
jdbc_properties self =
credentials = case self.credentials of
Nothing -> Pgpass.read self.host self.port self.schema

View File

@ -1,10 +1,48 @@
from Standard.Base import all
polyglot java import software.amazon.awssdk.core.exception.SdkClientException
## An error in the core AWS SDK
type AWS_SDK_Error
## PRIVATE
Error message:Text
## PRIVATE
to_display_text : Text
to_display_text self = "AWS SDK Error: " + self.message
## PRIVATE
handle_java_errors : AWS_SDK_Error
handle_java_errors ~action =
Panic.catch SdkClientException action caught_panic->
Error.throw (AWS_SDK_Error.Error caught_panic.payload.getMessage)
## An error accessing S3
type S3_Error
## PRIVATE
Error message:Text code:Text
Error message:(Text|Nothing) code:(Text|Nothing)
## PRIVATE
to_display_text : Text
to_display_text self = "S3 Error: " + self.message + " (" + self.code + ")"
to_display_text self =
msg = if self.message.is_nothing then "S3 Error: Unknown Error" else "S3 Error: " + self.message
code = if self.code.is_nothing then "" else " (" + self.code + ")"
msg + code
## An error when an S3 bucket was not found.
type S3_Bucket_Not_Found
## PRIVATE
Error bucket:Text
## PRIVATE
to_display_text : Text
to_display_text self = "Bucket '" + self.bucket + "' not found."
## A warning that more records are available
type More_Records_Available
## PRIVATE
Warning message:Text
## PRIVATE
to_display_text : Text
to_display_text self = self.message
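
For illustration, a hedged sketch of how these error types surface to a caller (the bucket name is hypothetical and `Standard.AWS` is the assumed import):

```
from Standard.Base import all
from Standard.AWS import all

example_error_handling =
    # A missing bucket surfaces as an S3_Bucket_Not_Found dataflow error.
    result = S3.list_objects "no-such-bucket"
    result.catch S3_Bucket_Not_Found error-> error.to_display_text
```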

View File

@ -2,15 +2,20 @@ from Standard.Base import all
import project.AWS_Credential.AWS_Credential
polyglot java import software.amazon.awssdk.auth.credentials.AwsBasicCredentials
polyglot java import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider
polyglot java import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider
polyglot java import software.amazon.awssdk.auth.credentials.ProfileCredentialsProvider
polyglot java import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider
## PRIVATE
Create the Java credentialsProvider object.
create_provider : (AWS_Credential | Nothing) -> ProfileCredentialsProvider
create_provider : (AWS_Credential | Nothing) -> AwsCredentialsProvider
create_provider (credentials : AWS_Credential | Nothing) = case credentials of
AWS_Credential.Profile p ->
ProfileCredentialsProvider.builder.profileName p . build
AWS_Credential.Key k s ->
ProfileCredentialsProvider.builder.accessKeyId k . secretAccessKey s . build
AWS_Credential.Key key secret ->
credentials = AwsBasicCredentials.create key secret
StaticCredentialsProvider.create credentials
Nothing ->
ProfileCredentialsProvider.builder.build
DefaultCredentialsProvider.builder.build

View File

@ -1,7 +1,9 @@
import project.AWS_Credential.AWS_Credential
import project.Database.Redshift.Redshift_Details.Redshift_Details
import project.S3.S3
import project.S3.S3_File.S3_File
export project.AWS_Credential.AWS_Credential
export project.Database.Redshift.Redshift_Details.Redshift_Details
export project.S3.S3
export project.S3.S3_File.S3_File

View File

@ -1,14 +1,31 @@
from Standard.Base import all
import Standard.Base.Errors.No_Such_Key.No_Such_Key
import Standard.Base.Network.HTTP.Response_Body.Response_Body
import Standard.Base.System.Input_Stream.Input_Stream
import project.AWS_Credential.AWS_Credential
import project.Errors.AWS_SDK_Error
import project.Errors.More_Records_Available
import project.Errors.S3_Bucket_Not_Found
import project.Errors.S3_Error
import project.Internal.Auth
polyglot java import java.io.IOException
polyglot java import software.amazon.awssdk.core.exception.SdkClientException
polyglot java import software.amazon.awssdk.services.s3.model.GetObjectRequest
polyglot java import software.amazon.awssdk.services.s3.model.HeadBucketRequest
polyglot java import software.amazon.awssdk.services.s3.model.HeadObjectRequest
polyglot java import software.amazon.awssdk.services.s3.model.ListObjectsV2Request
polyglot java import software.amazon.awssdk.services.s3.model.NoSuchBucketException
polyglot java import software.amazon.awssdk.services.s3.model.NoSuchKeyException
polyglot java import software.amazon.awssdk.services.s3.model.S3Exception
polyglot java import software.amazon.awssdk.services.s3.S3Client
## GROUP Standard.Base.Input
Gets the list of the S3 bucket names.
## Gets the list of the S3 bucket names.
Arguments:
- credentials: AWS credentials. If not provided, the default credentials will
be used.
@credentials AWS_Credential.default_widget
list_buckets : AWS_Credential | Nothing -> Vector Text ! S3_Error
list_buckets credentials:(AWS_Credential | Nothing)=Nothing = handle_s3_errors <|
@ -16,15 +33,120 @@ list_buckets credentials:(AWS_Credential | Nothing)=Nothing = handle_s3_errors <
buckets = client.listBuckets.buckets
buckets.map b->b.name
## Gets the list of the items inside a bucket.
Arguments:
- bucket: the name of the bucket.
- prefix: the prefix of keys to match.
- max_count: the maximum number of items to return. The default is 1000.
- credentials: AWS credentials. If not provided, the default credentials will
be used.
@credentials AWS_Credential.default_widget
list_objects : Text -> Text -> AWS_Credential | Nothing -> Integer -> Vector Text ! S3_Error
list_objects bucket prefix="" credentials:(AWS_Credential | Nothing)=Nothing max_count=1000 =
read_bucket bucket prefix credentials delimiter="" max_count=max_count . second
## Reads an S3 bucket returning a pair of vectors, one with common prefixes and
one with object keys.
@credentials AWS_Credential.default_widget
read_bucket : Text -> Text -> AWS_Credential | Nothing -> Integer -> Text -> Pair Vector Vector ! S3_Error
read_bucket bucket prefix="" credentials:(AWS_Credential | Nothing)=Nothing delimiter="/" max_count=1000 = handle_s3_errors bucket=bucket <|
client = make_client credentials
per_request = Math.min max_count 1000
request = ListObjectsV2Request.builder.bucket bucket . maxKeys per_request . delimiter delimiter . prefix prefix . build
iterator request count current prefixes first =
response = client.listObjectsV2 request
if response.is_error then response else
## Note the AWS API does not limit the count of common prefixes.
common_prefixes = if first then response.commonPrefixes.map _.prefix else prefixes
result = current + (response.contents.map _.key)
if response.isTruncated.not then Pair.new common_prefixes result else
new_count = count + result.length
if new_count >= max_count then (Warning.attach (More_Records_Available.Warning "Not all keys returned. Additional objects found.") (Pair.new common_prefixes result)) else
new_items = Math.min (Math.max 0 max_count-new_count) 1000
new_request = request.toBuilder.continuationToken response.nextContinuationToken . maxKeys new_items . build
@Tail_Call iterator new_request new_count result common_prefixes False
iterator request 0 [] [] True
## Gets the metadata of a bucket or object.
Arguments:
- bucket: the name of the bucket.
- key: the key of the object.
- credentials: AWS credentials. If not provided, the default credentials will
be used.
head : Text -> Text -> AWS_Credential | Nothing -> Map Text Any ! S3_Error
head bucket key="" credentials:(AWS_Credential | Nothing)=Nothing =
client = make_client credentials
response = case key == "" of
True ->
request = HeadBucketRequest.builder.bucket bucket . build
handle_s3_errors bucket=bucket <| client.headBucket request
False ->
request = HeadObjectRequest.builder.bucket bucket . key key . build
handle_s3_errors bucket=bucket key=key <| client.headObject request
pairs = response.sdkFields.map f-> [f.memberName, f.getValueOrDefault response]
Map.from_vector pairs
## ADVANCED
Gets an object from an S3 bucket.
Returns a raw stream which can be read once.
Arguments:
- bucket: the name of the bucket.
- key: the key of the object.
- credentials: AWS credentials. If not provided, the default credentials will
be used.
get_object : Text -> Text -> AWS_Credential | Nothing -> Any ! S3_Error
get_object bucket key credentials:(AWS_Credential | Nothing)=Nothing = handle_s3_errors bucket=bucket key=key <|
request = GetObjectRequest.builder.bucket bucket . key key . build
client = make_client credentials
response = client.getObject request
inner_response = response.response
mime_type = inner_response.contentType
s3_uri = URI.parse ("s3://" + bucket + "/" + key)
input_stream = Input_Stream.new response (handle_io_errors s3_uri)
Response_Body.Raw_Stream input_stream mime_type s3_uri
## PRIVATE
handle_s3_errors : Function -> Any ! S3_Error
handle_s3_errors ~action =
Panic.catch S3Exception action caught_panic->
aws_error = caught_panic.payload.awsErrorDetails
Error.throw (S3_Error.Error aws_error.errorMessage aws_error.errorCode)
## Splits an S3 URI into bucket and key.
parse_uri : Text -> Pair Text Text | Nothing
parse_uri uri =
if uri.starts_with "s3://" . not then Nothing else
no_prefix = uri.drop 5
index_of = no_prefix.index_of "/"
if index_of == 0 then Nothing else
if index_of.is_nothing then Pair.new no_prefix "" else
Pair.new (no_prefix.take index_of) (no_prefix.drop index_of+1)
## PRIVATE
handle_s3_errors : Text -> Text -> Function -> Any ! S3_Error | AWS_SDK_Error
handle_s3_errors ~action bucket="" key="" =
s3_inner_handler caught_panic =
error = if bucket!="" && caught_panic.payload.is_a NoSuchBucketException then S3_Bucket_Not_Found.Error bucket else
if bucket!="" && key!="" && caught_panic.payload.is_a NoSuchKeyException then No_Such_Key.Error bucket key else
aws_error = caught_panic.payload.awsErrorDetails
S3_Error.Error aws_error.errorMessage aws_error.errorCode
Error.throw error
s3_handler = Panic.catch S3Exception handler=s3_inner_handler
AWS_SDK_Error.handle_java_errors <| s3_handler <| action
## PRIVATE
make_client : (AWS_Credential | Nothing) -> S3Client
make_client credentials:(AWS_Credential | Nothing) =
provider = Auth.create_provider credentials
S3Client.builder.credentialsProvider provider . build
## PRIVATE
Utility method for running an action with Java exceptions mapping.
handle_io_errors uri:URI ~action =
Panic.catch IOException action caught_panic->
S3_Error.Error ("An IO error has occurred: " + caught_panic.payload.to_text) uri.to_text
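
A sketch of how the new S3 functions compose (bucket, prefix and key names are hypothetical; `Standard.AWS` is the assumed import):

```
from Standard.Base import all
from Standard.AWS import all

example_bucket =
    # Pair of common prefixes ("folders") and object keys under the prefix.
    pair = S3.read_bucket "my-bucket" "reports/" delimiter="/" max_count=100

    # Object metadata as a Map, e.g. "ContentLength" and "ContentType".
    meta = S3.head "my-bucket" "reports/2023.csv"

    # The raw object, wrapped as a Response_Body over a once-readable Input_Stream.
    body = S3.get_object "my-bucket" "reports/2023.csv"
    [pair.first, pair.second, meta.get "ContentLength", body.decode_as_text]
```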

View File

@ -0,0 +1,208 @@
from Standard.Base import all
import Standard.Base.Errors.Common.Syntax_Error
import Standard.Base.Errors.File_Error.File_Error
import Standard.Base.Errors.Unimplemented.Unimplemented
import Standard.Base.System.Input_Stream.Input_Stream
import Standard.Base.System.Output_Stream.Output_Stream
import project.AWS_Credential.AWS_Credential
import project.Errors.S3_Error
import project.S3.S3
## Represents an S3 file or folder
If the path ends with a slash, it is a folder. Otherwise, it is a file.
type S3_File
## PUBLIC
new : Text -> AWS_Credential | Nothing -> S3_File
new uri="s3://" credentials=Nothing =
parts = S3.parse_uri uri
if parts.is_nothing then Error.throw (Syntax_Error.Error "Invalid S3 URI.") else
S3_File.Value parts.first parts.second credentials
## PRIVATE
Value bucket:Text prefix:Text credentials:(AWS_Credential | Nothing)
## Gets the URI of this file
uri : Text
uri self = "s3://" + (if self.bucket=="" then "" else (self.bucket+"/"+self.prefix))
## Checks if the folder or file exists
exists : Boolean
exists self = if self.bucket == "" then True else
if self.prefix == "" then S3.head self.bucket "" self.credentials . is_error . not else
pair = S3.read_bucket self.bucket self.prefix self.credentials max_count=1
pair.second.length > 0
## Checks if this is a folder
is_directory : Boolean
is_directory self = self.prefix=="" || self.prefix.ends_with "/"
## GROUP Metadata
Gets the size of a file in bytes.
size : Integer
size self =
if self.is_directory then Error.throw (S3_Error.Error "size can only be called on files." self.uri) else
head = S3.head self.bucket self.prefix self.credentials
content_length = head.get "ContentLength"
if content_length.is_nothing then Error.throw (S3_Error.Error "ContentLength header is missing." self.uri) else content_length
## PRIVATE
ADVANCED
Creates a new output stream for this file and runs the specified action
on it.
The created stream is automatically closed when `action` returns (even
if it returns exceptionally).
Arguments:
- open_options: A vector of `File_Access` objects determining how to open
the stream. These options set the access properties of the stream.
- action: A function that operates on the output stream and returns some
value. The value is returned from this method.
with_output_stream : Vector File_Access -> (Output_Stream -> Any ! File_Error) -> Any ! File_Error
with_output_stream self open_options action =
_ = [open_options, action]
Unimplemented.throw "Writing to S3 is not currently implemented."
## PRIVATE
ADVANCED
Creates a new input stream for this file and runs the specified action
on it.
Arguments:
- open_options: A vector of `File_Access` objects determining how to open
the stream. These options set the access properties of the stream.
- action: A function that operates on the input stream and returns some
value. The value is returned from this method.
The created stream is automatically closed when `action` returns (even
if it returns exceptionally).
with_input_stream : Vector File_Access -> (Input_Stream -> Any ! File_Error) -> Any ! File_Error
with_input_stream self open_options action =
if (open_options != [File_Access.Read]) then Error.throw (S3_Error.Error "S3 files can only be opened for reading." self.uri) else
response_body = S3.get_object self.bucket self.prefix self.credentials
response_body.with_stream action
## ALIAS load, open
GROUP Input
Read a file using the specified file format
Arguments:
- format: A `File_Format` object used to read file into memory.
If `Auto_Detect` is specified; the provided file determines the specific
type and configures it appropriately. If there is no matching type then
a `File_Error.Unsupported_Type` error is returned.
- on_problems: Specifies the behavior when a problem occurs during the
function.
By default, a warning is issued, but the operation proceeds.
If set to `Report_Error`, the operation fails with a dataflow error.
If set to `Ignore`, the operation proceeds without errors or warnings.
@format File_Format.default_widget
read : File_Format -> Problem_Behavior -> Any ! S3_Error
read self format=Auto_Detect (on_problems=Problem_Behavior.Report_Warning) =
_ = on_problems
case format of
Auto_Detect -> if self.is_directory then format.read self on_problems else
response = S3.get_object self.bucket self.prefix self.credentials
response.decode Auto_Detect
_ -> self.with_input_stream [File_Access.Read] format.read_stream
## ALIAS load bytes, open bytes
Reads all bytes in this file into a byte vector.
read_bytes : Vector ! File_Error
read_bytes self =
self.read Bytes
## ALIAS load text, open text
Reads the whole file into a `Text`, with specified encoding.
Arguments:
- encoding: The text encoding to decode the file with. Defaults to UTF-8.
- on_problems: Specifies the behavior when a problem occurs during the
function.
By default, a warning is issued, but the operation proceeds.
If set to `Report_Error`, the operation fails with a dataflow error.
If set to `Ignore`, the operation proceeds without errors or warnings.
@encoding Encoding.default_widget
read_text : Encoding -> Problem_Behavior -> Text ! File_Error
read_text self (encoding=Encoding.utf_8) (on_problems=Problem_Behavior.Report_Warning) =
self.read (Plain_Text encoding) on_problems
## GROUP Operators
Join two path segments together.
Arguments:
- subpath: The path to join to the path of `self`.
/ : Text -> S3_File
/ self subpath = if self.is_directory.not then Error.throw (S3_Error.Error "Only folders can have children." self.uri) else
trimmed = if subpath.starts_with "/" then subpath.drop (First 1) else subpath
parts = trimmed.split "/"
loop current remaining = if remaining.length == 0 then current else
new_current = case remaining.first of
".." ->
last_index = current.lastIndexOf "/"
if last_index == Nothing then (S3_Error.Error "Cannot move above root folder.") else current.take last_index
"." -> current
x -> new_current + "/" + x
@Tail_Call loop new_current (remaining.drop 1)
initial = if subpath.starts_with "/" then "" else self.prefix
path = loop initial parts
S3_File.Value self.bucket path self.credentials
## GROUP Calculations
Join two or more path segments together, normalizing the `..` and `.` subpaths.
Arguments:
- subpaths: The path segment or segments to join to the path of `self`.
join : (Text | Vector) -> S3_File
join self subpaths = case subpaths of
_ : Vector -> (subpaths.fold self c->p-> c / p)
_ -> self.join [subpaths]
## GROUP Metadata
Returns the name of this file.
name : Text
name self = if self.prefix == "" then self.bucket else
trimmed = if self.prefix.ends_with "/" then self.prefix.drop (Last 1) else self.prefix
last_index = trimmed.lastIndexOf "/"
if last_index == Nothing then trimmed else trimmed.drop (First last_index+1)
## GROUP Metadata
Returns the extension of the file.
extension : Text
extension self = if self.is_directory then Error.throw (S3_Error.Error "Directories do not have extensions." self.uri) else
name = self.name
last_dot = name.locate "." mode=Matching_Mode.Last
if last_dot.is_nothing then "" else
extension = name.drop (Index_Sub_Range.First last_dot.start)
if extension == "." then "" else extension
## GROUP Input
Lists files contained in the directory denoted by this file.
Arguments:
- name_filter: A glob pattern that can be used to filter the returned
files. If it is not specified, all files are returned.
- recursive: Specifies whether the returned list of files should include
also files from the subdirectories. If set to `False` (the default),
only the immediate children of the listed directory are considered.
The `name_filter` can contain the following special characters:
If `recursive` is set to True and a `name_filter` does not contain `**`,
it will be automatically prefixed with `**/` to allow matching files in
subdirectories.
list : Text -> Boolean -> Vector S3_File
list self name_filter:Text="" recursive:Boolean=False =
check_name_filter action = if name_filter != "" then Unimplemented.throw "S3 listing with name filter is not currently implemented." else action
check_recursion action = if recursive then Unimplemented.throw "S3 listing with recursion is not currently implemented." else action
check_directory action = if self.is_directory.not then Error.throw (S3_Error.Error "Only folders can have children." self.uri) else action
check_directory <| check_recursion <| check_name_filter <|
if self.bucket == "" then S3.list_buckets self.credentials . map bucket-> S3_File.Value bucket "" self.credentials else
pair = S3.read_bucket self.bucket self.prefix self.credentials
sub_folders = pair.first . map key-> S3_File.Value self.bucket key self.credentials
files = pair.second . map key-> S3_File.Value self.bucket key self.credentials
sub_folders + files
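
A sketch of the `S3_File` surface added here (names are hypothetical; writing is not yet supported):

```
from Standard.Base import all
from Standard.AWS import all

example_s3_file =
    folder = S3_File.new "s3://my-bucket/reports/"
    file = S3_File.new "s3://my-bucket/reports/2023.csv"

    # Folder keys end with "/"; listing returns sub-folders then files.
    children = folder.list

    # Metadata comes from the head API; contents are read via a stream.
    size = file.size
    text = file.read_text
    [children.length, size, text.take 10]
```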

View File

@ -173,7 +173,9 @@ list_directory directory name_filter=Nothing recursive=False =
Data.fetch URL . body . to_file file
fetch : (URI | Text) -> HTTP_Method -> Vector (Header | Pair Text Text) -> Boolean -> Any
fetch (uri:(URI | Text)) (method:HTTP_Method=HTTP_Method.Get) (headers:(Vector (Header | Pair Text Text))=[]) (try_auto_parse_response:Boolean=True) =
HTTP.fetch uri method headers try_auto_parse_response
response = HTTP.fetch uri method headers
if try_auto_parse_response.not then response.with_materialized_body else
response.decode if_unsupported=response.with_materialized_body
## ALIAS upload, http post
GROUP Input
@ -270,7 +272,9 @@ fetch (uri:(URI | Text)) (method:HTTP_Method=HTTP_Method.Get) (headers:(Vector (
response = Data.post url_post (Request_Body.Form_Data form_data url_encoded=True)
post : (URI | Text) -> Request_Body -> HTTP_Method -> Vector (Header | Pair Text Text) -> Boolean -> Any
post (uri:(URI | Text)) (body:Request_Body=Request_Body.Empty) (method:HTTP_Method=HTTP_Method.Post) (headers:(Vector (Header | Pair Text Text))=[]) (try_auto_parse_response:Boolean=True) =
HTTP.post uri body method headers try_auto_parse_response
response = HTTP.post uri body method headers
if try_auto_parse_response.not then response.with_materialized_body else
response.decode if_unsupported=response.with_materialized_body
## ALIAS download, http get
GROUP Input

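A sketch of the adjusted `Data.fetch` behaviour (the URLs are hypothetical):

```
from Standard.Base import all

example_fetch =
    # Auto-parse when the mime-type is recognized; otherwise the body is
    # materialized (in memory if small, to a temporary file otherwise).
    parsed = Data.fetch "https://example.com/data.csv"

    # Opt out of parsing to get a Response with a materialized body.
    raw = Data.fetch "https://example.com/data.bin" try_auto_parse_response=False
    [parsed, raw.body]
```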
View File

@ -5,11 +5,11 @@ import project.Data.Text.Text
import project.Data.Vector.Vector
import project.Meta
import project.Metadata.Display
import project.Metadata.Widget
import project.Nothing.Nothing
from project.Data.Boolean import Boolean, False, True
from project.Data.Text.Extensions import all
from project.Metadata.Choice import Option
from project.Metadata.Widget import Single_Choice
polyglot java import java.util.Locale as JavaLocale
@ -430,8 +430,8 @@ type Locale
## PRIVATE
Gets the default drop down option for this encoding.
default_widget : Single_Choice
default_widget = Single_Choice values=Locale.widget_options display=Display.When_Modified
default_widget : Widget
default_widget = Widget.Single_Choice values=Locale.widget_options display=Display.When_Modified
## PRIVATE
predefined_locale_fields : Vector Text

View File

@ -141,7 +141,7 @@ type Map key value
remove : Any -> Map ! No_Such_Key
remove self key =
Panic.catch Any (self.remove_builtin key) _->
Error.throw No_Such_Key.Error self key
Error.throw (No_Such_Key.Error self key)
## GROUP Selections
Gets the value associated with `key` in this map, or throws a

View File

@ -1,12 +1,11 @@
import project.Data.Text.Extensions
import project.Data.Text.Text
import project.Error.Error
import project.Errors.Common.Arithmetic_Error
import project.Function.Function
import project.Meta
import project.Nothing.Nothing
import project.Panic.Panic
import project.Function.Function
import project.Data.Text.Extensions
from project.Data.Boolean import Boolean, False, True
polyglot java import java.lang.ArithmeticException

View File

@ -35,7 +35,8 @@ polyglot java import java.nio.file.Path
polyglot java import org.enso.base.Http_Utils
type HTTP
## ADVANCED
## PRIVATE
ADVANCED
Create a new instance of the HTTP client.
Arguments:
@ -66,7 +67,6 @@ type HTTP
HTTP.Value timeout follow_redirects proxy version
## PRIVATE
An HTTP client.
Arguments:
@ -77,99 +77,17 @@ type HTTP
Value timeout follow_redirects proxy version
## ADVANCED
Create a request
Sends a request using the HTTP client.
The response body will contain a raw Input_Stream which can be read once.
Please note, this must be closed after use (either directly or via the
helpers on Response_Body).
Arguments:
- req: The HTTP request to send using `self` HTTP client.
- try_auto_parse_response: If true, attempt to auto-parse the result
body.
> Example
Send a Get request with headers.
Note: This example will send a network request.
import Standard.Base.Network.HTTP.HTTP
import Standard.Base.Network.HTTP.HTTP_Method.HTTP_Method
import Standard.Base.Network.HTTP.Request.Request
example_request =
req = Request.new HTTP_Method.Get "http://httpbin.org/get" . with_header "X-Trace-Id" "00000"
res = Examples.http_client.request req
res.body
> Example
Open a connection and send a Post request with form.
Note: This example will send a network request.
import Standard.Base.Network.HTTP.Request.Request
import Standard.Base.Network.HTTP.Request_Body.Request_Body
import Standard.Examples
example_request =
req = Request.post "http://httpbin.org/post" Body.Empty
with_form = req.with_form (Map.from_vector [["key", "value"]])
with_header = with_form.with_header "X-Trace-Id" "123456789"
res = Examples.http_client.request with_header
res.code
> Example
Send a Post request with urlencoded form data.
Note: This example will send a network request.
import Standard.Base.Network.HTTP.Request.Request
import Standard.Base.Network.HTTP.Request_Body.Request_Body
import Standard.Examples
example_request =
form = Map.from_vector [["name", "John Doe"], ["license.txt", enso_project.root / "LICENSE"]]
req = Request.post "http://httpbin.org/post" Request_Body.Empty . with_form form
Examples.http_client.request req
> Example
Send a Post request with form encoded as "multipart/form-data".
Note: This example will send a network request.
import Standard.Base.Network.HTTP.Header.Header
import Standard.Base.Network.HTTP.Request.Request
import Standard.Base.Network.HTTP.Request_Body.Request_Body
import Standard.Examples
example_request =
form = Map.from_vector [["name", "John Doe"], ["license.txt", enso_project.root / "LICENSE"]]
req = Request.post "http://httpbin.org/post" Request_Body.Empty
with_form = req.with_form form
with_headers = with_form.with_headers [Header.multipart_form_data]
Examples.http_client.request with_headers
> Example
Configure HTTP client and send a Post request with form.
Note: This example will send a network request.
import Standard.Base.Data.Time.Duration.Duration
import Standard.Base.Network.HTTP.HTTP
import Standard.Base.Network.HTTP.HTTP_Method.HTTP_Method
import Standard.Base.Network.HTTP.Request.Request
example_request =
form = Map.from_vector [["name", "John Doe"]]
req = Request.new HTTP_Method.Post "http://httpbin.org/post" . with_form form
http = HTTP.new (timeout = (Duration.new seconds=30))
http.request req
> Example
Send a Get request with headers, and auto-parse the result
Note: This example will send a network request.
import Standard.Base.Network.HTTP.HTTP
import Standard.Base.Network.HTTP.HTTP_Method.HTTP_Method
import Standard.Base.Network.HTTP.Request.Request
example_request =
req = Request.new HTTP_Method.Get "http://httpbin.org/get" try_auto_parse_response=True
res = Examples.http_client.request req
res.body
- error_on_failure_code: Whether or not to throw an error if the response
code is not a success code.
request : Request -> Boolean -> Response ! Request_Error
request self req try_auto_parse_response=False =
request self req error_on_failure_code=True =
# Prevent request if the method is a write-like method and output context is disabled.
check_output_context ~action =
if fetch_methods.contains req.method || Context.Output.is_enabled then action else
@ -222,31 +140,27 @@ type HTTP
all_headers.map h-> builder.header h.name h.value
http_request = builder.build
body_handler = HttpResponse.BodyHandlers . ofByteArray
body_handler = HttpResponse.BodyHandlers . ofInputStream
response = Response.Value (self.internal_http_client.send http_request body_handler)
if response.code.is_success.not then Error.throw (Request_Error.Error "Status Code" ("Request failed with status code: " + response.code.to_text + ". " + response.body.decode_as_text)) else
if try_auto_parse_response.not then response else
response.decode if_unsupported=response . catch handler=(_->response)
if error_on_failure_code.not || response.code.is_success then response else
Error.throw (Request_Error.Error "Status Code" ("Request failed with status code: " + response.code.to_text + ". " + response.body.decode_as_text))
## PRIVATE
Static helper for get-like methods
fetch : (URI | Text) -> HTTP_Method -> Vector (Header | Pair Text Text) -> Boolean -> Any
fetch (uri:(URI | Text)) (method:HTTP_Method=HTTP_Method.Get) (headers:(Vector (Header | Pair Text Text))=[]) (try_auto_parse_response:Boolean=True) =
fetch (uri:(URI | Text)) (method:HTTP_Method=HTTP_Method.Get) (headers:(Vector (Header | Pair Text Text))=[]) =
check_method fetch_methods method <|
request = Request.new method uri (parse_headers headers) Request_Body.Empty
HTTP.new.request request try_auto_parse_response
HTTP.new.request request
## PRIVATE
Static helper for post-like methods
post : (URI | Text) -> Request_Body -> HTTP_Method -> Vector (Header | Pair Text Text) -> Boolean -> Any
post (uri:(URI | Text)) (body:Request_Body=Request_Body.Empty) (method:HTTP_Method=HTTP_Method.Post) (headers:(Vector (Header | Pair Text Text))=[]) (try_auto_parse_response:Boolean=True) =
post (uri:(URI | Text)) (body:Request_Body=Request_Body.Empty) (method:HTTP_Method=HTTP_Method.Post) (headers:(Vector (Header | Pair Text Text))=[]) =
check_method post_methods method <|
request = Request.new method uri (parse_headers headers) body
HTTP.new.request request try_auto_parse_response
HTTP.new.request request
## PRIVATE

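A sketch of the reworked `request` method, following the import style of the doc examples above (httpbin.org as in those examples):

```
from Standard.Base import all
import Standard.Base.Network.HTTP.HTTP
import Standard.Base.Network.HTTP.HTTP_Method.HTTP_Method
import Standard.Base.Network.HTTP.Request.Request

example_request =
    req = Request.new HTTP_Method.Get "http://httpbin.org/get"
    # Keep non-success responses instead of raising Request_Error.
    response = HTTP.new.request req error_on_failure_code=False
    # The body wraps a raw Input_Stream; materialize it before repeated reads.
    if response.code.is_success.not then response.code.to_text else
        response.with_materialized_body . body . decode_as_text
```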
View File

@ -0,0 +1,26 @@
import project.Any.Any
import project.Data.Text.Text
import project.Network.URI.URI
import project.Panic.Panic
polyglot java import java.io.IOException
## Errors when reading from a web response.
type HTTP_Error
## A generic IO error.
Arguments:
- uri: The uri that couldn't be read.
- message: The message for the error.
IO_Error (uri:URI) (message:Text)
## PRIVATE
Convert the HTTP_Error to a human-readable format.
to_display_text : Text
to_display_text self = self.message + " (" + self.uri.to_text + ")."
## PRIVATE
Utility method for running an action with Java exceptions mapping.
handle_java_exceptions uri:URI ~action =
Panic.catch IOException action caught_panic->
HTTP_Error.IO_Error uri ("An IO error has occurred: " + caught_panic.payload.to_text)

View File

@ -1,23 +1,22 @@
import project.Any.Any
import project.Data.Boolean.Boolean
import project.Data.Json.JS_Object
import project.Data.Numbers.Integer
import project.Data.Numbers.Number
import project.Data.Text.Encoding.Encoding
import project.Data.Text.Text
import project.Data.Vector.Vector
import project.Error.Error
import project.Errors.Illegal_Argument.Illegal_Argument
import project.Meta
import project.Metadata.Display
import project.Metadata.Widget
import project.Network.HTTP.Header.Header
import project.Network.HTTP.HTTP_Status_Code.HTTP_Status_Code
import project.Network.HTTP.Response_Body.Response_Body
import project.Network.HTTP.Response_Body.Unsupported_Content_Type
import project.Network.URI.URI
import project.Nothing.Nothing
import project.System.File_Format.Auto_Detect
import project.System.File_Format.File_Format
from project.Data.Text.Extensions import all
from project.Metadata.Choice import Option
from project.Metadata.Widget import Single_Choice
from project.System.File_Format import Auto_Detect, File_Format, format_types
from project.Network.HTTP.Response_Body import decode_format_selector
polyglot java import org.enso.base.Http_Utils
@ -29,7 +28,22 @@ type Response
Arguments:
- internal_http_response: The internal representation of the HTTP
response.
Value internal_http_response
- body_object: The body of the response. If Nothing, the body will be read
from the internal_http_response. Allows for materialization.
Value internal_http_response body_object=Nothing
## PRIVATE
Creates a new Response with the body materialized.
with_materialized_body : Response
with_materialized_body self =
Response.Value self.internal_http_response self.body.materialize
## GROUP Metadata
Get the uri for the response.
uri : URI
uri self =
uri_string = self.internal_http_response.uri.toString
URI.parse uri_string
## GROUP Metadata
Get the response headers.
@ -52,6 +66,16 @@ type Response
content_type_optional = self.internal_http_response.headers.firstValue "Content-Type"
if content_type_optional.isPresent then content_type_optional.get else Nothing
## Get the response content length in bytes.
This method uses the `Content-Length` header, and does not read the body.
If the header is not present, Nothing is returned.
content_length : Integer | Nothing
content_length self =
content_length_optional = self.internal_http_response.headers.firstValue "Content-Length"
if content_length_optional.isPresent.not then Nothing else
length_text = content_length_optional.get
Integer.parse length_text
## Get the response body.
> Example
@ -62,7 +86,7 @@ type Response
example_body = Examples.get_response.body
body : Response_Body
body self = Response_Body.Value (Vector.from_polyglot_array self.internal_http_response.body)
body self = self.body_object.if_nothing (Response_Body.new self.internal_http_response.body self.content_type self.uri)
## Get the response status code.
@ -84,17 +108,7 @@ type Response
@format decode_format_selector
decode : File_Format -> Any -> Any
decode self format=Auto_Detect ~if_unsupported=(Error.throw (Unsupported_Content_Type.Error self.content_type)) =
case format of
Auto_Detect ->
content_type = self.content_type
format = if content_type.is_nothing then Nothing else
Auto_Detect.get_web_parser content_type (self.internal_http_response.uri.toString)
if format.is_nothing then if_unsupported else
format.read_web self
_ ->
type_obj = Meta.type_of format
if can_decode type_obj then format.read_web self else
Error.throw (Illegal_Argument.Error type_obj.to_text+" cannot be used to decode from the web.")
self.body.decode format if_unsupported
## ALIAS parse as text
GROUP Conversions
@ -117,7 +131,7 @@ type Response
@encoding Encoding.default_widget
decode_as_json : Encoding -> JS_Object | Boolean | Number | Nothing | Text | Vector
decode_as_json self encoding=Encoding.utf_8 =
self.decode_as_text encoding . parse_json
self.body.decode_as_json encoding
## PRIVATE
Convert to a JavaScript Object representing this Response.
@ -134,31 +148,3 @@ type Response
type_pair = ["type", "Response"]
cons_pair = ["constructor", "Value"]
JS_Object.from_pairs [type_pair, cons_pair, ["headers", self.headers], ["body", self.body], ["code", self.code]]
## PRIVATE
type Unsupported_Content_Type
## PRIVATE
A type representing an unsupported content type.
Arguments:
- content_type: The content type that is unsupported.
Error (content_type : Text | Nothing)
## PRIVATE
Convert the error to a human readable string.
to_display_text : Text
to_display_text self = case self.content_type of
Nothing -> "The response did not contain a content type."
_ : Text -> "The content type '" + self.content_type +"' cannot be automatically decoded."
## PRIVATE
can_decode : File_Format -> Boolean
can_decode type = Meta.meta type . methods . contains "read_web"
## PRIVATE
Selector for decoding from the web.
decode_format_selector : Widget
decode_format_selector =
all_types = [Auto_Detect] + (format_types.filter can_decode)
make_name type_obj = type_obj.to_text.replace "_Format" "" . replace "_" " "
Single_Choice display=Display.Always values=(all_types.map n->(Option (make_name n) (File_Format.constructor_code n)))

View File

@ -1,31 +1,146 @@
import project.Data.Boolean.Boolean
import project.Any.Any
import project.Data.Json.JS_Object
import project.Data.Json.Json
import project.Data.Numbers.Number
import project.Data.Text.Encoding.Encoding
import project.Data.Text.Text
import project.Data.Vector.Vector
import project.Error.Error
import project.Errors.Illegal_Argument.Illegal_Argument
import project.IO
import project.Meta
import project.Metadata.Display
import project.Metadata.Widget
import project.Network.HTTP.HTTP_Error.HTTP_Error
import project.Network.URI.URI
import project.Nothing.Nothing
import project.Runtime.Context
import project.Runtime.Managed_Resource.Managed_Resource
import project.System.File.Existing_File_Behavior.Existing_File_Behavior
import project.System.File.File
import project.System.File.File_Access.File_Access
import project.System.File.Write_Extensions
import project.System.File_Format.Auto_Detect
import project.System.File_Format.Bytes
import project.System.File_Format.File_Format
import project.System.File_Format.Plain_Text_Format
import project.System.Input_Stream.Input_Stream
from project.Data.Boolean import Boolean, False, True
from project.Data.Text.Extensions import all
from project.Metadata.Choice import Option
from project.Metadata.Widget import Single_Choice
from project.System.File_Format import format_types
polyglot java import java.io.ByteArrayInputStream
polyglot java import java.io.InputStream
## PRIVATE
How large a response body can be before it is written to a temporary file.
maximum_body_in_memory = 4192
## PRIVATE
type Response_Body
## PRIVATE
Response body
## Create a Response_Body.
Arguments:
- bytes: The body of the response as binary data.
Value bytes
- stream: The body of the response as an InputStream.
- content_type: The content type of the response.
- uri: The URI of the response.
new : InputStream -> Text -> URI -> Response_Body
new stream content_type uri =
input_stream = Input_Stream.new stream (HTTP_Error.handle_java_exceptions uri)
Response_Body.Raw_Stream input_stream content_type uri
## PRIVATE
For internal use only. Should be materialised before returning to an end
user.
Raw_Stream (raw_stream:Input_Stream) (content_type:Text|Nothing) uri:URI
## PRIVATE
Byte_Array (bytes:Vector) (content_type:Text|Nothing) uri:URI
## PRIVATE
Temporary_File (file_resource:Managed_Resource) (content_type:Text|Nothing) uri:URI
## PRIVATE
with_stream : (Input_Stream -> Any ! HTTP_Error) -> Any ! HTTP_Error
with_stream self action = case self of
Response_Body.Raw_Stream raw_stream _ _ ->
Managed_Resource.bracket raw_stream (_.close) action
Response_Body.Byte_Array bytes _ _ ->
byte_stream = Input_Stream.new (ByteArrayInputStream.new bytes) (HTTP_Error.handle_java_exceptions self.uri)
Managed_Resource.bracket byte_stream (_.close) action
Response_Body.Temporary_File file_resource _ _ -> file_resource.with file->
opts = [File_Access.Read.to_java]
stream = HTTP_Error.handle_java_exceptions self.uri (file.input_stream_builtin opts)
file_stream = Input_Stream.new stream (HTTP_Error.handle_java_exceptions self.uri)
Managed_Resource.bracket (file_stream) (_.close) action
## PRIVATE
ADVANCED
Materializes the stream into either a byte array or a temporary file and
returns a new Response_Body.
materialize : Input_Stream
materialize self = case self of
Response_Body.Raw_Stream _ _ _ ->
self.with_stream body_stream->
body_stream.with_java_stream body_java_stream->
first_block = body_java_stream.readNBytes maximum_body_in_memory
case first_block.length < maximum_body_in_memory of
True -> Response_Body.Byte_Array (Vector.from_polyglot_array first_block) self.content_type self.uri
False ->
file = File.create_temporary_file self.uri.host
## Write contents to temporary file
Context.Output.with_enabled <|
file.with_output_stream [File_Access.Write, File_Access.Create, File_Access.Truncate_Existing] output_stream->
output_stream.with_java_stream java_output_stream->
java_output_stream.write first_block
body_java_stream.transferTo java_output_stream
java_output_stream.flush
Nothing
output_stream.close
## Have a file with the correct set up
resource = Managed_Resource.register file delete_file
Response_Body.Temporary_File resource self.content_type self.uri
_ -> self
## ALIAS parse
GROUP Conversions
Uses the format to decode the body.
If using `Auto_Detect`, the content-type will be used to determine the
format.
Arguments:
- format: The format to use to decode the body.
- if_unsupported: Specifies how to proceed if the format is not supported.
@format decode_format_selector
decode : File_Format -> Any -> Any
decode self format=Auto_Detect ~if_unsupported=(Error.throw (Unsupported_Content_Type.Error self.content_type)) =
case format of
Auto_Detect ->
content_type = self.content_type
content_type_format = if content_type.is_nothing then Nothing else
Auto_Detect.get_web_parser content_type (self.uri.to_text)
if content_type_format.is_nothing then if_unsupported else self.decode content_type_format
_ ->
type_obj = Meta.type_of format
if can_decode type_obj . not then Error.throw (Illegal_Argument.Error type_obj.to_text+" cannot be used to decode from a stream. It must be saved to a file first.") else
self.with_stream format.read_stream
## ALIAS bytes
GROUP Input
Read the body as a byte array.
decode_as_bytes : Vector
decode_as_bytes self = self.decode Bytes
## ALIAS parse as text
GROUP Conversions
Decodes the body to a Text value.
@encoding Encoding.default_widget
decode_as_text : Encoding -> Text
decode_as_text self encoding=Encoding.utf_8 =
Text.from_bytes self.bytes encoding
decode_as_text self encoding=Encoding.utf_8 = self.decode (Plain_Text_Format.Plain_Text encoding)
## ALIAS parse as json, parse json
GROUP Conversions
@ -45,13 +160,14 @@ type Response_Body
## PRIVATE
Convert response body to Text.
to_text : Text
to_text self = "Response_Body [" + self.bytes.length.to_text + " bytes]"
to_text self = "Response_Body"
## GROUP Output
Write response body to a File.
Arguments:
- file: The file to write the bytes to.
- file: The file to write the body to.
- on_existing_file: Specifies how to proceed if the file already exists.
> Examples
Write the contents of the request body to a scratch file on disk. The
@ -61,8 +177,47 @@ type Response_Body
import Standard.Examples
example_to_file =
Examples.get_geo_data.body.to_file Examples.scratch_file
to_file : File -> File
to_file self file =
self.bytes.write_bytes file
file
Examples.get_geo_data.to_file Examples.scratch_file
to_file : File | Text -> Existing_File_Behavior -> File
to_file self file on_existing_file=Existing_File_Behavior.Backup =
self.with_stream body_stream->
f = File.new file
r = on_existing_file.write f output_stream->
output_stream.write_stream body_stream
r.if_not_error file
## PRIVATE
type Unsupported_Content_Type
## PRIVATE
A type representing an unsupported content type.
Arguments:
- content_type: The content type that is unsupported.
Error (content_type : Text | Nothing)
## PRIVATE
Convert the error to a human readable string.
to_display_text : Text
to_display_text self = case self.content_type of
Nothing -> "The response did not contain a content type."
_ : Text -> "The content type '" + self.content_type +"' cannot be automatically decoded."
## PRIVATE
can_decode : File_Format -> Boolean
can_decode type = Meta.meta type . methods . contains "read_stream"
## PRIVATE
Selector for decoding from the web.
decode_format_selector : Widget
decode_format_selector =
all_types = [Auto_Detect] + (format_types.filter can_decode)
make_name type_obj = type_obj.to_text.replace "_Format" "" . replace "_" " "
Single_Choice display=Display.Always values=(all_types.map n->(Option (make_name n) (File_Format.constructor_code n)))
## PRIVATE
Utility method for deleting a temporary file. Provided to avoid
accidental scope capture with `Managed_Resource` finalizers.
delete_file : Any -> Nothing
delete_file file =
Context.Output.with_enabled <| file.delete
Nothing
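
A sketch of streaming a response body to disk with the new `to_file` (the URL and target path are hypothetical):

```
from Standard.Base import all
import Standard.Base.System.File.Existing_File_Behavior.Existing_File_Behavior

example_download =
    response = Data.fetch "https://example.com/archive.zip" try_auto_parse_response=False
    # `to_file` streams the materialized body to disk rather than buffering bytes.
    response.body.to_file (File.new "archive.zip") on_existing_file=Existing_File_Behavior.Overwrite
```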

View File

@ -12,7 +12,7 @@ import project.Data.Vector.Vector
import project.Error.Error
import project.Errors.Common.Dry_Run_Operation
import project.Errors.Common.Forbidden_Operation
import project.Errors.Encoding_Error.Encoding_Error
import project.Errors.Common.Type_Error
import project.Errors.File_Error.File_Error
import project.Errors.Illegal_Argument.Illegal_Argument
import project.Errors.Problem_Behavior.Problem_Behavior
@ -25,6 +25,8 @@ import project.Runtime.Context
import project.Runtime.Managed_Resource.Managed_Resource
import project.System.File.File_Access.File_Access
import project.System.File.File_Permissions.File_Permissions
import project.System.Input_Stream.Input_Stream
import project.System.Output_Stream.Output_Stream
import project.Warning.Warning
from project.Data.Boolean import Boolean, False, True
from project.Metadata.Choice import Option
@ -39,9 +41,11 @@ polyglot java import java.nio.file.StandardCopyOption
polyglot java import java.nio.file.StandardOpenOption
polyglot java import java.time.ZonedDateTime
polyglot java import org.enso.base.DryRunFileManager
polyglot java import org.enso.base.encoding.ReportingStreamDecoder
polyglot java import org.enso.base.encoding.ReportingStreamEncoder
polyglot java import org.enso.base.Encoding_Utils
polyglot java import org.enso.base.file_system.FileSystemSPI
## PRIVATE
file_types : Vector
file_types = Vector.from_polyglot_array (FileSystemSPI.get_types False)
## Represents a file or folder on the filesystem.
@Builtin_Type
@ -62,11 +66,19 @@ type File
import Standard.Examples
example_new = File.new Examples.csv_path
new : (Text | File) -> File
new (path : Text | File) =
case path of
_ : Text -> get_file path
new : (Text | File) -> Any
new path = case path of
_ : Text -> if path.contains "://" . not then get_file path else
protocol = path.split "://" . first
file_system = FileSystemSPI.get_type protocol False
if file_system.is_nothing then Error.throw (Illegal_Argument.Error "Unsupported protocol "+protocol) else
file_system.new path
_ : File -> path
_ ->
## Check to see if it is already a valid "File" type.
if (file_types.any file_type-> path.is_a file_type) then path else
Error.throw (Type_Error.Error File path "path")
## Creates a temporary file which will be deleted when Enso exits.
create_temporary_file : Text -> Text -> File
@ -125,15 +137,15 @@ type File
Creates a new output stream for this file and runs the specified action
on it.
The created stream is automatically closed when `action` returns (even
if it returns exceptionally).
Arguments:
- open_options: A vector of `File_Access` objects determining how to open
the stream. These options set the access properties of the stream.
- action: A function that operates on the output stream and returns some
value. The value is returned from this method.
The created stream is automatically closed when `action` returns (even
if it returns exceptionally).
> Example
Perform an action on an output stream with the file open for writing.
@ -149,21 +161,16 @@ type File
new_output_stream : File -> Vector File_Access -> Output_Stream ! File_Error
new_output_stream file open_options =
opts = open_options . map (_.to_java)
stream = File_Error.handle_java_exceptions file <|
file.output_stream_builtin opts
## We re-wrap the File Not Found error to return the parent directory
instead of the file itself - because the file that is being written
may not exist and it will not be an error, it is the parent directory
that does not exist is what prevents the write operation from
succeeding.
## Until #5792 properly fixes catch, we cannot catch
`File_Error.Not_Found` specifically, so instead we catch all
`File_Error`s and match the needed one.
stream_2 = stream.catch File_Error error-> case error of
stream = File_Error.handle_java_exceptions file (file.output_stream_builtin opts)
## Re-wrap the File Not Found error to return the parent directory
instead of the file itself, as a missing parent is what prevents the write.
Until #5792 fixes catch, we cannot catch `File_Error.Not_Found`
specifically, so instead we catch all `File_Error`s and match the needed one.
wrapped = stream.catch File_Error error-> case error of
File_Error.Not_Found file_path -> Error.throw (File_Error.Not_Found file_path.parent)
_ -> stream
resource = Managed_Resource.register stream_2 close_stream
Output_Stream.Value file resource
Output_Stream.new wrapped (File_Error.handle_java_exceptions self)
if Context.Output.is_enabled.not then Error.throw (Forbidden_Operation.Error "File writing is forbidden as the Output context is disabled.") else
Managed_Resource.bracket (new_output_stream self open_options) (_.close) action
@ -175,7 +182,7 @@ type File
Arguments:
- options: A vector of `File_Access` objects determining how to open
the stream. These options set the access properties of the stream.
output_stream_builtin : Vector File_Access -> Output_Stream
output_stream_builtin : Vector File_Access -> Java_Output_Stream
output_stream_builtin self options = @Builtin_Method "File.output_stream_builtin"
## PRIVATE
@ -186,8 +193,8 @@ type File
- open_options: A vector of `StandardOpenOption` polyglot objects
determining how to open the stream. These options set the access
properties of the stream.
input_stream : Vector StandardOpenOption -> Input_Stream
input_stream self options = @Builtin_Method "File.input_stream"
input_stream_builtin : Vector StandardOpenOption -> Java_Input_Stream
input_stream_builtin self options = @Builtin_Method "File.input_stream_builtin"
## PRIVATE
ADVANCED
@ -215,7 +222,13 @@ type File
file.with_input_stream [File_Access.Create, File_Access.Read] action
with_input_stream : Vector File_Access -> (Input_Stream -> Any ! File_Error) -> Any ! File_Error
with_input_stream self open_options action =
Managed_Resource.bracket (self.new_input_stream open_options) (_.close) action
if self.is_directory then Error.throw (File_Error.IO_Error self "File '"+self.path+"' is a directory") else
new_input_stream : File -> Vector File_Access -> Input_Stream ! File_Error
new_input_stream file open_options =
opts = open_options . map (_.to_java)
stream = File_Error.handle_java_exceptions file (file.input_stream_builtin opts)
Input_Stream.new stream (File_Error.handle_java_exceptions self)
Managed_Resource.bracket (new_input_stream self open_options) (_.close) action
## ALIAS load, open
GROUP Input
@ -288,8 +301,7 @@ type File
@encoding Encoding.default_widget
read_text : Encoding -> Problem_Behavior -> Text ! File_Error
read_text self (encoding=Encoding.utf_8) (on_problems=Problem_Behavior.Report_Warning) =
file = File.new self
bytes = file.read_bytes
bytes = self.read_bytes
Text.from_bytes bytes encoding on_problems
## GROUP Operators
@ -640,24 +652,6 @@ type File
delete_if_exists : Nothing ! File_Error
delete_if_exists self = if self.exists then self.delete else Nothing
## PRIVATE
ADVANCED
Returns a new input stream for this file.
Arguments:
- open_options: A vector of `File_Access` objects determining how to open
the stream. These options set the access properties of the stream.
The returned stream should be closed as soon as it is not used anymore.
The `with_input_stream` method should be preferred whenever possible.
new_input_stream : Vector File_Access -> Input_Stream ! File_Error
new_input_stream self open_options =
if self.is_directory then Error.throw (File_Error.IO_Error self "File '"+self.path+"' is a directory") else
opts = open_options . map (_.to_java)
stream = File_Error.handle_java_exceptions self (self.input_stream opts)
resource = Managed_Resource.register stream close_stream
Input_Stream.Value self resource
## PRIVATE
Reads first `n` bytes from the file (or less if the file is too small)
and returns a vector of bytes.
@ -764,241 +758,6 @@ type File
to_display_text : Text
to_display_text self = self.to_text
## PRIVATE
An output stream, allowing for interactive writing of contents into an
open file.
type Output_Stream
## PRIVATE
An output stream, allowing for interactive writing of contents into an
open file.
Arguments:
- file: The file which the output stream will write into.
- stream_resource: The internal resource that represents the underlying
stream.
Value file stream_resource
## PRIVATE
ADVANCED
Writes a vector of bytes into the file at the current stream position.
Arguments:
- contents: A vector of bytes to write into the file.
> Example
Write some bytes through a stream.
import Standard.Base.System.File.File_Access.File_Access
import Standard.Examples
example_write_bytes =
file = Examples.scratch_file
file.with_output_stream [File_Access.Create, File_Access.Write] out_stream->
out_stream.write_bytes "hello".utf_8
out_stream.close
write_bytes : Vector File_Access -> Nothing ! File_Error
write_bytes self contents = self.stream_resource . with java_stream->
File_Error.handle_java_exceptions self.file <|
java_stream.write contents
java_stream.flush
Nothing
## PRIVATE
ADVANCED
Closes this stream.
Even though Streams are closed automatically upon garbage collection, it
is still advised to close streams manually if they are not used within
a bracket pattern.
> Example
Open and close a stream.
import Standard.Base.System.File.File_Access.File_Access
import Standard.Examples
example_write_bytes =
file = Examples.scratch_file
file.with_output_stream [File_Access.Create] out_stream->
out_stream.close
close : Nothing
close self = self.stream_resource . finalize
## PRIVATE
ADVANCED
Exposes operations on the underlying Java output stream.
Arguments:
- f: Applies a function over the internal java stream.
Useful when integrating with polyglot functions requiring an
`OutputStream` as an argument.
with_java_stream : (Java_Output_Stream -> Any) -> Any
with_java_stream self f = self.stream_resource . with f
## PRIVATE
ADVANCED
Runs an action with a `ReportingStreamEncoder` encoding data to the
output stream with the specified encoding.
with_stream_encoder : Encoding -> Problem_Behavior -> (ReportingStreamEncoder -> Any) -> Any
with_stream_encoder self encoding on_problems action = self.with_java_stream java_stream->
## We ignore any warnings raised by the `bytes` method, because if the
original Unicode replacement character failed to encode, the `bytes`
method will have replaced it with the simple `?` sign which should be
available in all encodings. And this is exactly the behavior we want:
if available, we use the `<60>` character and otherwise we fallback to
the `?` character.
replacement_sequence = Encoding_Utils.INVALID_CHARACTER.bytes encoding on_problems=Problem_Behavior.Ignore
java_charset = encoding.to_java_charset
results = Encoding_Utils.with_stream_encoder java_stream java_charset replacement_sequence action
problems = Vector.from_polyglot_array results.problems . map Encoding_Error.Error
on_problems.attach_problems_after results.result problems
## PRIVATE
An input stream, allowing for interactive reading of contents from an open
file.
type Input_Stream
## PRIVATE
An input stream, allowing for interactive reading of contents from an open
file.
Arguments:
- file: The file from which the stream will read.
- stream_resource: The internal resource that represents the underlying
stream.
Value file stream_resource
## PRIVATE
ADVANCED
Reads all the bytes in this file into a vector of bytes.
> Example
Read all of the bytes from a file using a stream.
import Standard.Base.System.File.File_Access.File_Access
import Standard.Examples
example_read_all =
file = Examples.csv
in_stream = file.new_input_stream [File_Access.Read]
bytes = in_stream.read_all_bytes
in_stream.close
bytes
read_all_bytes : Vector ! File_Error
read_all_bytes self = self.stream_resource . with java_stream->
File_Error.handle_java_exceptions self.file <|
Vector.from_polyglot_array java_stream.readAllBytes
## PRIVATE
ADVANCED
Reads _up to_ the provided number of bytes from the stream.
Arguments:
- n: The number of bytes to read from the file.
Makes a best effort to read as many bytes as requested; however, fewer
bytes may be read if the end of the stream is encountered.
The length of the returned vector is the same as the number of bytes
read.
> Example
Read 10 of the bytes from a file using a stream.
import Standard.Base.System.File.File_Access.File_Access
import Standard.Examples
example_read_all =
file = Examples.csv
in_stream = file.new_input_stream [File_Access.Read]
bytes = in_stream.read_n_bytes 10
in_stream.close
bytes
read_n_bytes : Integer -> Vector ! File_Error
read_n_bytes self n = self.stream_resource . with java_stream->
File_Error.handle_java_exceptions self.file <|
bytes = java_stream.readNBytes n
Vector.from_polyglot_array bytes
## PRIVATE
ADVANCED
Reads the next byte from the stream.
The returned value is an integer in the range 0-255 representing the
next byte of input, or -1 if end of stream is reached.
> Example
Read byte from a file using a stream.
import Standard.Base.System.File.File_Access.File_Access
import Standard.Examples
example_read_all =
file = Examples.csv
in_stream = file.new_input_stream [File_Access.Read]
bytes = in_stream.read_byte
in_stream.close
bytes
read_byte : Integer ! File_Error
read_byte self = self.stream_resource . with java_stream->
File_Error.handle_java_exceptions self.file <|
java_stream.read
## PRIVATE
ADVANCED
Closes this stream.
Even though Streams are closed automatically upon garbage collection, it
is still advised to close streams manually if they are not used within
a bracket pattern.
> Example
Open and close a stream.
import Standard.Base.System.File.File_Access.File_Access
import Standard.Examples
example_read_all =
file = Examples.csv
in_stream = file.new_input_stream [File_Access.Read]
in_stream.close
close : Nothing
close self = self.stream_resource . finalize
## PRIVATE
Exposes operations on the underlying Java input stream.
Arguments:
- f: A function to apply to the internal Java stream.
Useful when integrating with polyglot functions requiring an
`InputStream` as an argument.
with_java_stream : (Java_Input_Stream -> Any) -> Any
with_java_stream self f = self.stream_resource . with f
## PRIVATE
Runs an action with a `ReportingStreamDecoder` decoding data from the
input stream with the specified encoding.
with_stream_decoder : Encoding -> Problem_Behavior -> (ReportingStreamDecoder -> Any) -> Any
with_stream_decoder self encoding on_problems action = self.stream_resource . with java_stream->
java_charset = encoding.to_java_charset
results = Encoding_Utils.with_stream_decoder java_stream java_charset action
problems = Vector.from_polyglot_array results.problems . map Encoding_Error.Error
on_problems.attach_problems_after results.result problems
## PRIVATE
Utility method for closing primitive Java streams. Provided to avoid
accidental scope capture with `Managed_Resource` finalizers.
close_stream : Any -> Nothing
close_stream stream =
stream.close
Nothing
## PRIVATE
Utility function that returns all descendants of the provided file, including
View File
@ -7,7 +7,7 @@ import project.Panic.Panic
import project.Runtime.Context
import project.System.File.File
import project.System.File.File_Access.File_Access
import project.System.File.Output_Stream
import project.System.Output_Stream.Output_Stream
from project.Data.Boolean import Boolean, False, True
## Specifies the behavior of a write operation when the destination file
View File
@ -15,6 +15,7 @@ import project.Network.HTTP.Response.Response
import project.Network.URI.URI
import project.Nothing.Nothing
import project.System.File.File
import project.System.Input_Stream.Input_Stream
from project.Data.Boolean import Boolean, False, True
from project.Data.Json import Invalid_JSON
from project.Data.Text.Extensions import all
@ -44,7 +45,7 @@ type Auto_Detect
## PRIVATE
Implements the `File.read` for this `File_Format`
read : File -> Problem_Behavior -> Any ! File_Error
read self file on_problems =
read self file on_problems = if file.is_directory then file.list else
reader = Auto_Detect.get_reading_format file
if reader == Nothing then Error.throw (File_Error.Unsupported_Type file) else
reader.read file on_problems
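With the hunk above, `Auto_Detect.read` returns a directory listing instead of failing to find a format. A minimal sketch of what this enables (the folder path is a placeholder, and routing `Data.read` for directories through `Auto_Detect` is an assumption of this sketch):

    from Standard.Base import all

    example_list_folder =
        # A directory now comes back as its file listing instead of raising
        # `File_Error.Unsupported_Type`.
        Data.read "data/samples/"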
@ -155,9 +156,9 @@ type Plain_Text_Format
## PRIVATE
Implements the `Data.parse` for this `File_Format`
read_web : Response -> Any
read_web self response =
Text.from_bytes response.body.bytes self.encoding
read_stream : Input_Stream -> Any
read_stream self stream:Input_Stream =
Text.from_bytes (stream.read_all_bytes) self.encoding
## A file format for reading or writing files as a sequence of bytes.
type Bytes
@ -189,6 +190,12 @@ type Bytes
_ = [on_problems]
file.read_bytes
## PRIVATE
Implements the `Data.parse` for this `File_Format`
read_stream : Input_Stream -> Any
read_stream self stream:Input_Stream =
stream.read_all_bytes
## A file format for reading and writing files as JSON.
type JSON_Format
## PRIVATE
@ -226,9 +233,9 @@ type JSON_Format
## PRIVATE
Implements the `Data.parse` for this `File_Format`
read_web : Response -> Any
read_web self response =
response.body.decode_as_json
read_stream : Input_Stream -> Any
read_stream self stream:Input_Stream =
Text.from_bytes (stream.read_all_bytes) Encoding.utf_8 . parse_json
## A setting to infer the default behaviour of some option.
type Infer
View File
@ -0,0 +1,109 @@
import project.Any.Any
import project.Data.Numbers.Integer
import project.Data.Text.Encoding.Encoding
import project.Data.Vector.Vector
import project.Errors.Encoding_Error.Encoding_Error
import project.Errors.Problem_Behavior.Problem_Behavior
import project.Nothing.Nothing
import project.Runtime.Managed_Resource.Managed_Resource
polyglot java import java.io.InputStream as Java_Input_Stream
polyglot java import org.enso.base.encoding.ReportingStreamDecoder
polyglot java import org.enso.base.Encoding_Utils
## PRIVATE
An input stream, allowing for interactive reading of contents.
type Input_Stream
## PRIVATE
ADVANCED
Given a Java InputStream, wraps it in a Managed_Resource and returns a new
Input_Stream.
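> Example
  Wrap an in-memory Java stream (an illustrative sketch, not part of this
  module: the `ByteArrayInputStream` import and the pass-through error
  handler are assumptions).

      polyglot java import java.io.ByteArrayInputStream

      example_new_stream =
          java_stream = ByteArrayInputStream.new "hello".utf_8
          Input_Stream.new java_stream (action -> action)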
new : Java_Input_Stream -> Any -> Input_Stream
new java_stream error_handler =
resource = Managed_Resource.register java_stream close_stream
Input_Stream.Value resource error_handler
## PRIVATE
An input stream, allowing for interactive reading of contents.
Arguments:
- stream_resource: The internal resource that represents the underlying
stream.
- error_handler: An error handler for IOExceptions thrown when reading.
Value stream_resource error_handler
## PRIVATE
ADVANCED
Reads all the bytes in this stream into a vector of bytes.
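> Example
  Read the whole contents of a file-backed stream (a sketch assuming
  `Examples.csv` exists and `File.with_input_stream` yields this type).

      import Standard.Base.System.File.File_Access.File_Access
      import Standard.Examples

      example_read_all =
          Examples.csv.with_input_stream [File_Access.Read] in_stream->
              in_stream.read_all_bytes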
read_all_bytes : Vector
read_all_bytes self = self.stream_resource . with java_stream->
self.error_handler <| Vector.from_polyglot_array java_stream.readAllBytes
## PRIVATE
ADVANCED
Reads up to the provided number of bytes from the stream.
Makes a best effort to read as many bytes as requested; however, fewer
bytes may be read if the end of the stream is encountered.
The length of the returned vector is the same as the number of bytes
read.
Arguments:
- n: The number of bytes to read from the stream.
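> Example
  Read the first 10 bytes of a file-backed stream (a sketch assuming
  `Examples.csv` exists and `File.with_input_stream` yields this type).

      import Standard.Base.System.File.File_Access.File_Access
      import Standard.Examples

      example_read_n_bytes =
          Examples.csv.with_input_stream [File_Access.Read] in_stream->
              in_stream.read_n_bytes 10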
read_n_bytes : Integer -> Vector
read_n_bytes self n = self.stream_resource . with java_stream->
self.error_handler <|
bytes = java_stream.readNBytes n
Vector.from_polyglot_array bytes
## PRIVATE
ADVANCED
Reads the next byte from the stream.
The returned value is an integer in the range 0-255 representing the
next byte of input, or -1 if end of stream is reached.
read_byte : Integer
read_byte self = self.stream_resource . with java_stream->
self.error_handler <|
java_stream.read
## PRIVATE
ADVANCED
Closes this stream.
Even though Streams are closed automatically upon garbage collection, it
is still advised to close streams manually if they are not used within
a bracket pattern.
close : Nothing
close self = self.stream_resource . finalize
## PRIVATE
Exposes operations on the underlying Java input stream.
Useful when integrating with polyglot functions requiring an
`InputStream` as an argument.
Arguments:
- f: A function to apply to the internal Java stream.
with_java_stream : (Java_Input_Stream -> Any) -> Any
with_java_stream self f = self.stream_resource . with f
## PRIVATE
Runs an action with a `ReportingStreamDecoder` decoding data from the
input stream with the specified encoding.
with_stream_decoder : Encoding -> Problem_Behavior -> (ReportingStreamDecoder -> Any) -> Any
with_stream_decoder self encoding on_problems action = self.stream_resource . with java_stream->
java_charset = encoding.to_java_charset
results = Encoding_Utils.with_stream_decoder java_stream java_charset action
problems = Vector.from_polyglot_array results.problems . map Encoding_Error.Error
on_problems.attach_problems_after results.result problems
## PRIVATE
Utility method for closing primitive Java streams. Provided to avoid
accidental scope capture with `Managed_Resource` finalizers.
close_stream : Any -> Nothing
close_stream stream =
stream.close
Nothing
View File
@ -0,0 +1,104 @@
import project.Any.Any
import project.Data.Numbers.Integer
import project.Data.Text.Encoding.Encoding
import project.Data.Text.Extensions
import project.Data.Vector.Vector
import project.Errors.Encoding_Error.Encoding_Error
import project.Errors.Problem_Behavior.Problem_Behavior
import project.Nothing.Nothing
import project.Runtime.Managed_Resource.Managed_Resource
import project.System.Input_Stream.Input_Stream
from project.System.Input_Stream import close_stream
polyglot java import java.io.OutputStream as Java_Output_Stream
polyglot java import org.enso.base.encoding.ReportingStreamEncoder
polyglot java import org.enso.base.Encoding_Utils
## PRIVATE
An output stream, allowing for interactive writing of contents.
type Output_Stream
## PRIVATE
ADVANCED
Given a Java OutputStream, wraps it in a Managed_Resource and returns a new
Output_Stream.
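> Example
  Wrap an in-memory Java stream (an illustrative sketch, not part of this
  module: the `ByteArrayOutputStream` import and the pass-through error
  handler are assumptions).

      polyglot java import java.io.ByteArrayOutputStream

      example_new_stream =
          java_stream = ByteArrayOutputStream.new
          Output_Stream.new java_stream (action -> action)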
new : Java_Output_Stream -> Any -> Output_Stream
new java_stream error_handler =
resource = Managed_Resource.register java_stream close_stream
Output_Stream.Value resource error_handler
## PRIVATE
An output stream, allowing for interactive writing of contents.
Arguments:
- stream_resource: The internal resource that represents the underlying
stream.
- error_handler: An error handler for IOExceptions thrown when writing.
Value stream_resource error_handler
## PRIVATE
ADVANCED
Writes a vector of bytes in this stream at the current position.
Arguments:
- contents: A vector of bytes to write.
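> Example
  Write a UTF-8 encoded greeting through a stream (a sketch assuming
  `Examples.scratch_file` exists and `File.with_output_stream` yields this
  type).

      import Standard.Base.System.File.File_Access.File_Access
      import Standard.Examples

      example_write_bytes =
          file = Examples.scratch_file
          file.with_output_stream [File_Access.Create, File_Access.Write] out_stream->
              out_stream.write_bytes "hello".utf_8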
write_bytes : Vector Integer -> Nothing
write_bytes self contents = self.stream_resource . with java_stream->
self.error_handler <|
java_stream.write contents
java_stream.flush
Nothing
## PRIVATE
ADVANCED
Writes an Input_Stream into this stream at the current position.
Arguments:
- contents: An Input_Stream to write to this stream.
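> Example
  Copy one file into another by piping its input stream into an output
  stream (a sketch assuming the example files exist and the `File` stream
  helpers yield these types).

      import Standard.Base.System.File.File_Access.File_Access
      import Standard.Examples

      example_copy_file =
          Examples.scratch_file.with_output_stream [File_Access.Create, File_Access.Write] out_stream->
              Examples.csv.with_input_stream [File_Access.Read] in_stream->
                  out_stream.write_stream in_stream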
write_stream : Input_Stream -> Nothing
write_stream self contents = self.stream_resource . with java_stream->
self.error_handler <|
contents.with_java_stream contents_stream->
contents_stream.transferTo java_stream
java_stream.flush
Nothing
## PRIVATE
ADVANCED
Closes this stream.
Even though Streams are closed automatically upon garbage collection, it
is still advised to close streams manually if they are not used within
a bracket pattern.
close : Nothing
close self = self.stream_resource . finalize
## PRIVATE
ADVANCED
Exposes operations on the underlying Java output stream.
Useful when integrating with polyglot functions requiring an
`OutputStream` as an argument.
Arguments:
- f: A function to apply to the internal Java stream.
with_java_stream : (Java_Output_Stream -> Any) -> Any
with_java_stream self f = self.stream_resource . with f
## PRIVATE
ADVANCED
Runs an action with a `ReportingStreamEncoder` encoding data to the
output stream with the specified encoding.
with_stream_encoder : Encoding -> Problem_Behavior -> (ReportingStreamEncoder -> Any) -> Any
with_stream_encoder self encoding on_problems action = self.with_java_stream java_stream->
## We ignore any warnings raised by the `bytes` method, because if the
original Unicode replacement character failed to encode, the `bytes`
method will have replaced it with the simple `?` sign which should be
available in all encodings. And this is exactly the behavior we want:
if available, we use the `�` character and otherwise we fall back to
the `?` character.
replacement_sequence = Encoding_Utils.INVALID_CHARACTER.bytes encoding on_problems=Problem_Behavior.Ignore
java_charset = encoding.to_java_charset
results = Encoding_Utils.with_stream_encoder java_stream java_charset replacement_sequence action
problems = Vector.from_polyglot_array results.problems . map Encoding_Error.Error
on_problems.attach_problems_after results.result problems
View File
@ -128,7 +128,7 @@ type Connection
## PRIVATE
Gets a list of the table types
table_types : [Text]
table_types : Vector Text
table_types self =
self.jdbc_connection.with_metadata metadata->
read_column metadata.getTableTypes "TABLE_TYPE"
@ -355,6 +355,7 @@ type Connection
if Execution_Context.Output.is_enabled.not then Error.throw (Forbidden_Operation.Error "Executing update queries is forbidden as the Output context is disabled.") else
statement_setter = self.dialect.get_statement_setter
self.jdbc_connection.with_prepared_statement query statement_setter stmt->
result = case self.supports_large_update.get of
True -> Panic.catch UnsupportedOperationException stmt.executeLargeUpdate _->
self.supports_large_update.put False
View File
@ -7,7 +7,7 @@ import project.Connection.Connection_Options.Connection_Options
import project.Connection.Credentials.Credentials
import project.Connection.SSL_Mode.SSL_Mode
import project.Internal.Postgres.Pgpass
import project.Internal.Postgres.Postgres_Connection
import project.Internal.Postgres.Postgres_Connection.Postgres_Connection
polyglot java import org.postgresql.Driver
@ -33,7 +33,7 @@ type Postgres_Details
Arguments:
- options: Overrides for the connection properties.
connect : Connection_Options -> Postgres_Connection.Postgres_Connection
connect : Connection_Options -> Postgres_Connection
connect self options =
if Driver.isRegistered.not then Driver.register
@ -53,7 +53,7 @@ type Postgres_Details
## PRIVATE
Provides the properties for the connection.
jdbc_properties : [Pair Text Text]
jdbc_properties : Vector (Pair Text Text)
jdbc_properties self =
credentials = case self.credentials of
Nothing ->
@ -81,7 +81,7 @@ type Postgres_Details
## PRIVATE
Given an `SSL_Mode`, create the JDBC properties to secure a Postgres-based
connection.
ssl_mode_to_jdbc_properties : SSL_Mode -> [Pair Text Text]
ssl_mode_to_jdbc_properties : SSL_Mode -> Vector (Pair Text Text)
ssl_mode_to_jdbc_properties use_ssl = case use_ssl of
SSL_Mode.Disable -> []
SSL_Mode.Prefer -> [Pair.new 'sslmode' 'prefer']
View File
@ -1,7 +1,7 @@
from Standard.Base import all
import project.Connection.Connection_Options.Connection_Options
import project.Internal.SQLite.SQLite_Connection
import project.Internal.SQLite.SQLite_Connection.SQLite_Connection
type SQLite_Details
## Connect to a SQLite DB File or InMemory DB.
@ -15,7 +15,7 @@ type SQLite_Details
Arguments:
- options: Overrides for the connection properties.
connect : Connection_Options -> SQLite_Connection.SQLite_Connection
connect : Connection_Options -> SQLite_Connection
connect self options =
properties = options.merge self.jdbc_properties
SQLite_Connection.create self.jdbc_url properties
View File
@ -88,7 +88,7 @@ make_unary_op name =
- function: A function taking exactly one argument: the generated SQL code
for the argument of the operation, and returning the generated SQL code for
the whole operation.
lift_unary_op : Text -> (Builder -> Builder) -> [Text, (Vector Builder -> Builder)]
lift_unary_op : Text -> (Builder -> Builder) -> Pair Text (Vector Builder -> Builder)
lift_unary_op name function =
generator = arguments -> case arguments.length == 1 of
False -> Error.throw <| Illegal_State.Error ("Invalid amount of arguments for operation " + name + ".")
@ -104,7 +104,7 @@ lift_unary_op name function =
- function: A function taking exactly two arguments: the generated SQL code
for the argument of the operation, and returning the generated SQL code for
the whole operation.
lift_binary_op : Text -> (Builder -> Builder -> Builder) -> [Text, (Vector Builder -> Builder)]
lift_binary_op : Text -> (Builder -> Builder -> Builder) -> Pair Text (Vector Builder -> Builder)
lift_binary_op name function =
generator = arguments -> case arguments.length == 2 of
False -> Error.throw <| Illegal_State.Error ("Invalid amount of arguments for operation " + name + ".")
View File
@ -22,7 +22,7 @@ type Column_Fetcher
It takes an initial size as an argument. That size is only a suggestion
for initial capacity and the builder must be ready to accept more or
less rows than that.
Value (fetch_value : ResultSet -> Integer -> Any) (make_builder : Integer -> Builder)
Value (fetch_value : ResultSet -> Integer -> Any) (make_builder : Integer -> Column_Fetcher_Builder)
## We could use `Storage.make_builder` here, but this builder allows us to pass
raw Truffle values around (like `long`) instead of boxing them.
@ -30,7 +30,7 @@ type Column_Fetcher
I suspect this can allow the Truffle PE to compile this into tighter loop,
but so far I have no proof. If it turns out to be an unnecessary
micro-optimization, we can always switch to `Storage.make_builder`.
type Builder
type Column_Fetcher_Builder
## PRIVATE
Wraps an underlying builder to provide a generic interface.
@ -53,7 +53,7 @@ boolean_fetcher =
append v =
if v.is_nothing then java_builder.appendNulls 1 else
java_builder.appendBoolean v
Builder.Value append (seal_java_builder java_builder)
Column_Fetcher_Builder.Value append (seal_java_builder java_builder)
Column_Fetcher.Value fetch_value make_builder
## PRIVATE
@ -67,7 +67,7 @@ double_fetcher =
append v =
if v.is_nothing then java_builder.appendNulls 1 else
java_builder.appendDouble v
Builder.Value append (seal_java_builder java_builder)
Column_Fetcher_Builder.Value append (seal_java_builder java_builder)
Column_Fetcher.Value fetch_value make_builder
## PRIVATE
@ -81,7 +81,7 @@ long_fetcher bits =
append v =
if v.is_nothing then java_builder.appendNulls 1 else
java_builder.appendLong v
Builder.Value append (seal_java_builder java_builder)
Column_Fetcher_Builder.Value append (seal_java_builder java_builder)
Column_Fetcher.Value fetch_value make_builder
## PRIVATE
@ -178,4 +178,4 @@ make_builder_from_java_object_builder java_builder =
append v =
if v.is_nothing then java_builder.appendNulls 1 else
java_builder.append v
Builder.Value append (seal_java_builder java_builder)
Column_Fetcher_Builder.Value append (seal_java_builder java_builder)
View File
@ -43,7 +43,7 @@ check_connection entity1 entity2 =
It always returns a vector: if the argument was already a vector, it is kept
as-is; otherwise it is wrapped in a singleton vector.
unify_vector_singleton : (Any | Vector Any) -> Vector.Vector Any
unify_vector_singleton : (Any | Vector Any) -> Vector Any
unify_vector_singleton x = case x of
_ : Vector -> x
_ -> [x]
View File
@ -21,7 +21,7 @@ polyglot java import java.lang.StringBuilder as Java_String_Builder
- host: The hostname of the database server.
- port: The port of the database server.
- database: The database to connect to.
read : Text -> Integer -> Text -> Text -> [Pair Text Text]
read : Text -> Integer -> Text -> Text -> Vector (Pair Text Text)
read host port database username=Nothing =
pgpass_file = locate
if pgpass_file.is_nothing || (verify pgpass_file . not) then [] else
View File
@ -24,6 +24,19 @@ from project.Internal.Result_Set import read_column
from project.Internal.Upload_Table import first_column_name_in_structure
type Postgres_Connection
## PRIVATE
Creates a Postgres connection based on a URL, properties and a dialect.
Arguments:
- url: The URL to connect to.
- properties: A vector of properties for the connection.
- make_new: a function that returns a new connection.
create : Text -> Vector -> (Text -> Text -> Postgres_Connection) -> Postgres_Connection
create url properties make_new =
jdbc_connection = JDBC_Connection.create url properties
entity_naming_properties = Postgres_Entity_Naming_Properties.new jdbc_connection
Postgres_Connection.Value (Connection.new jdbc_connection Dialect.postgres entity_naming_properties) make_new
## PRIVATE
@ -42,7 +55,7 @@ type Postgres_Connection
close self = self.connection.close
## Returns the list of databases (or catalogs) for the connection.
databases : [Text]
databases : Vector Text
databases self =
self.connection.jdbc_connection.with_connection connection->
result_set = connection.createStatement.executeQuery "select datname from pg_database where datname not in ('template0', 'template1')"
@ -244,17 +257,3 @@ type Postgres_Connection
on the 'subclasses'.
base_connection : Connection
base_connection self = self.connection
## PRIVATE
Creates a Postgres connection based on a URL, properties and a dialect.
Arguments:
- url: The URL to connect to.
- properties: A vector of properties for the connection.
- make_new: a function that returns a new connection.
create : Text -> Vector -> (Text -> Text -> Postgres_Connection) -> Postgres_Connection
create url properties make_new =
jdbc_connection = JDBC_Connection.create url properties
entity_naming_properties = Postgres_Entity_Naming_Properties.new jdbc_connection
Postgres_Connection.Value (Connection.new jdbc_connection Dialect.postgres entity_naming_properties) make_new
View File
@ -23,6 +23,18 @@ from project.Errors import SQL_Error, Table_Already_Exists, Table_Not_Found
from project.Internal.Upload_Table import first_column_name_in_structure
type SQLite_Connection
## PRIVATE
Creates a SQLite connection based on a JDBC URL and properties.
Arguments:
- url: The URL to connect to.
- properties: A vector of properties for the connection.
create : Text -> Vector -> SQLite_Connection
create url properties =
jdbc_connection = JDBC_Connection.create url properties
SQLite_Connection.Value (Connection.new jdbc_connection Dialect.sqlite SQLite_Entity_Naming_Properties.new)
## PRIVATE
A SQLite database connection.
@ -238,15 +250,3 @@ type SQLite_Connection
on the 'subclasses'.
base_connection : Connection
base_connection self = self.connection
## PRIVATE
Creates a SQLite connection based on a JDBC URL and properties.
Arguments:
- url: The URL to connect to.
- properties: A vector of properties for the connection.
create : Text -> Vector -> SQLite_Connection
create url properties =
jdbc_connection = JDBC_Connection.create url properties
SQLite_Connection.Value (Connection.new jdbc_connection Dialect.sqlite SQLite_Entity_Naming_Properties.new)
View File
@ -4,7 +4,7 @@ import Standard.Base.Errors.Illegal_State.Illegal_State
import Standard.Base.Runtime.Ref.Ref
import Standard.Table.Internal.Problem_Builder.Problem_Builder
from Standard.Table import Value_Type, Aggregate_Column
from Standard.Table import Aggregate_Column, Value_Type
from Standard.Table.Data.Aggregate_Column.Aggregate_Column import all
import project.Connection.Connection.Connection
View File
@ -102,7 +102,7 @@ select_into_table_implementation source_table connection table_name primary_key
will never return a name of a table that exists but
was created outside of a dry run.
connection.drop_table tmp_table_name if_exists=True
internal_upload_table source_table connection tmp_table_name primary_key temporary on_problems=on_problems row_limit=dry_run_row_limit
internal_upload_table source_table connection tmp_table_name primary_key temporary=True on_problems=on_problems row_limit=dry_run_row_limit
temporary_table = connection.base_connection.internal_allocate_dry_run_table table.name
warning = Dry_Run_Operation.Warning "Only a dry run of `select_into_database_table` was performed - a temporary table ("+tmp_table_name+") was created, containing a sample of the data."
Warning.attach warning temporary_table
@ -216,7 +216,7 @@ raise_duplicated_primary_key_error source_table primary_key original_panic =
Error.throw (Non_Unique_Primary_Key.Error primary_key example_entry example_count)
## PRIVATE
align_structure : Database_Table | In_Memory_Table | Vector Column_Description -> Vector Column_Description
align_structure : Connection | Any -> Database_Table | In_Memory_Table | Vector Column_Description -> Vector Column_Description
align_structure connection table_or_columns = case table_or_columns of
vector : Vector -> align_vector_structure vector
table : Database_Table -> structure_from_existing_table connection table
View File
@ -41,7 +41,7 @@ xls =
url = "https://enso-data-samples.s3.us-west-1.amazonaws.com/spreadsheet.xls"
file = enso_project.data / 'spreadsheet.xls'
if file.exists.not then
Context.Output.with_enabled <| Data.fetch url . body . to_file file
Context.Output.with_enabled <| HTTP.fetch url . body . to_file file
file
## An example XLSX file for experimenting with Table and its APIs.
@ -56,7 +56,7 @@ xlsx =
url = "https://enso-data-samples.s3.us-west-1.amazonaws.com/spreadsheet.xlsx"
file = enso_project.data / 'spreadsheet.xlsx'
if file.exists.not then
Context.Output.with_enabled <| Data.fetch url . body . to_file file
Context.Output.with_enabled <| HTTP.fetch url . body . to_file file
file
## A file that is used for writing temporary data as part of tests.
@ -145,7 +145,7 @@ geo_data_url = "https://enso-data-samples.s3.us-west-1.amazonaws.com/Bus_Stop_Be
Calling this method will cause Enso to make a network request to a data
endpoint.
get_response : Response
get_response = HTTP.fetch geo_data_url
get_response = HTTP.fetch geo_data_url . with_materialized_body
## Gets HTTP data from a network endpoint.
@ -153,11 +153,7 @@ get_response = HTTP.fetch geo_data_url
Calling this method will cause Enso to make a network request to a data
endpoint.
get_geo_data : Response_Body
get_geo_data = HTTP.fetch geo_data_url
## A simple HTTP client for examples.
http_client : HTTP
http_client = HTTP.new (timeout = Duration.new seconds=30)
get_geo_data = Data.fetch geo_data_url try_auto_parse_response=False
## A basic URI for examples.
uri : URI
View File
@ -40,7 +40,7 @@ type Spreadsheets
get_table self sheet_id sheet_range =
request = self.java_service.spreadsheets.values.get sheet_id sheet_range . setMajorDimension 'COLUMNS' . setValueRenderOption 'UNFORMATTED_VALUE'
response = request.execute
values = Vector.from_polyglot_array response.getValues . map Vector.Vector
values = Vector.from_polyglot_array response.getValues
columned = values.map v-> [v.first, v.drop 1]
Table.new columned
View File
@ -4,8 +4,8 @@ import project.Data.Type.Storage
import project.Data.Type.Value_Type.Bits
import project.Data.Type.Value_Type.Value_Type
polyglot java import org.enso.table.data.column.storage.type.IntegerType
polyglot java import org.enso.base.polyglot.NumericConverter
polyglot java import org.enso.table.data.column.storage.type.IntegerType
## PRIVATE
Finds the most specific `Value_Type` that can be used to hold the given
View File
@ -7,10 +7,10 @@ import Standard.Table.Data.Type.Value_Type.Bits
import Standard.Table.Data.Type.Value_Type.Value_Type
from Standard.Table.Errors import Inexact_Type_Coercion
polyglot java import org.enso.table.data.column.builder.Builder
polyglot java import org.enso.table.data.column.builder.Builder as Java_Builder
polyglot java import org.enso.table.data.column.storage.type.AnyObjectType
polyglot java import org.enso.table.data.column.storage.type.Bits as Java_Bits
polyglot java import org.enso.table.data.column.storage.type.BigIntegerType
polyglot java import org.enso.table.data.column.storage.type.Bits as Java_Bits
polyglot java import org.enso.table.data.column.storage.type.BooleanType
polyglot java import org.enso.table.data.column.storage.type.DateTimeType
polyglot java import org.enso.table.data.column.storage.type.DateType
@ -87,6 +87,6 @@ from_value_type_strict value_type =
## PRIVATE
Creates a column storage builder for the given storage type.
make_builder : StorageType -> Integer -> Builder
make_builder : StorageType -> Integer -> Java_Builder
make_builder storage initial_size=64 =
Builder.getForType storage initial_size
Java_Builder.getForType storage initial_size
View File
@ -98,7 +98,7 @@ find_common_type types strict =
method returns `Nothing` if the value is `Nothing` - so the caller can
try to treat this value as fitting any type, or accordingly to specific
semantics of each method.
find_argument_type : Any -> Value_Type | Nothing
find_argument_type : Any -> Boolean -> Value_Type | Nothing
find_argument_type value infer_precise_type=True = if Nothing == value then Nothing else
case is_column value of
False -> Enso_Types.most_specific_value_type value use_smallest=True
View File
@ -1,5 +1,6 @@
from Standard.Base import all
import Standard.Base.Network.HTTP.Response.Response
import Standard.Base.System.Input_Stream.Input_Stream
from Standard.Base.Widget_Helpers import make_delimiter_selector
import project.Data.Data_Formatter.Data_Formatter
@ -97,10 +98,9 @@ type Delimited_Format
## PRIVATE
Implements the `Data.parse` for this `File_Format`
read_web : Response -> Any
read_web self response =
text = Text.from_bytes response.body.bytes self.encoding
Delimited_Reader.read_text text self Report_Warning
read_stream : Input_Stream -> Any
read_stream self stream:Input_Stream =
Delimited_Reader.read_stream self stream on_problems=Report_Warning
## PRIVATE
ADVANCED
View File
@ -132,7 +132,7 @@ type No_Input_Columns_Selected
## Indicates that an aggregation calculation could not be completed.
type Invalid_Aggregation
## PRIVATE
Error (column:Text) (rows:[Integer]) (message:Text)
Error (column:Text) (rows:(Vector Integer)) (message:Text)
## PRIVATE
@ -158,7 +158,7 @@ type Floating_Point_Equality
without any quote character
type Unquoted_Delimiter
## PRIVATE
Error (column:Text) (rows:[Integer])
Error (column:Text) (rows:(Vector Integer))
## PRIVATE
@ -186,7 +186,7 @@ type Additional_Warnings
a single instance of `Additional_Invalid_Rows`.
type Invalid_Row
## PRIVATE
Error (source_file_line_number : Integer) (index : Integer | Nothing) (row : [Text]) (expected_columns : Integer)
Error (source_file_line_number : Integer) (index : Integer | Nothing) (row : Vector Text) (expected_columns : Integer)
## PRIVATE
@ -239,7 +239,7 @@ type Parser_Error
be corrupted.
type Unquoted_Characters_In_Output
## PRIVATE
Warning (column : Text) (rows : [Integer])
Warning (column : Text) (rows : Vector Integer)
## PRIVATE
Pretty print the unquoted characters error.
@ -274,7 +274,7 @@ type Invalid_Location
format.
type Invalid_Format
## PRIVATE
Error column:(Text|Nothing) (value_type:Value_Type|Integer|Number|Date|Date_Time|Time_Of_Day|Boolean|Any) (cells:[Text])
Error column:(Text|Nothing) (value_type:Value_Type|Integer|Number|Date|Date_Time|Time_Of_Day|Boolean|Any) (cells:(Vector Text))
## PRIVATE
Pretty print the invalid format error.
View File
@ -1,5 +1,6 @@
from Standard.Base import all
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.System.Input_Stream.Input_Stream
import project.Data.Match_Columns.Match_Columns
import project.Data.Table.Table
@ -64,9 +65,12 @@ type Excel_Format
If the File_Format supports reading from the web response, return a configured instance.
for_web : Text -> URI -> Excel_Format | Nothing
for_web content_type uri =
_ = [content_type, uri]
## Currently not loading Excel files automatically as these need to be loaded as a connection.
Nothing
_ = [uri]
parts = content_type.split ";" . map .trim
case parts.first of
"application/vnd.ms-excel" -> Excel_Format.Excel xls_format=True
"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" -> Excel_Format.Excel xls_format=False
_ -> Nothing
## PRIVATE
ADVANCED
@ -78,6 +82,23 @@ type Excel_Format
Excel_Section.Workbook -> Excel_Workbook.new file format self.headers
_ -> Excel_Reader.read_file file self.section self.headers on_problems format
## PRIVATE
Implements the `Data.parse` for this `File_Format`
read_stream : Input_Stream -> Any
read_stream self stream:Input_Stream =
xls_format = if self.xls_format == Infer then False else self.xls_format
bad_format _ =
message = case self.xls_format of
Infer -> "Cannot infer xls_format from a stream (tried xlsx)."
True -> "Stream was not in the expected format (xls)."
False -> "Stream was not in the expected format (xlsx)."
Error.throw (Illegal_Argument.Error message)
Excel_Reader.handle_bad_format_with_handler bad_format <|
workbook = Excel_Workbook.from_stream stream xls_format self.headers
workbook.read_section self.section
## PRIVATE
ADVANCED
Implements the `Table.write` for this `File_Format`.
View File
@ -2,12 +2,14 @@ from Standard.Base import all
import Standard.Base.Errors.File_Error.File_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.Metadata.Display
import Standard.Base.System.Input_Stream.Input_Stream
from Standard.Base.Data.Filter_Condition import sql_like_to_regex
from Standard.Base.Metadata.Choice import Option
from Standard.Base.Metadata.Widget import Single_Choice
import project.Data.Table.Table
import project.Excel.Excel_Range.Excel_Range
import project.Excel.Excel_Section.Excel_Section
import project.Internal.Excel_Reader
polyglot java import org.apache.poi.ss.usermodel.Workbook
@ -27,13 +29,26 @@ type Excel_Workbook
file_obj = File.new file . normalize
File_Error.handle_java_exceptions file_obj <| Excel_Reader.handle_bad_format file_obj <|
file_obj.with_input_stream [File_Access.Read] stream->
stream.with_java_stream java_stream->
workbook = ExcelReader.readWorkbook java_stream xls_format
Excel_Workbook.Value workbook file_obj xls_format headers
Excel_Workbook.from_stream stream xls_format headers file_obj
## PRIVATE
ADVANCED
Load an Input_Stream as a connection to an Excel workbook.
Arguments:
- stream: The stream to load.
- xls_format: Whether to use the old XLS format (default is XLSX).
- headers: Whether to use the first row as headers (default is to infer).
- file: Optional file reference.
from_stream : Input_Stream -> Boolean -> Boolean | Infer -> File | Nothing -> Excel_Workbook
from_stream stream xls_format=False headers=Infer file=Nothing = Excel_Reader.handle_bad_format file <|
stream.with_java_stream java_stream->
workbook = ExcelReader.readWorkbook java_stream xls_format
Excel_Workbook.Value workbook file xls_format headers
## PRIVATE
Creates an Excel_Workbook connection.
Value workbook:Workbook file:File xls_format:Boolean headers:(Boolean|Infer)
Value workbook:Workbook (file:(File|Nothing)) xls_format:Boolean headers:(Boolean|Infer)
## Returns the list of databases (or catalogs) for the connection.
databases : Nothing
@ -41,7 +56,7 @@ type Excel_Workbook
## Returns the name of the current database (or catalog).
database : Text
database self = self.file.path
database self = if self.file.is_nothing then "" else self.file.path
## Returns a new Connection with the specified database set as default.
@ -151,3 +166,30 @@ type Excel_Workbook
Excel_Reader.prepare_reader_table Report_Warning <| case query of
_ : Excel_Range -> ExcelReader.readRange self.workbook query.java_range java_headers 0 limit
_ : Text -> ExcelReader.readRangeByName self.workbook query java_headers 0 limit
## GROUP Standard.Base.Input
Reads an Excel_Section from the workbook.
Arguments:
- section: the section to read.
read_section : Excel_Section -> (Excel_Workbook | Table | Vector)
read_section self section =
java_headers = Excel_Reader.make_java_headers self.headers
case section of
Excel_Section.Workbook -> self
Excel_Section.Sheet_Names -> self.sheet_names
Excel_Section.Range_Names -> self.named_ranges
Excel_Section.Worksheet sheet skip_rows row_limit ->
sheet_name = case sheet of
_ : Text -> sheet
_ : Integer ->
names = self.sheet_names
if (sheet < 1 || sheet > names.length) then Error.throw (Illegal_Argument.Error "Worksheet index out of range.") else
names.at (sheet - 1)
_ -> Error.throw (Illegal_Argument.Error "Worksheet must be either Text or an Integer.")
Excel_Reader.prepare_reader_table Report_Warning <|
ExcelReader.readRangeByName self.workbook sheet_name java_headers skip_rows row_limit
Excel_Section.Cell_Range address skip_rows row_limit ->
Excel_Reader.prepare_reader_table Report_Warning <| case address of
_ : Excel_Range -> ExcelReader.readRange self.workbook address.java_range java_headers skip_rows row_limit
_ : Text -> ExcelReader.readRangeByName self.workbook address java_headers skip_rows row_limit
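Taken together, `from_stream` and `read_section` let a workbook be consumed without a backing file. A possible usage sketch (the example workbook, the import paths, and the argument defaults are assumptions):

    import Standard.Base.System.File.File_Access.File_Access
    import Standard.Examples
    import Standard.Table.Excel.Excel_Section.Excel_Section
    import Standard.Table.Excel.Excel_Workbook.Excel_Workbook

    example_sheet_names =
        Examples.xlsx.with_input_stream [File_Access.Read] stream->
            workbook = Excel_Workbook.from_stream stream xls_format=False
            workbook.read_section Excel_Section.Sheet_Names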
View File
@ -35,7 +35,7 @@ polyglot java import org.enso.table.aggregations.Sum as SumAggregator
- problems: Set of any problems when validating the input
type Validated_Aggregate_Columns
## PRIVATE
Value (key_columns:[Column]) (valid_columns:[Pair Text Aggregate_Column]) (problems:[Any])
Value (key_columns:(Vector Column)) (valid_columns:(Vector (Pair Text Aggregate_Column))) (problems:(Vector Any))
## PRIVATE
Prepares an aggregation input for further processing:
View File
@ -1,7 +1,7 @@
from Standard.Base import all
import Standard.Base.Errors.File_Error.File_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.System.File.Input_Stream
import Standard.Base.System.Input_Stream.Input_Stream
import project.Data.Data_Formatter.Data_Formatter
import project.Data.Table.Table
View File
@ -1,7 +1,7 @@
from Standard.Base import all
import Standard.Base.Errors.File_Error.File_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.System.File.Output_Stream
import Standard.Base.System.Output_Stream.Output_Stream
import project.Data.Data_Formatter.Data_Formatter
import project.Data.Match_Columns.Match_Columns
View File
@ -1,7 +1,7 @@
from Standard.Base import all
import Standard.Base.Errors.File_Error.File_Error
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
import Standard.Base.System.File.Input_Stream
import Standard.Base.System.Input_Stream.Input_Stream
import project.Data.Table.Table
import project.Excel.Excel_Range.Excel_Range
@ -81,6 +81,12 @@ read_file file section headers on_problems xls_format=False =
handle_bad_format file ~action =
bad_format caught_panic =
Error.throw (File_Error.Corrupted_Format file caught_panic.payload.getMessage caught_panic.payload)
Panic.catch UnsupportedFileFormatException handler=bad_format <|
Panic.catch NotOLE2FileException handler=bad_format <|
handle_bad_format_with_handler bad_format action
## PRIVATE
A helper that handles the Java exceptions reported when a malformed XLS file
is opened.
handle_bad_format_with_handler handler ~action =
Panic.catch UnsupportedFileFormatException handler=handler <|
Panic.catch NotOLE2FileException handler=handler <|
action
View File
@ -106,7 +106,7 @@ make_order_by_selector table display=Display.Always =
## PRIVATE
Make a column rename name selector.
make_rename_name_vector_selector : Table -> Display -> Vector_Editor
make_rename_name_vector_selector : Table -> Display -> Widget
make_rename_name_vector_selector table display=Display.Always =
col_names = table.column_names
names = col_names.map n-> Option n "["+n.pretty+", "+n.pretty+"]"
View File
@ -179,14 +179,14 @@ limit_data limit data = case limit of
extreme + data.take (Index_Sub_Range.Sample (limit - extreme.length))
## PRIVATE
json_from_table : Table -> [Integer]|Nothing -> Integer|Nothing -> Text
json_from_table : Table -> Vector Integer | Nothing -> Integer | Nothing -> Text
json_from_table table bounds limit =
data = table.point_data |> bound_data bounds |> limit_data limit
axes = table.axes
JS_Object.from_pairs [[data_field, data], [axis_field, axes]] . to_json
## PRIVATE
json_from_vector : Vector Any -> [Integer]|Nothing -> Integer|Nothing -> Text
json_from_vector : Vector Any -> Vector Integer | Nothing -> Integer | Nothing -> Text
json_from_vector vec bounds limit =
data = vec.point_data |> bound_data bounds |> limit_data limit
JS_Object.from_pairs [[data_field, data], [axis_field, Nothing]] . to_json
View File
@ -61,7 +61,7 @@ public final class EnsoFile implements EnsoObject {
return this.truffleFile.newOutputStream(openOptions);
}
@Builtin.Method
@Builtin.Method(name = "input_stream_builtin")
@Builtin.WrapException(from = IOException.class)
@Builtin.Specialize
@Builtin.ReturningGuestObject
View File
@ -0,0 +1,21 @@
package org.enso.aws.file_system;
import org.enso.base.file_system.FileSystemSPI;
@org.openide.util.lookup.ServiceProvider(service = FileSystemSPI.class)
public class S3FileSystemSPI extends FileSystemSPI {
@Override
protected String getModuleName() {
return "Standard.AWS.S3.S3_File";
}
@Override
protected String getTypeName() {
return "S3_File";
}
@Override
protected String getProtocol() {
return "s3";
}
View File

@ -0,0 +1,45 @@
package org.enso.base.file_system;
import java.util.ServiceLoader;
import org.graalvm.polyglot.Context;
import org.graalvm.polyglot.Value;
public abstract class FileSystemSPI {
private static final ServiceLoader<org.enso.base.file_system.FileSystemSPI> loader =
ServiceLoader.load(
org.enso.base.file_system.FileSystemSPI.class,
org.enso.base.file_format.FileFormatSPI.class.getClassLoader());
public static Value get_type(String protocol, boolean refresh) {
if (refresh) {
loader.reload();
}
var first =
loader.stream()
.filter(provider -> provider.get().getProtocol().equals(protocol))
.findFirst();
return first
.map(fileSystemSPIProvider -> fileSystemSPIProvider.get().getTypeObject())
.orElse(null);
}
public static Value[] get_types(boolean refresh) {
if (refresh) {
loader.reload();
}
return loader.stream().map(provider -> provider.get().getTypeObject()).toArray(Value[]::new);
}
public Value getTypeObject() {
final var context = Context.getCurrent().getBindings("enso");
final var module = context.invokeMember("get_module", getModuleName());
return module.invokeMember("get_type", getTypeName());
}
protected abstract String getModuleName();
protected abstract String getTypeName();
protected abstract String getProtocol();
}
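On the Enso side, the provider above lets a path be dispatched on its protocol prefix. A hypothetical sketch of what that enables once `S3_File` registers the `s3` protocol (the bucket and key are placeholders, and routing `Data.read` through this SPI is an assumption of the sketch):

    from Standard.Base import all
    from Standard.AWS import all

    example_s3_read =
        # "s3" is resolved to the `S3_File` type via the registered provider.
        Data.read "s3://my-bucket/examples/data.csv"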
View File
@ -0,0 +1,7 @@
name: AWS_Tests
namespace: enso_dev
version: 0.0.1
license: MIT
author: enso-dev@enso.org
maintainer: enso-dev@enso.org
prefer-local-libraries: true
View File
@ -0,0 +1,8 @@
from Standard.Base import all
from Standard.Test import Test_Suite
import project.S3_Spec
main = Test_Suite.run_main <|
S3_Spec.spec
View File
@ -0,0 +1,129 @@
from Standard.Base import all
import Standard.Base.Errors.No_Such_Key.No_Such_Key
import Standard.Base.Runtime.Ref.Ref
from Standard.AWS import S3, AWS_Credential
from Standard.AWS.Errors import AWS_SDK_Error, More_Records_Available, S3_Error, S3_Bucket_Not_Found
from Standard.Test import Test, Test_Suite
import Standard.Test.Extensions
spec =
bucket_name = "enso-data-samples"
not_a_bucket_name = "not_a_bucket_enso"
object_name = "Bus_Stop_Benches.geojson"
folder_name = "examples/"
sub_folder_name = "examples/folder 1/"
api_pending = if Environment.get "AWS_ACCESS_KEY_ID" . is_nothing then "No Access Key found." else Nothing
Test.group "S3.parse_uri" <|
Test.specify "parse bucket only uris" <|
S3.parse_uri "s3://" . should_equal (Pair.new "" "")
S3.parse_uri "s3://asda" . should_equal (Pair.new "asda" "")
S3.parse_uri "s3://banana/" . should_equal (Pair.new "banana" "")
Test.specify "parse full paths uris" <|
S3.parse_uri "s3://banana/apple" . should_equal (Pair.new "banana" "apple")
S3.parse_uri "s3://banana/apple/orange" . should_equal (Pair.new "banana" "apple/orange")
Test.specify "reject invalid urils" <|
S3.parse_uri "asda" . should_equal Nothing
S3.parse_uri "s3:///" . should_equal Nothing
S3.parse_uri "s3:///apple/orange" . should_equal Nothing
buckets = Ref.new []
Test.group "S3.list_buckets" pending=api_pending <|
Test.specify "should be able to list buckets" <|
bucket_list = S3.list_buckets . should_succeed
buckets.put bucket_list
if bucket_name != Nothing then bucket_list . should_contain bucket_name
Test.specify "should handle auth issues" <|
S3.list_buckets (AWS_Credential.Profile "NoSuchProfile") . should_fail_with AWS_SDK_Error
## Rest of tests need a functional S3 connection
pending = if bucket_name.is_nothing then "No S3 bucket set." else if buckets.get.is_error then "S3 Access Failed." else if buckets.get.contains bucket_name then Nothing else "S3 Bucket Not Found."
Test.group "S3.head (bucket)" pending=pending <|
Test.specify "should be able to head a bucket" <|
S3.head bucket_name . should_equal Map.empty
S3.head not_a_bucket_name . should_fail_with S3_Bucket_Not_Found
Test.group "S3.read_bucket" pending=pending <|
Test.specify "should be able to read bucket" <|
objects_and_folders = S3.read_bucket bucket_name
folders = objects_and_folders.first
folders . should_contain folder_name
objects = objects_and_folders.second
objects . should_contain object_name
Test.specify "should be able to read sub folder" <|
objects_and_folders = S3.read_bucket bucket_name folder_name
folders = objects_and_folders.first
folders . should_contain sub_folder_name
Test.specify "should attach a warning if not a complete list" <|
objects = S3.read_bucket bucket_name max_count=1
warnings = Warning.get_all objects
warnings.length . should_equal 1
warning = warnings.first
warning.value.should_be_a More_Records_Available
Test.specify "should handle missing bucket gracefully" <|
S3.read_bucket not_a_bucket_name . should_fail_with S3_Bucket_Not_Found
Test.specify "should handle auth issues" <|
S3.read_bucket bucket_name credentials=(AWS_Credential.Profile "NoSuchProfile") . should_fail_with AWS_SDK_Error
list = Ref.new []
Test.group "S3.list_objects" pending=pending <|
Test.specify "should be able to list objects" <|
objects = S3.list_objects bucket_name
objects . should_contain object_name
list.put objects
Test.specify "should attach a warning if not a complete list" <|
objects = S3.list_objects bucket_name max_count=1
warnings = Warning.get_all objects
warnings.length . should_equal 1
warning = warnings.first
warning.value.should_be_a More_Records_Available
Test.specify "should handle missing bucket gracefully" <|
S3.list_objects not_a_bucket_name . should_fail_with S3_Bucket_Not_Found
Test.specify "should handle auth issues" <|
S3.list_objects bucket_name credentials=(AWS_Credential.Profile "NoSuchProfile") . should_fail_with AWS_SDK_Error
## These tests need a valid object, so check we found it within the bucket.
pending_object = if pending.is_nothing.not then pending else
if list.get.contains object_name then Nothing else
"Unable to find test object in bucket."
Test.group "S3.head (object)" pending=pending_object <|
Test.specify "should be able to head an object" <|
S3.head bucket_name object_name . should_succeed
S3.head not_a_bucket_name object_name . should_fail_with No_Such_Key
S3.head bucket_name "not_an_object" . should_fail_with No_Such_Key
Test.specify "should handle auth issues" <|
S3.head bucket_name object_name credentials=(AWS_Credential.Profile "NoSuchProfile") . should_fail_with AWS_SDK_Error
Test.group "S3.get_object" pending=pending_object <|
Test.specify "should be able to get an object" <|
response = S3.get_object bucket_name object_name
response.should_succeed
response.decode_as_json.should_succeed
S3.get_object not_a_bucket_name object_name . should_fail_with S3_Bucket_Not_Found
S3.get_object bucket_name "not_an_object" . should_fail_with No_Such_Key
Test.specify "should handle auth issues" <|
S3.get_object bucket_name object_name credentials=(AWS_Credential.Profile "NoSuchProfile") . should_fail_with AWS_SDK_Error
main = Test_Suite.run_main spec
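The groups above exercise the new S3 helpers one at a time; a compact sketch of how they compose (the bucket and key are placeholders, and credentials fall back to the environment):

    from Standard.Base import all
    from Standard.AWS import S3

    example_fetch_json =
        pair = S3.parse_uri "s3://my-bucket/examples/data.json"
        S3.get_object pair.first pair.second . decode_as_json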
View File
@ -76,9 +76,6 @@ spec = Test.group "Examples" <|
Test.specify "should provide a response containing geo data" <|
Examples.get_geo_data
Test.specify "should provide a basic HTTP client" <|
Examples.http_client
Test.specify "should provide an example URI" <|
Examples.uri