Creating datalinks from code (#9957)
- Closes #9673
- Adds the ability to save an existing Postgres connection as a datalink into the Enso Cloud, automatically promoting plain-text passwords into a Secret.
- Fixes dataflow error propagation in `JS_Object.from_pairs`.
This commit is contained in:
parent 8823a2aaa0
commit 38ad7b0afa
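A minimal usage sketch of the new capability (the host, database and credential values are illustrative; the API calls mirror the ones added and tested in this commit):

    from Standard.Base import all
    from Standard.Database import all

    main =
        details = Postgres_Details.Postgres "localhost" 5432 "my_db" credentials=(Credentials.Username_And_Password "admin" "my-password")
        connection = Database.connect details
        # Saving writes a .datalink asset into Enso Cloud; the plain-text
        # password is automatically promoted into an Enso_Secret stored next to it.
        connection.save_as_data_link (Enso_File.new "enso://my-org/my-postgres-db.datalink")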
@ -663,6 +663,8 @@
- [Added `Vector.duplicates`.][9917]
- [Log operations performed on a Postgres database connection obtained through a
  Data Link.][9873]
- [Added ability to save an existing Postgres connection as a Data Link in Enso
  Cloud.][9957]

[debug-shortcuts]:
  https://github.com/enso-org/enso/blob/develop/app/gui/docs/product/shortcuts.md#debug

@ -973,6 +975,7 @@
[9766]: https://github.com/enso-org/enso/pull/9766
[9917]: https://github.com/enso-org/enso/pull/9917
[9873]: https://github.com/enso-org/enso/pull/9873
[9957]: https://github.com/enso-org/enso/pull/9957

#### Enso Compiler
@ -168,6 +168,9 @@ type JS_Object
pairs.map pair->
case pair.first of
text : Text ->
## Ensure that any dataflow errors that could be stored in `pair.second` are propagated.
Potential errors in `pair.first` are propagated thanks to the case-of above.
pair.second.if_not_error <|
jackson_value_node = to_json_node pair.second.to_js_object
new_object.set text jackson_value_node
builder.append text
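The `from_pairs` error-propagation fix is exercised by the new test near the end of this diff; in short (`My_Error` is the test's own example error type), an error in either position now surfaces as a dataflow error:

    JS_Object.from_pairs [["key", Error.throw My_Error.Error]] . should_fail_with My_Error
    JS_Object.from_pairs [[Error.throw My_Error.Error, "value"]] . should_fail_with My_Error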
@ -107,6 +107,12 @@ parse_secure_value json -> Text | Enso_Secret =
other -> Error.throw (Illegal_State.Error "Unexpected value inside of a data-link: "+other+".")
_ -> Error.throw (Illegal_State.Error "Parsing a secure value failed. Expected either a string or an object representing a secret, but got "+(Meta.type_of json . to_display_text)+".")

secure_value_to_json (value : Text | Enso_Secret) -> JS_Object | Text =
case value of
text : Text -> text
secret : Enso_Secret ->
JS_Object.from_pairs [["type", "secret"], ["secretPath", secret.path.to_text]]

## PRIVATE
parse_format (json : Any) -> Any ! Illegal_State = case json of
Nothing -> Auto_Detect
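For illustration, the two shapes `secure_value_to_json` can produce (the secret and its path are illustrative): a plain `Text` is passed through unchanged, while a secret is serialised as a reference to its cloud path.

    secure_value_to_json "plain-user"
    # => "plain-user"
    secure_value_to_json my_secret
    # => JS_Object of {"type": "secret", "secretPath": "enso://my-org/my-postgres-db-password"}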
@ -72,7 +72,7 @@ type Enso_File
|
||||
Enso_File.Value (Enso_Path.parse path)
|
||||
|
||||
## PRIVATE
|
||||
Value (enso_path : Enso_Path)
|
||||
private Value (enso_path : Enso_Path)
|
||||
|
||||
## ICON folder
|
||||
Represents the root folder of the current user.
|
||||
@ -103,7 +103,10 @@ type Enso_File
|
||||
Checks if the folder or file exists
|
||||
exists : Boolean
|
||||
exists self =
|
||||
Existing_Enso_Asset.get_asset_reference_for self . is_error . not
|
||||
r = Existing_Enso_Asset.get_asset_reference_for self . if_not_error True
|
||||
r.catch File_Error error-> case error of
|
||||
File_Error.Not_Found _ -> False
|
||||
_ -> r
|
||||
|
||||
## GROUP Metadata
|
||||
ICON metadata
|
||||
@ -111,7 +114,7 @@ type Enso_File
|
||||
size : Integer
|
||||
size self -> Integer =
|
||||
asset = Existing_Enso_Asset.get_asset_reference_for self
|
||||
if asset.is_regular_file.not then Error.throw (Illegal_Argument.Error "`size` can only be queried for regular files.") else
|
||||
if asset.is_regular_file.not then Error.throw (Illegal_Argument.Error "`size` can only be queried for files.") else
|
||||
metadata = asset.get_file_description |> get_required_field "metadata"
|
||||
get_required_field "size" metadata expected_type=Integer
|
||||
|
||||
@ -121,7 +124,7 @@ type Enso_File
|
||||
creation_time : Date_Time
|
||||
creation_time self -> Date_Time =
|
||||
asset = Existing_Enso_Asset.get_asset_reference_for self
|
||||
if asset.is_directory then Unimplemented.throw "Enso_File.creation_time is not implemented yet for directories." else
|
||||
if asset.is_regular_file.not then Error.throw (Illegal_Argument.Error "`creation_time` can only be queried for files.") else
|
||||
metadata = asset.get_file_description |> get_required_field "metadata"
|
||||
Date_Time.parse (get_required_field "created_at" metadata expected_type=Text) Date_Time_Formatter.iso_offset_date_time
|
||||
. catch Time_Error error-> Error.throw (Enso_Cloud_Error.Invalid_Response_Payload error)
|
||||
@ -132,7 +135,7 @@ type Enso_File
|
||||
last_modified_time : Date_Time
|
||||
last_modified_time self =
|
||||
asset = Existing_Enso_Asset.get_asset_reference_for self
|
||||
if asset.is_directory then Unimplemented.throw "Enso_File.last_modified_time is not implemented yet for directories." else
|
||||
if asset.is_regular_file.not then Error.throw (Illegal_Argument.Error "`last_modified_time` can only be queried for files.") else
|
||||
metadata = asset.get_file_description |> get_required_field "metadata"
|
||||
Date_Time.parse (get_required_field "modified_at" metadata expected_type=Text) Date_Time_Formatter.iso_offset_date_time
|
||||
. catch Time_Error error-> Error.throw (Enso_Cloud_Error.Invalid_Response_Payload error)
|
||||
@ -186,16 +189,20 @@ type Enso_File
|
||||
with_output_stream : Vector File_Access -> (Output_Stream -> Any ! File_Error) -> Any ! File_Error
|
||||
with_output_stream self (open_options : Vector) action =
|
||||
Context.Output.if_enabled disabled_message="Writing to an Enso_File is forbidden as the Output context is disabled." panic=False <|
|
||||
open_as_data_link = (open_options.contains Data_Link_Access.No_Follow . not) && (Data_Link.is_data_link self)
|
||||
is_data_link = Data_Link.is_data_link self
|
||||
open_as_data_link = (open_options.contains Data_Link_Access.No_Follow . not) && is_data_link
|
||||
if open_as_data_link then Data_Link_Helpers.write_data_link_as_stream self open_options action else
|
||||
if open_options.contains File_Access.Append then Unimplemented.throw "Enso_File currently does not support appending to a file. Instead you may read it, modify and then write the new contents." else
|
||||
File_Access.ensure_only_allowed_options "with_output_stream" [File_Access.Write, File_Access.Create_New, File_Access.Truncate_Existing, File_Access.Create, Data_Link_Access.No_Follow] open_options <|
|
||||
allow_existing = open_options.contains File_Access.Create_New . not
|
||||
case is_data_link of
|
||||
False ->
|
||||
tmp_file = File.create_temporary_file "enso-cloud-write-tmp"
|
||||
Panic.with_finalizer tmp_file.delete <|
|
||||
perform_upload self allow_existing <|
|
||||
result = tmp_file.with_output_stream [File_Access.Write] action
|
||||
result.if_not_error [tmp_file, result]
|
||||
True -> create_datalink_from_stream_action self allow_existing action
|
||||
|
||||
## PRIVATE
|
||||
ADVANCED
|
||||
@ -216,14 +223,14 @@ type Enso_File
|
||||
if open_as_data_link then Data_Link_Helpers.read_data_link_as_stream self open_options action else
|
||||
File_Access.ensure_only_allowed_options "with_input_stream" [File_Access.Read, Data_Link_Access.No_Follow] open_options <|
|
||||
asset = Existing_Enso_Asset.get_asset_reference_for self
|
||||
uri = case asset.asset_type of
|
||||
Enso_Asset_Type.File -> asset.get_download_url
|
||||
response = case asset.asset_type of
|
||||
Enso_Asset_Type.File ->
|
||||
HTTP.fetch asset.get_download_url HTTP_Method.Get
|
||||
Enso_Asset_Type.Data_Link ->
|
||||
Runtime.assert (open_options.contains Data_Link_Access.No_Follow)
|
||||
asset.internal_uri
|
||||
Utils.http_request HTTP_Method.Get asset.internal_uri
|
||||
_ ->
|
||||
Error.throw (Illegal_Argument.Error "Only files can be opened as a stream.")
|
||||
response = HTTP.fetch uri HTTP_Method.Get []
|
||||
response.if_not_error <| response.body.with_stream action
|
||||
|
||||
## ALIAS load, open
|
||||
@ -438,7 +445,7 @@ Enso_Asset_Type.from (that:Text) = case that of
|
||||
## PRIVATE
|
||||
File_Format_Metadata.from (that:Enso_File) =
|
||||
asset_type = that.asset_type.catch File_Error _->Nothing
|
||||
if asset_type == Enso_Asset_Type.Data_Link then File_Format_Metadata.Value path=that.path name=that.name content_type=Data_Link.data_link_content_type else
|
||||
if asset_type == Enso_Asset_Type.Data_Link then File_Format_Metadata.Value path=that.path name=that.name content_type=Data_Link_Helpers.data_link_content_type else
|
||||
File_Format_Metadata.Value path=that.path name=that.name extension=(that.extension.catch _->Nothing)
|
||||
|
||||
## PRIVATE
|
||||
|
@ -28,7 +28,7 @@ polyglot java import org.enso.base.enso_cloud.HideableValue
|
||||
## A reference to a secret stored in the Enso Cloud.
|
||||
type Enso_Secret
|
||||
## PRIVATE
|
||||
Value name:Text id:Text
|
||||
Value name:Text id:Text path:Enso_Path
|
||||
|
||||
## GROUP Output
|
||||
ICON edit
|
||||
@ -44,17 +44,17 @@ type Enso_Secret
|
||||
Context.Output.if_enabled disabled_message="Creating a secret is forbidden as the Output context is disabled." panic=False <|
|
||||
if name.starts_with "connection-" then Error.throw (Illegal_Argument.Error "Secret name cannot start with 'connection-'") else
|
||||
parent_dir = parent.if_nothing Enso_File.current_working_directory
|
||||
# TODO verify if root needs special handling, maybe file a bug
|
||||
parent_id_pair = if parent_dir.is_current_user_root then [] else
|
||||
path = if name.contains "/" then Error.throw (Illegal_Argument.Error "Secret name cannot contain `/`.") else parent_dir.enso_path.resolve name
|
||||
path.if_not_error <|
|
||||
parent_dir_asset = Existing_Enso_Asset.get_asset_reference_for parent_dir
|
||||
[["parentDirectoryId", parent_dir_asset.id]]
|
||||
body = JS_Object.from_pairs [["name", name], ["value", value]]+parent_id_pair
|
||||
body = JS_Object.from_pairs <|
|
||||
[["name", name], ["value", value], ["parentDirectoryId", parent_dir_asset.id]]
|
||||
handle_already_exists _ =
|
||||
message = "A secret with the name " + name.pretty + " already exists inside of directory " + parent_dir.name + "."
|
||||
Error.throw (Illegal_Argument.Error message)
|
||||
error_handlers = Map.from_vector [["resource_already_exists", handle_already_exists]]
|
||||
id = Utils.http_request_as_json HTTP_Method.Post Utils.secrets_api body error_handlers=error_handlers
|
||||
Enso_Secret.Value name id
|
||||
Enso_Secret.Value name id path
|
||||
|
||||
## GROUP Output
|
||||
ICON trash
|
||||
@ -75,9 +75,10 @@ type Enso_Secret
|
||||
the secrets from the current working directory.
|
||||
list : Enso_File | Nothing -> Vector Enso_Secret
|
||||
list parent:(Enso_File | Nothing)=Nothing =
|
||||
secrets_as_assets = list_assets (parent.if_nothing Enso_File.current_working_directory) . filter f-> f.asset_type == Enso_Asset_Type.Secret
|
||||
effective_parent = parent.if_nothing Enso_File.current_working_directory
|
||||
secrets_as_assets = list_assets effective_parent . filter f-> f.asset_type == Enso_Asset_Type.Secret
|
||||
secrets_as_assets.map asset->
|
||||
Enso_Secret.Value asset.title asset.id
|
||||
Enso_Secret.Value asset.title asset.id (effective_parent.enso_path.resolve asset.title)
|
||||
|
||||
## GROUP Input
|
||||
ICON key
|
||||
@ -102,11 +103,13 @@ type Enso_Secret
|
||||
secret_path = effective_parent.enso_path.resolve name . to_text
|
||||
Enso_Secret.resolve_path secret_path
|
||||
|
||||
|
||||
## PRIVATE
|
||||
resolve_path (path:Text) -> Enso_Secret ! Not_Found =
|
||||
asset = Existing_Enso_Asset.resolve_path path if_not_found=(Error.throw Not_Found)
|
||||
if asset.asset_type != Enso_Asset_Type.Secret then Error.throw (Illegal_Argument.Error "The provided path points to "+asset.asset_type.to_text+", not a Secret.") else
|
||||
Enso_Secret.Value asset.title asset.id
|
||||
parsed_path = Enso_Path.parse path
|
||||
parsed_path.if_not_error <| if asset.asset_type != Enso_Asset_Type.Secret then Error.throw (Illegal_Argument.Error "The provided path points to "+asset.asset_type.to_text+", not a Secret.") else
|
||||
Enso_Secret.Value asset.title asset.id parsed_path
|
||||
|
||||
## GROUP Metadata
|
||||
ICON metadata
|
||||
|
@ -1,6 +1,7 @@
|
||||
import project.Any.Any
|
||||
import project.Data.Text.Text
|
||||
import project.Network.HTTP.HTTP_Status_Code.HTTP_Status_Code
|
||||
import project.Network.URI.URI
|
||||
|
||||
## PRIVATE
|
||||
Error if the user is not logged into Enso Cloud.
|
||||
@ -27,7 +28,7 @@ type Cloud_Session_Expired
|
||||
type Enso_Cloud_Error
|
||||
## PRIVATE
|
||||
An unhandled error response from Enso Cloud.
|
||||
Unexpected_Service_Error (status_code : HTTP_Status_Code) (payload : Text)
|
||||
Unexpected_Service_Error (uri : URI) (status_code : HTTP_Status_Code) (payload : Text)
|
||||
|
||||
## PRIVATE
|
||||
An error response from Enso Cloud that could not be parsed.
|
||||
@ -44,10 +45,14 @@ type Enso_Cloud_Error
|
||||
## PRIVATE
|
||||
to_display_text : Text
|
||||
to_display_text self = case self of
|
||||
Enso_Cloud_Error.Unexpected_Service_Error status_code payload -> "Unexpected error response from Enso Cloud: " + status_code.to_text + ": " + payload
|
||||
Enso_Cloud_Error.Invalid_Response_Payload cause -> "Internal error: A response from Enso Cloud could not be parsed: " + cause.to_display_text
|
||||
Enso_Cloud_Error.Unauthorized -> "Enso Cloud credentials file was found, but the service responded with 401 Unauthorized. You may try logging in again and restarting the workflow."
|
||||
Enso_Cloud_Error.Connection_Error cause -> "Error connecting to Enso Cloud: " + cause.to_display_text
|
||||
Enso_Cloud_Error.Unexpected_Service_Error uri status_code payload ->
|
||||
"Unexpected error response from Enso Cloud: endpoint " + uri.to_text + " replied with status " + status_code.to_text + ": " + payload
|
||||
Enso_Cloud_Error.Invalid_Response_Payload cause ->
|
||||
"Internal error: A response from Enso Cloud could not be parsed: " + cause.to_display_text
|
||||
Enso_Cloud_Error.Unauthorized ->
|
||||
"Enso Cloud credentials file was found, but the service responded with Unauthorized. You may try logging in again and restarting the workflow."
|
||||
Enso_Cloud_Error.Connection_Error cause ->
|
||||
"Error connecting to Enso Cloud: " + cause.to_display_text
|
||||
|
||||
## PRIVATE
|
||||
type Missing_Data_Link_Library
|
||||
|
@ -62,6 +62,7 @@ credentials_file = case Environment.get "ENSO_CLOUD_CREDENTIALS_FILE" of
|
||||
## PRIVATE
|
||||
type Authentication_Service
|
||||
## PRIVATE
|
||||
TODO: We cannot mark this constructor as `private` until we change token tests in `Enso_Cloud_Spec` to run with `--disable-private-check`.
|
||||
Instance (auth_data : Ref Authentication_Data)
|
||||
|
||||
## PRIVATE
|
||||
@ -116,7 +117,7 @@ type Authentication_Data
|
||||
## PRIVATE
|
||||
type Refresh_Token_Data
|
||||
## PRIVATE
|
||||
Value client_id:Text refresh_url:Text refresh_token:Text
|
||||
private Value client_id:Text refresh_url:Text refresh_token:Text
|
||||
|
||||
## PRIVATE
|
||||
read_from_credentials -> Refresh_Token_Data =
|
||||
|
@ -18,7 +18,7 @@ from project.Enso_Cloud.Public_Utils import get_optional_field, get_required_fie
|
||||
## PRIVATE
|
||||
type Enso_File_Data_Link
|
||||
## PRIVATE
|
||||
Value (path : Text) format_json
|
||||
private Value (path : Text) format_json
|
||||
|
||||
## PRIVATE
|
||||
parse json -> Enso_File_Data_Link =
|
||||
|
@ -1,6 +1,10 @@
|
||||
private
|
||||
|
||||
import project.Any.Any
|
||||
import project.Data.Json.Invalid_JSON
|
||||
import project.Data.Json.JS_Object
|
||||
import project.Data.Map.Map
|
||||
import project.Data.Text.Text
|
||||
import project.Enso_Cloud.Enso_File.Enso_Asset_Type
|
||||
import project.Enso_Cloud.Enso_File.Enso_File
|
||||
import project.Enso_Cloud.Errors.Enso_Cloud_Error
|
||||
@ -16,7 +20,11 @@ import project.Network.HTTP.Request_Error
|
||||
import project.Network.URI.URI
|
||||
import project.Nothing.Nothing
|
||||
import project.System.File.File
|
||||
import project.System.Output_Stream.Output_Stream
|
||||
from project.Data.Boolean import Boolean, False, True
|
||||
from project.Data.Text.Extensions import all
|
||||
from project.Enso_Cloud.Data_Link_Helpers import data_link_encoding
|
||||
from project.Enso_Cloud.Public_Utils import get_required_field
|
||||
|
||||
## PRIVATE
|
||||
upload_file (local_file : File) (destination : Enso_File) (replace_existing : Boolean) -> Enso_File =
|
||||
@ -30,18 +38,41 @@ upload_file (local_file : File) (destination : Enso_File) (replace_existing : Bo
|
||||
if is_source_file_not_found then Error.throw (File_Error.Not_Found local_file) else result
|
||||
|
||||
## PRIVATE
|
||||
`generate_request_body_and_result` should return a pair,
|
||||
where the first element is the request body and the second element is the result to be returned.
|
||||
It is executed lazily, only after all pre-conditions are successfully met.
|
||||
perform_upload (destination : Enso_File) (allow_existing : Boolean) (~generate_request_body_and_result) =
|
||||
A helper that gathers the common logic for checking the existence of the
asset being created and of its parent directory.

The `create_action` callback is called with the existing asset of the parent
directory, with the existing asset of the destination file (or `Nothing` if it
does not exist yet), and with a mapping of error handlers that may be added to the request.
generic_create_asset (destination : Enso_File) (allow_existing : Boolean) (create_action : Existing_Enso_Asset -> (Existing_Enso_Asset | Nothing) -> Any) -> Any =
|
||||
parent_directory = destination.parent
|
||||
if parent_directory.is_nothing then Error.throw (Illegal_Argument.Error "The root directory cannot be a destination for upload. The destination must be a path to a file.") else
|
||||
if parent_directory.is_nothing then Error.throw (Illegal_Argument.Error "Please provide an asset name inside of the root directory.") else
|
||||
parent_directory_asset = Existing_Enso_Asset.get_asset_reference_for parent_directory
|
||||
# If the parent directory does not exist, we fail.
|
||||
parent_directory_asset.if_not_error <|
|
||||
existing_asset = Existing_Enso_Asset.get_asset_reference_for destination
|
||||
. catch File_Error _->Nothing
|
||||
. catch File_Error error-> case error of
|
||||
File_Error.Not_Found _ -> Nothing
|
||||
_ -> Error.throw error
|
||||
if existing_asset.is_nothing.not && allow_existing.not then Error.throw (File_Error.Already_Exists destination) else
|
||||
error_handlers = if existing_asset.is_nothing.not then Map.empty else
|
||||
## Currently we just report the race condition and request the user to re-run.
|
||||
We don't retry automatically because it is harder than it seems - the `create_action` usually
|
||||
depends on some user code that is writing to a stream (the callback given to `with_output_stream`).
|
||||
This action is generally not expected to be run more than once, but a simple retry logic would do exactly that.
|
||||
If ever needed, we could implement a more sophisticated retry mechanism, that saves the payload
|
||||
into memory or a temporary file and relies on that for the retry.
|
||||
For now, reporting the race condition in a sane way seemed like the simplest choice.
|
||||
This situation should be very rare.
|
||||
Map.from_vector [["resource_already_exists", Error.throw (Illegal_State.Error "A race-condition has been encountered - another process has created a colliding resource at "+destination.path+". Please try re-running the operation.")]]
|
||||
create_action parent_directory_asset existing_asset error_handlers
|
||||
|
||||
## PRIVATE
|
||||
`generate_request_body_and_result` should return a pair,
|
||||
where the first element is the request body and the second element is the result to be returned.
|
||||
It is executed lazily, only after all pre-conditions are successfully met.
|
||||
perform_upload (destination : Enso_File) (allow_existing : Boolean) (~generate_request_body_and_result) =
|
||||
generic_create_asset destination allow_existing parent_directory_asset-> existing_asset->
|
||||
if existing_asset.is_nothing.not && existing_asset.asset_type != Enso_Asset_Type.File then Error.throw (Illegal_Argument.Error "The destination must be a path to a file, not "+existing_asset.asset_type.to_text+".") else
|
||||
existing_asset_id = existing_asset.if_not_nothing <| existing_asset.id
|
||||
file_name = destination.name
|
||||
@ -53,9 +84,10 @@ perform_upload (destination : Enso_File) (allow_existing : Boolean) (~generate_r
|
||||
_ -> base_uri . add_query_argument "file_id" existing_asset_id
|
||||
pair = generate_request_body_and_result
|
||||
Asset_Cache.invalidate destination
|
||||
response = Utils.http_request HTTP_Method.Post full_uri pair.first
|
||||
response = Utils.http_request_as_json HTTP_Method.Post full_uri pair.first
|
||||
response.if_not_error <|
|
||||
Asset_Cache.update destination (Existing_Enso_Asset.from_json response)
|
||||
id = get_required_field "id" response expected_type=Text
|
||||
Asset_Cache.update destination (Existing_Enso_Asset.from_id_and_title id file_name) . if_not_error <|
|
||||
pair.second
|
||||
|
||||
## PRIVATE
|
||||
@ -75,3 +107,26 @@ create_directory_with_parents (target : Enso_File) -> Existing_Enso_Asset =
|
||||
created_asset.if_not_error <|
|
||||
Asset_Cache.update target created_asset
|
||||
created_asset
|
||||
|
||||
## PRIVATE
|
||||
create_datalink_from_stream_action (destination : Enso_File) (allow_existing : Boolean) (stream_action : Output_Stream -> Any) =
|
||||
generic_create_asset destination allow_existing parent_directory_asset-> existing_asset->
|
||||
if existing_asset.is_nothing.not && existing_asset.asset_type != Enso_Asset_Type.Data_Link then Error.throw (Illegal_Argument.Error "The destination must be a path to a Data Link, not "+existing_asset.asset_type.to_text+".") else
|
||||
# TODO once path resolver is updated to automatically add .datalink extension, we should strip the extension from the name
|
||||
title = destination.name
|
||||
stream_result = Output_Stream.with_memory_stream stream_action
|
||||
raw_bytes = stream_result.first
|
||||
action_result = stream_result.second
|
||||
raw_json = Text.from_bytes raw_bytes data_link_encoding . parse_json . catch Invalid_JSON error->
|
||||
Error.throw (Illegal_Argument.Error "A datalink can be created only with a valid JSON payload, but the written payload was invalid: "+error.to_display_text cause=error)
|
||||
stream_result.if_not_error <|
|
||||
payload = JS_Object.from_pairs <|
|
||||
[["parentDirectoryId", parent_directory_asset.id], ["name", title], ["value", raw_json]]
|
||||
+ (if existing_asset.is_nothing then [] else [["datalinkId", existing_asset.id]])
|
||||
|
||||
Asset_Cache.invalidate destination
|
||||
response = Utils.http_request_as_json HTTP_Method.Post Utils.datalinks_api payload
|
||||
response.if_not_error <|
|
||||
id = get_required_field "id" response expected_type=Text
|
||||
Asset_Cache.update destination (Existing_Enso_Asset.from_id_and_title id title) . if_not_error <|
|
||||
action_result
|
||||
|
@ -18,7 +18,7 @@ from project.Data.Text.Extensions import all
|
||||
This is a helper for handling `enso://` paths.
|
||||
type Enso_Path
|
||||
## PRIVATE
|
||||
Value (organization_name : Text) (path_segments : Vector Text)
|
||||
private Value (organization_name : Text) (path_segments : Vector Text)
|
||||
|
||||
## PRIVATE
|
||||
parse (path : Text) -> Enso_Path =
|
||||
|
@ -17,9 +17,9 @@ import project.Error.Error
|
||||
import project.Errors.Common.Not_Found
|
||||
import project.Errors.File_Error.File_Error
|
||||
import project.Errors.Illegal_Argument.Illegal_Argument
|
||||
import project.Errors.Unimplemented.Unimplemented
|
||||
import project.Network.HTTP.HTTP_Method.HTTP_Method
|
||||
import project.Network.URI.URI
|
||||
import project.Panic.Panic
|
||||
import project.Runtime.Context
|
||||
from project.Data.Boolean import Boolean, False, True
|
||||
from project.Data.Text.Extensions import all
|
||||
@ -33,7 +33,7 @@ from project.Enso_Cloud.Public_Utils import get_required_field
|
||||
type Existing_Enso_Asset
|
||||
## PRIVATE
|
||||
Represents an existing asset within the Enso cloud.
|
||||
Value title:Text id:Text asset_type:Enso_Asset_Type
|
||||
private Value title:Text id:Text asset_type:Enso_Asset_Type
|
||||
|
||||
## PRIVATE
|
||||
Target URI for the api
|
||||
@ -63,6 +63,8 @@ type Existing_Enso_Asset
|
||||
## PRIVATE
|
||||
Fetches the basic information about a file from the Cloud endpoint.
|
||||
get_file_description self -> JS_Object =
|
||||
if self.asset_type != Enso_Asset_Type.File then
|
||||
Panic.throw (Illegal_Argument.Error "`get_file_description` can only be called on File assets.")
|
||||
Utils.http_request_as_json HTTP_Method.Get self.internal_uri
|
||||
|
||||
## PRIVATE
|
||||
@ -89,7 +91,7 @@ type Existing_Enso_Asset
|
||||
## Workaround to keep compatibility with old cloud API
|
||||
TODO remove it after https://github.com/enso-org/cloud-v2/pull/1236 has been deployed
|
||||
response = r.catch Enso_Cloud_Error error-> case error of
|
||||
Enso_Cloud_Error.Unexpected_Service_Error status _ ->
|
||||
Enso_Cloud_Error.Unexpected_Service_Error _ status _ ->
|
||||
if status.code != 404 then r else
|
||||
old_uri = (URI.from Utils.cloud_root_uri) / "path/resolve"
|
||||
old_payload = JS_Object.from_pairs [["path", path]]
|
||||
@ -111,11 +113,14 @@ type Existing_Enso_Asset
|
||||
assets.map Existing_Enso_Asset.from_json
|
||||
|
||||
## PRIVATE
|
||||
from_json json =
|
||||
from_json json -> Existing_Enso_Asset =
|
||||
title = get_required_field "title" json expected_type=Text
|
||||
id = get_required_field "id" json expected_type=Text
|
||||
#org = json.get "organizationId" ""
|
||||
asset_type = (id.take (Text_Sub_Range.Before "-")):Enso_Asset_Type
|
||||
Existing_Enso_Asset.from_id_and_title id title
|
||||
|
||||
## PRIVATE
|
||||
from_id_and_title id:Text title:Text -> Existing_Enso_Asset =
|
||||
asset_type = Enso_Asset_Type.from (id.take (Text_Sub_Range.Before "-"))
|
||||
Existing_Enso_Asset.Value title id asset_type
|
||||
|
||||
|
||||
|
@ -110,7 +110,7 @@ http_request (method : HTTP_Method) (url : URI) (body : Request_Body = Request_B
|
||||
error_code = json_payload.get "code"
|
||||
handler = error_code.if_not_nothing <| error_handlers.get error_code
|
||||
case handler of
|
||||
Nothing -> Error.throw (Enso_Cloud_Error.Unexpected_Service_Error response.code payload)
|
||||
Nothing -> Error.throw (Enso_Cloud_Error.Unexpected_Service_Error url response.code payload)
|
||||
_ : Function -> handler json_payload
|
||||
|
||||
## PRIVATE
|
||||
|
@ -1,3 +1,11 @@
|
||||
import project.Data.Vector.Vector
|
||||
import project.Meta
|
||||
import project.Metadata.Display
|
||||
import project.Metadata.Widget
|
||||
from project.Data.Boolean import Boolean, False, True
|
||||
from project.Metadata.Choice import Option
|
||||
from project.Metadata.Widget import Single_Choice
|
||||
|
||||
## Specifies the behavior of a write operation when the destination file
|
||||
already exists.
|
||||
type Existing_File_Behavior
|
||||
@ -20,3 +28,17 @@ type Existing_File_Behavior
|
||||
## If the file already exists, a `File_Error.Already_Exists` error is
|
||||
raised.
|
||||
Error
|
||||
|
||||
## PRIVATE
|
||||
widget (include_overwrite:Boolean = True) (include_backup:Boolean = True) (include_append:Boolean = True) (include_error:Boolean = True) -> Widget =
|
||||
fqn = Meta.get_qualified_type_name Existing_File_Behavior
|
||||
options = Vector.build builder->
|
||||
if include_overwrite then
|
||||
builder.append (Option "Overwrite" fqn+".Overwrite")
|
||||
if include_backup then
|
||||
builder.append (Option "Backup" fqn+".Backup")
|
||||
if include_append then
|
||||
builder.append (Option "Append" fqn+".Append")
|
||||
if include_error then
|
||||
builder.append (Option "Error" fqn+".Error")
|
||||
Single_Choice display=Display.Always values=options
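For example, `Postgres_Connection.save_as_data_link` later in this commit narrows the dropdown to the modes it actually supports:

    @on_existing_file (Existing_File_Behavior.widget include_backup=False include_append=False)
    save_as_data_link self destination (on_existing_file:Existing_File_Behavior = Existing_File_Behavior.Error) = ...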
|
||||
|
@ -1,9 +1,11 @@
|
||||
import project.Any.Any
|
||||
import project.Data.Numbers.Integer
|
||||
import project.Data.Pair.Pair
|
||||
import project.Data.Text.Encoding.Encoding
|
||||
import project.Data.Text.Extensions
|
||||
import project.Data.Vector.Vector
|
||||
import project.Errors.Encoding_Error.Encoding_Error
|
||||
import project.Errors.File_Error.File_Error
|
||||
import project.Errors.Problem_Behavior.Problem_Behavior
|
||||
import project.Nothing.Nothing
|
||||
import project.Runtime.Managed_Resource.Managed_Resource
|
||||
@ -11,6 +13,7 @@ import project.System.Input_Stream.Input_Stream
|
||||
from project.System.Input_Stream import close_stream
|
||||
|
||||
polyglot java import java.io.OutputStream as Java_Output_Stream
|
||||
polyglot java import java.io.ByteArrayOutputStream
|
||||
polyglot java import org.enso.base.encoding.ReportingStreamEncoder
|
||||
polyglot java import org.enso.base.Encoding_Utils
|
||||
|
||||
@ -27,6 +30,26 @@ type Output_Stream
|
||||
resource = Managed_Resource.register java_stream close_stream
|
||||
Output_Stream.Value resource error_handler
|
||||
|
||||
## PRIVATE
|
||||
ADVANCED
|
||||
Runs a given action with an output stream that writes to an in-memory
|
||||
byte array.
|
||||
This method can be used when the data will fit in memory and there is no
|
||||
reason to use a backing file.
|
||||
|
||||
The method returns a pair of values: the byte array that was written to
the stream, and the value returned by the `action`.
|
||||
|
||||
Arguments:
|
||||
- action: A callback to run with the in-memory output stream.
|
||||
with_memory_stream (action : Output_Stream -> Any) -> Pair Vector Any =
|
||||
java_stream = ByteArrayOutputStream.new
|
||||
output_stream = Output_Stream.new java_stream (File_Error.handle_java_exceptions Nothing)
|
||||
action_result = action output_stream
|
||||
action_result.if_not_error <|
|
||||
bytes = Vector.from_polyglot_array java_stream.toByteArray
|
||||
Pair.new bytes action_result
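A minimal usage sketch (assuming the standard `write_bytes` helper on `Output_Stream`; the payload and return value are illustrative):

    result = Output_Stream.with_memory_stream stream->
        stream.write_bytes "Hello".utf_8
        42
    result.first   # the accumulated bytes, i.e. "Hello".utf_8
    result.second  # the value returned by the action, i.e. 42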
|
||||
|
||||
## PRIVATE
|
||||
An output stream, allowing for interactive writing of contents.
|
||||
|
||||
|
@ -48,7 +48,7 @@ type Postgres_Data_Link
|
||||
# TODO add related asset id here: https://github.com/enso-org/enso/issues/9869
|
||||
audit_mode = if Enso_User.is_logged_in then "cloud" else "local"
|
||||
default_options = Connection_Options.Value [["enso.internal.audit", audit_mode]]
|
||||
connection = self.details.connect default_options
|
||||
connection = self.details.connect default_options allow_data_links=False
|
||||
case self of
|
||||
Postgres_Data_Link.Connection _ -> connection
|
||||
Postgres_Data_Link.Table table_name _ ->
|
||||
|
@ -8,6 +8,7 @@ import project.Connection.Credentials.Credentials
|
||||
import project.Connection.SSL_Mode.SSL_Mode
|
||||
import project.Internal.Postgres.Pgpass
|
||||
import project.Internal.Postgres.Postgres_Connection.Postgres_Connection
|
||||
import project.Internal.Postgres.Postgres_Data_Link_Setup.Postgres_Data_Link_Setup
|
||||
|
||||
polyglot java import org.postgresql.Driver
|
||||
|
||||
@ -33,17 +34,20 @@ type Postgres_Details
|
||||
|
||||
Arguments:
|
||||
- options: Overrides for the connection properties.
|
||||
connect : Connection_Options -> Postgres_Connection
|
||||
connect self options =
|
||||
- allow_data_links: Whether to allow saving this connection as a data link.
|
||||
connect : Connection_Options -> Boolean -> Postgres_Connection
|
||||
connect self options (allow_data_links : Boolean = True) =
|
||||
if Driver.isRegistered.not then Driver.register
|
||||
|
||||
data_link_setup = if allow_data_links then Postgres_Data_Link_Setup.Available self else
|
||||
Postgres_Data_Link_Setup.Unavailable "Saving connections established through a Data Link is not allowed. Please copy the Data Link instead."
|
||||
properties = options.merge self.jdbc_properties
|
||||
|
||||
## Cannot use default argument values, as doing so leads to an infinite loop.
|
||||
make_new database schema =
|
||||
Postgres_Details.Postgres self.host self.port (database.if_nothing self.database) (schema.if_nothing self.schema) self.credentials self.use_ssl self.client_cert . connect options
|
||||
Postgres_Details.Postgres self.host self.port (database.if_nothing self.database) (schema.if_nothing self.schema) self.credentials self.use_ssl self.client_cert . connect options allow_data_links
|
||||
|
||||
Postgres_Connection.create self.jdbc_url properties make_new
|
||||
Postgres_Connection.create self.jdbc_url properties make_new data_link_setup
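For context, a connection that was itself opened through a Data Link passes `allow_data_links=False` (see `Postgres_Data_Link` above), so trying to re-save it fails; a rough sketch with illustrative `details`, `default_options` and `destination` values:

    connection = details.connect default_options allow_data_links=False
    # Re-saving a connection obtained from a Data Link is rejected:
    connection.save_as_data_link destination . should_fail_with Illegal_Argument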
|
||||
|
||||
## PRIVATE
|
||||
Provides the jdbc url for the connection.
|
||||
|
@ -16,6 +16,7 @@ import project.Dialect
|
||||
import project.Internal.Connection.Entity_Naming_Properties.Entity_Naming_Properties
|
||||
import project.Internal.IR.Query.Query
|
||||
import project.Internal.JDBC_Connection
|
||||
import project.Internal.Postgres.Postgres_Data_Link_Setup.Postgres_Data_Link_Setup
|
||||
import project.Internal.SQL_Type_Reference.SQL_Type_Reference
|
||||
import project.SQL_Query.SQL_Query
|
||||
import project.SQL_Statement.SQL_Statement
|
||||
@ -33,8 +34,8 @@ type Postgres_Connection
|
||||
- url: The URL to connect to.
|
||||
- properties: A vector of properties for the connection.
|
||||
- make_new: A function that returns a new connection.
|
||||
create : Text -> Vector -> (Text -> Text -> Postgres_Connection) -> Postgres_Connection
|
||||
create url properties make_new =
|
||||
create : Text -> Vector -> (Text -> Text -> Postgres_Connection) -> Postgres_Data_Link_Setup -> Postgres_Connection
|
||||
create url properties make_new data_link_setup =
|
||||
jdbc_connection = JDBC_Connection.create url properties
|
||||
encoding = parse_postgres_encoding (get_encoding_name jdbc_connection)
|
||||
|
||||
@ -44,7 +45,7 @@ type Postgres_Connection
|
||||
Our generator is supposed to always quote identifiers
|
||||
entity_naming_properties = Entity_Naming_Properties.from_jdbc_connection jdbc_connection encoding is_case_sensitive=True
|
||||
|
||||
Postgres_Connection.Value (Connection.new jdbc_connection Dialect.postgres entity_naming_properties) make_new
|
||||
Postgres_Connection.Value (Connection.new jdbc_connection Dialect.postgres entity_naming_properties) make_new data_link_setup
|
||||
|
||||
## PRIVATE
|
||||
|
||||
@ -53,7 +54,7 @@ type Postgres_Connection
|
||||
Arguments:
|
||||
- connection: the underlying connection.
|
||||
- make_new: a function that returns a new connection.
|
||||
private Value connection make_new
|
||||
private Value (connection:Connection) (make_new : Text -> Text -> Postgres_Connection) (data_link_setup : Postgres_Data_Link_Setup)
|
||||
|
||||
## ICON data_input
|
||||
Closes the connection releasing the underlying database resources
|
||||
@ -280,6 +281,12 @@ type Postgres_Connection
|
||||
base_connection : Connection
|
||||
base_connection self = self.connection
|
||||
|
||||
## Creates a Data Link that will open the same connection.
|
||||
@on_existing_file (Existing_File_Behavior.widget include_backup=False include_append=False)
|
||||
save_as_data_link self destination (on_existing_file:Existing_File_Behavior = Existing_File_Behavior.Error) =
|
||||
self.data_link_setup.save_as_data_link destination on_existing_file
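The tests added in this commit exercise `on_existing_file` roughly as follows (`cloud_location` is an `Enso_File` path ending in `.datalink`):

    connection.save_as_data_link cloud_location . should_equal cloud_location
    # A second save fails unless overwriting is explicitly requested:
    connection.save_as_data_link cloud_location . should_fail_with File_Error
    connection.save_as_data_link cloud_location on_existing_file=Existing_File_Behavior.Overwrite . should_succeed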
|
||||
|
||||
|
||||
## PRIVATE
|
||||
get_encoding_name : JDBC_Connection.JDBC_Connection -> Text
|
||||
get_encoding_name jdbc_connection =
|
||||
|
@ -0,0 +1,68 @@
|
||||
private
|
||||
|
||||
from Standard.Base import all
|
||||
import Standard.Base.Enso_Cloud.Data_Link.Data_Link
|
||||
import Standard.Base.Errors.File_Error.File_Error
|
||||
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
|
||||
from Standard.Base.Enso_Cloud.Data_Link_Helpers import data_link_extension, secure_value_to_json
|
||||
|
||||
import project.Connection.Credentials.Credentials
|
||||
import project.Connection.Postgres_Details.Postgres_Details
|
||||
import project.Connection.SSL_Mode.SSL_Mode
|
||||
|
||||
## PRIVATE
|
||||
type Postgres_Data_Link_Setup
|
||||
## PRIVATE
|
||||
Available details:Postgres_Details
|
||||
|
||||
## PRIVATE
|
||||
Unavailable cause:Text
|
||||
|
||||
## PRIVATE
|
||||
save_as_data_link self destination on_existing_file:Existing_File_Behavior = case self of
|
||||
Postgres_Data_Link_Setup.Available details ->
|
||||
case destination of
|
||||
_ : Enso_File ->
|
||||
replace_existing = case on_existing_file of
|
||||
Existing_File_Behavior.Overwrite -> True
|
||||
Existing_File_Behavior.Error -> False
|
||||
_ -> Error.throw (Illegal_Argument.Error "Invalid value for `on_existing_file` parameter, only `Overwrite` and `Error` are supported here.")
|
||||
exists_checked = if replace_existing.not && destination.exists then Error.throw (File_Error.Already_Exists destination)
|
||||
exists_checked.if_not_error <|
|
||||
credentials = prepare_credentials destination details
|
||||
if (details.use_ssl != SSL_Mode.Prefer) || details.client_cert.is_nothing.not then Error.throw (Illegal_Argument.Error "Cannot save connection as Data Link: custom SSL settings are currently unsupported.") else
|
||||
json = JS_Object.from_pairs <|
|
||||
header = [["type", "Postgres_Connection"], ["libraryName", "Standard.Database"]]
|
||||
connection_part = [["host", details.host], ["port", details.port], ["database_name", details.database]]
|
||||
schema_part = if details.schema.not_empty then [["schema", details.schema]] else []
|
||||
credential_part = if credentials.is_nothing.not then [["credentials", credentials]] else []
|
||||
header + connection_part + schema_part + credential_part
|
||||
Data_Link.write_config destination json replace_existing
|
||||
_ -> Error.throw (Illegal_Argument.Error "Currently a connection can only be saved as a Data Link into the Enso Cloud. Please provide an `Enso_File` as destination.")
|
||||
|
||||
Postgres_Data_Link_Setup.Unavailable cause ->
|
||||
Error.throw (Illegal_Argument.Error "Cannot save connection as Data Link: "+cause)
|
||||
|
||||
## PRIVATE
|
||||
prepare_credentials data_link_location:Enso_File details:Postgres_Details -> JS_Object | Nothing =
|
||||
case details.credentials of
|
||||
Nothing -> Nothing
|
||||
credentials:Credentials ->
|
||||
# A plain-text password is automatically promoted to a secret.
|
||||
secret_password = case credentials.password of
|
||||
secret : Enso_Secret -> secret
|
||||
plain_text_password : Text ->
|
||||
secret_location = data_link_location.parent.if_nothing Enso_File.root
|
||||
location_name = if data_link_location.name.ends_with data_link_extension then data_link_location.name.drop (Index_Sub_Range.Last data_link_extension.length) else data_link_location.name
|
||||
|
||||
create_fresh_secret ix =
|
||||
secret_name = location_name + "-password" + (if ix == 0 then "" else "-"+ix.to_text)
|
||||
r = Enso_Secret.create secret_name plain_text_password secret_location
|
||||
r.catch Illegal_Argument error->
|
||||
if error.message.contains "already exists" then create_fresh_secret ix+1 else r
|
||||
|
||||
secret_location.if_not_error <| location_name.if_not_error <|
|
||||
create_fresh_secret 0
|
||||
|
||||
# But we keep the username as-is - if it was in plain text, it will stay in plain text.
|
||||
JS_Object.from_pairs [["username", secure_value_to_json credentials.username], ["password", secure_value_to_json secret_password]]
|
@ -170,13 +170,11 @@ add_specs suite_builder =
|
||||
source_file.exists . should_be_false
|
||||
|
||||
sample_data_link_content = Data_Link.read_raw_config (enso_project.data / "simple.datalink")
|
||||
# TODO Enso_File datalink once Enso_File & cloud datalink write is supported
|
||||
datalink_backends = backends.filter b-> b.name != "Enso Cloud"
|
||||
## This introduces a lot of combinations for testing the datalink copy/move logic, but unfortunately it is needed,
|
||||
because various combinations of backends may rely on different logic (different operations happen under the hood
|
||||
if a file is moved locally vs if it is moved from a local filesystem to S3 or vice versa), and all that different
|
||||
logic may be prone to mis-handling datalinks - so we need to test all paths to ensure coverage.
|
||||
datalink_backends.each source_backend-> datalink_backends.each destination_backend->
|
||||
backends.each source_backend-> backends.each destination_backend->
|
||||
## All Data Link tests depend on S3 - even if the backends do not use S3, the datalink itself targets S3,
|
||||
so `api_pending` is always checked and the test will not be run without S3 config present.
|
||||
pending = any_pending [source_backend, destination_backend] . if_nothing api_pending
|
||||
@ -239,7 +237,8 @@ add_specs suite_builder =
|
||||
source_link.exists . should_be_true
|
||||
|
||||
# But if we read it raw, we can see that it is still a datalink, not just a copy of the data:
|
||||
Data_Link.read_raw_config destination_link . should_equal sample_data_link_content
|
||||
# (We are comparing parsed representation, as things like non-significant whitespace can change within the representation.)
|
||||
Data_Link.read_raw_config destination_link . parse_json . should_equal sample_data_link_content.parse_json
|
||||
|
||||
group_builder.specify "should be able to move a datalink using Data_Link.move" <| with_default_credentials <|
|
||||
source_link = source_link_provider.get
|
||||
@ -264,7 +263,7 @@ add_specs suite_builder =
|
||||
source_link.exists . should_be_false
|
||||
|
||||
destination_link.read . should_equal "Hello WORLD!"
|
||||
Data_Link.read_raw_config destination_link . should_equal sample_data_link_content
|
||||
Data_Link.read_raw_config destination_link . parse_json . should_equal sample_data_link_content.parse_json
|
||||
|
||||
main filter=Nothing =
|
||||
suite = Test.build suite_builder->
|
||||
|
@ -1,5 +1,5 @@
|
||||
{
|
||||
"type": "Enso_File",
|
||||
"libraryName": "Standard.Base",
|
||||
"path": "enso://PLACEHOLDER_ORG_NAME/test-directory/another.txt"
|
||||
"path": "enso://PLACEHOLDER_ORG_NAME/PLACEHOLDER_PATH"
|
||||
}
|
||||
|
@ -16,6 +16,9 @@ type Author
|
||||
type Book
|
||||
Value title author
|
||||
|
||||
type My_Error
|
||||
Error
|
||||
|
||||
Text.should_fail_parsing_with self expected =
|
||||
as_fail = case Json.parse self of
|
||||
_ -> Spec_Result.Failure "Expected a parse error, but no error reported."
|
||||
@ -148,9 +151,9 @@ add_specs suite_builder =
|
||||
JS_Object.from_pairs [["foo", "bar"]] . to_json . should_equal '{"foo":"bar"}'
|
||||
JS_Object.from_pairs [["foo", "bar"], ["baz", Nothing]] . to_json . should_equal '{"foo":"bar","baz":null}'
|
||||
|
||||
group_builder.specify "should be buildable from pairs" <|
|
||||
JS_Object.from_pairs [["foo", "bar"]] . to_json . should_equal '{"foo":"bar"}'
|
||||
JS_Object.from_pairs [["foo", "bar"], ["baz", Nothing]] . to_json . should_equal '{"foo":"bar","baz":null}'
|
||||
group_builder.specify "`from_pairs` should propagate errors" <|
|
||||
JS_Object.from_pairs [["key", (Error.throw My_Error.Error)]] . should_fail_with My_Error
|
||||
JS_Object.from_pairs [[(Error.throw My_Error.Error), "value"]] . should_fail_with My_Error
|
||||
|
||||
group_builder.specify "should be handle equality on a key level" <|
|
||||
JS_Object.from_pairs [["a", 42]] . should_equal <| Json.parse '{"a": 42}'
|
||||
|
@ -18,22 +18,33 @@ from project.Network.Enso_Cloud.Cloud_Tests_Setup import create_local_datalink_t
|
||||
|
||||
add_specs suite_builder setup:Cloud_Tests_Setup =
|
||||
suite_builder.group "DataLinks in Enso Cloud" pending=setup.real_cloud_pending group_builder->
|
||||
test_root = Temporary_Directory.make "DataLinks-in-Cloud"
|
||||
group_builder.teardown test_root.cleanup
|
||||
|
||||
group_builder.specify "should be able to access an example HTTP data-link" <|
|
||||
## We assume that the cloud has a sample data-link called `TestDataLink` present
|
||||
TODO in future iterations this file will be created by the test suite itself, to make it self-contained
|
||||
assets = Enso_File.root.list
|
||||
datalink = assets.find a-> a.name.starts_with "TestDataLink-HTTP"
|
||||
datalink = test_root.get / "TestDataLink-HTTP.datalink"
|
||||
datalink.exists.should_be_false
|
||||
Data_Link.write_raw_config datalink http_data_link_content . should_succeed
|
||||
datalink.exists.should_be_true
|
||||
|
||||
datalink.should_be_a Enso_File
|
||||
datalink.asset_type.should_equal Enso_Asset_Type.Data_Link
|
||||
|
||||
# We assume that the datalink points to https://raw.githubusercontent.com/enso-org/enso/develop/test/Base_Tests/data/sample.txt
|
||||
# Currently metadata is not available on datalinks
|
||||
datalink.size.should_fail_with Illegal_Argument
|
||||
datalink.creation_time.should_fail_with Illegal_Argument
|
||||
|
||||
Test.with_retries <|
|
||||
# We assume that the datalink points to
|
||||
r = datalink.read
|
||||
r.should_be_a Text
|
||||
r.should_contain "Cupcake"
|
||||
|
||||
group_builder.specify "will report which library is missing if a datalink relying on other library is accessed" <|
|
||||
assets = Enso_File.root.list
|
||||
datalink = assets.find a-> a.name.starts_with "TestDataLink-S3"
|
||||
datalink = test_root.get / "TestDataLink-S3.datalink"
|
||||
Data_Link.write_raw_config datalink s3_data_link_content . should_succeed
|
||||
datalink.exists.should_be_true
|
||||
|
||||
datalink.should_be_a Enso_File
|
||||
datalink.asset_type.should_equal Enso_Asset_Type.Data_Link
|
||||
|
||||
@ -41,6 +52,9 @@ add_specs suite_builder setup:Cloud_Tests_Setup =
|
||||
r.should_fail_with Missing_Data_Link_Library
|
||||
r.catch.to_display_text . should_contain "The data link for S3 is provided by the library Standard.AWS which is not loaded."
|
||||
|
||||
# But we can still read the raw Data Link config:
|
||||
Data_Link.read_raw_config datalink . should_contain '{'
|
||||
|
||||
group_builder.specify "does not allow to use Data_Link to read/write regular files" <|
|
||||
temp_file = File.create_temporary_file "not-a-datalink" ".txt"
|
||||
Test.expect_panic Illegal_Argument <|
|
||||
@ -49,18 +63,20 @@ add_specs suite_builder setup:Cloud_Tests_Setup =
|
||||
Data_Link.read_raw_config temp_file
|
||||
|
||||
suite_builder.group "DataLinks to Enso Cloud" pending=setup.real_cloud_pending group_builder->
|
||||
test_root = Temporary_Directory.make "DataLinks"
|
||||
test_root = Temporary_Directory.make "DataLinks-to-Cloud"
|
||||
group_builder.teardown test_root.cleanup
|
||||
|
||||
group_builder.specify "should be able to read a local datalink to an Enso File" <|
|
||||
my_target_enso_file = "Hello Enso Cloud File!".write (test_root.get / "my-file.txt") . should_succeed
|
||||
datalink = transform_datalink (enso_project.data / "datalinks" / "example-enso-file.datalink") content->
|
||||
content.replace "PLACEHOLDER_ORG_NAME" Enso_User.current.organization_name
|
||||
datalink.read . should_equal "Hello Another!"
|
||||
content.replace "enso://PLACEHOLDER_ORG_NAME/PLACEHOLDER_PATH" my_target_enso_file.path
|
||||
datalink.read . should_equal "Hello Enso Cloud File!"
|
||||
|
||||
group_builder.specify "should be able to read a datalink in the Cloud to Enso File" <|
|
||||
# TODO currently this link is created manually, later we should be generating it as part of the test
|
||||
datalink = Enso_File.new "enso://"+Enso_User.current.organization_name+"/TestDataLink-EnsoFile"
|
||||
datalink.read . should_equal "Hello Another!"
|
||||
my_target_enso_file = "Hello Enso Cloud File!".write (test_root.get / "my-file.txt") . should_succeed
|
||||
datalink = test_root.get / "TestDataLink-EnsoFile.datalink"
|
||||
Data_Link.write_raw_config datalink (enso_file_data_link_content.replace "<PATH>" my_target_enso_file.path) . should_succeed
|
||||
datalink.read . should_equal "Hello Enso Cloud File!"
|
||||
|
||||
group_builder.specify "should be able to write Text to a data link pointing to a Cloud file" <|
|
||||
datalink = create_local_datalink_to (test_root.get / "my-file1.txt")
|
||||
@ -137,3 +153,35 @@ transform_datalink base_file transform =
|
||||
new_content = transform content
|
||||
temp_file = File.create_temporary_file prefix=base_file.name suffix=base_file.extension
|
||||
Data_Link.write_raw_config temp_file new_content replace_existing=True . if_not_error temp_file
|
||||
|
||||
replace_placeholder_path datalink_file target_file =
|
||||
content = Data_Link.read_raw_config datalink_file
|
||||
new_content = content.replace "enso://PLACEHOLDER_ORG_NAME/PLACEHOLDER_PATH" target_file.path
|
||||
temp_file = File.create_temporary_file prefix=datalink_file.name suffix=datalink_file.extension
|
||||
Data_Link.write_raw_config temp_file new_content replace_existing=True . if_not_error temp_file
|
||||
|
||||
http_data_link_content = """
|
||||
{
|
||||
"type": "HTTP",
|
||||
"libraryName": "Standard.Base",
|
||||
"method": "GET",
|
||||
"uri": "https://raw.githubusercontent.com/enso-org/enso/develop/test/Base_Tests/data/sample.txt"
|
||||
}
|
||||
|
||||
s3_data_link_content = """
|
||||
{
|
||||
"type": "S3",
|
||||
"libraryName": "Standard.AWS",
|
||||
"uri": "s3://enso-data-samples/examples/folder 2/hello.txt",
|
||||
"auth": {
|
||||
"type": "aws_auth",
|
||||
"subType": "default"
|
||||
}
|
||||
}
|
||||
|
||||
enso_file_data_link_content = """
|
||||
{
|
||||
"type": "Enso_File",
|
||||
"libraryName": "Standard.Base",
|
||||
"path": "<PATH>"
|
||||
}
|
||||
|
@ -2,6 +2,7 @@ from Standard.Base import all
|
||||
import Standard.Base.Enso_Cloud.Data_Link.Data_Link
|
||||
import Standard.Base.Errors.Illegal_State.Illegal_State
|
||||
import Standard.Base.Runtime.Context
|
||||
import Standard.Base.Runtime.Ref.Ref
|
||||
|
||||
from Standard.Test import Problems
|
||||
import Standard.Test.Test_Environment
|
||||
@ -175,16 +176,25 @@ type Mock_Credentials
|
||||
Mock_Credentials.Value self.access_token new_expire_at self.refresh_token self.refresh_url self.client_id
|
||||
|
||||
type Temporary_Directory
|
||||
Value ~get
|
||||
Value was_initialized:Ref ~get
|
||||
|
||||
timestamp_text -> Text = Date_Time.now.format "yyyy-MM-dd_HHmmss.fV" . replace "/" "."
|
||||
|
||||
make (name : Text) (with_initializer : (Enso_File -> Any) = (_->Nothing)) -> Temporary_Directory = Temporary_Directory.Value <|
|
||||
make (name : Text) (with_initializer : (Enso_File -> Any) = (_->Nothing)) -> Temporary_Directory =
|
||||
was_initialized = Ref.new False
|
||||
Temporary_Directory.Value was_initialized <|
|
||||
directory_name = "test-run-"+name+"-"+Temporary_Directory.timestamp_text
|
||||
test_root = (Enso_File.root / directory_name).create_directory
|
||||
test_root.if_not_error <|
|
||||
was_initialized.put True
|
||||
with_initializer test_root . if_not_error test_root
|
||||
|
||||
cleanup self = self.get.delete_if_exists recursive=True
|
||||
cleanup self =
|
||||
Only run the cleanup if the directory was actually initialized.
Otherwise the cleanup itself would force the lazy initializer,
creating a new directory just to delete it.
|
||||
if self.was_initialized.get then
|
||||
self.get.delete_if_exists recursive=True
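A sketch of how the test suites in this commit use the lazily-initialized directory (group and file names are illustrative); `cleanup` is now a no-op when `get` was never forced:

    test_root = Temporary_Directory.make "My-Feature-Tests"
    group_builder.teardown test_root.cleanup

    group_builder.specify "writes into the temporary cloud directory" <|
        file = test_root.get / "some-file.txt"   # forcing `get` creates the directory
        "contents".write file . should_succeed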
|
||||
|
||||
create_local_datalink_to enso_file:Enso_File =
|
||||
config = JS_Object.from_pairs [["type", "Enso_File"], ["libraryName", "Standard.Base"], ["path", enso_file.path]]
|
||||
|
@ -271,8 +271,8 @@ add_specs suite_builder setup:Cloud_Tests_Setup = suite_builder.group "Enso Clou
|
||||
group_builder.specify "currently does not support metadata for directories" <|
|
||||
# TODO this test should be 'reversed' and merged with above once the metadata is implemented
|
||||
dir = test_root.get / "test-directory"
|
||||
Test.expect_panic Unimplemented dir.creation_time
|
||||
Test.expect_panic Unimplemented dir.last_modified_time
|
||||
dir.creation_time.should_fail_with Illegal_Argument
|
||||
dir.last_modified_time.should_fail_with Illegal_Argument
|
||||
|
||||
group_builder.specify "should be able to read other file metadata" pending="TODO needs further design" <|
|
||||
nested_file = Enso_File.root / "test-directory" / "another.txt"
|
||||
|
@ -70,11 +70,6 @@ add_specs suite_builder setup:Cloud_Tests_Setup =
|
||||
Panic.with_finalizer created_secret.delete <|
|
||||
Test.with_retries <|
|
||||
r1 = Enso_Secret.create name "my_secret_value"
|
||||
|
||||
## If the secret was created due to race condition - we clean it up
|
||||
TODO: this should be addressed at Cloud level, disallowing to create a secret with the same name
|
||||
if r1.is_error.not then r1.delete
|
||||
|
||||
r1.should_fail_with Illegal_Argument
|
||||
r1.catch.to_display_text . should_contain "already exists"
|
||||
|
||||
|
@ -1,5 +1,6 @@
|
||||
from Standard.Base import all
|
||||
import Standard.Base.Enso_Cloud.Data_Link.Data_Link
|
||||
import Standard.Base.Errors.File_Error.File_Error
|
||||
import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
|
||||
import Standard.Base.Errors.Illegal_State.Illegal_State
|
||||
import Standard.Base.Runtime.Ref.Ref
|
||||
@ -31,6 +32,7 @@ from project.Common_Table_Operations.Util import all
|
||||
from project.Database.Types.Postgres_Type_Mapping_Spec import default_text
|
||||
|
||||
import enso_dev.Base_Tests.Network.Enso_Cloud.Cloud_Tests_Setup.Cloud_Tests_Setup
|
||||
import enso_dev.Base_Tests.Network.Enso_Cloud.Cloud_Tests_Setup.Temporary_Directory
|
||||
import enso_dev.Base_Tests.Network.Http.Http_Test_Setup
|
||||
|
||||
|
||||
@ -937,6 +939,54 @@ add_data_link_specs suite_builder =
|
||||
r.should_fail_with Illegal_Argument
|
||||
r.catch.to_display_text . should_contain "The Postgres Data Link cannot be saved to a file."
|
||||
|
||||
cloud_setup = Cloud_Tests_Setup.prepare
|
||||
suite_builder.group "[PostgreSQL] Saving to DataLink" pending=cloud_setup.real_cloud_pending group_builder->
|
||||
test_root = Temporary_Directory.make "Postgres-DataLinks"
|
||||
group_builder.teardown test_root.cleanup
|
||||
group_builder.specify "allows to save an established connection as a Data Link" <|
|
||||
cloud_location = test_root.get / "my-postgres-db.datalink"
|
||||
raw_connection = Database.connect connection_details
|
||||
Panic.with_finalizer raw_connection.close <|
|
||||
r1 = raw_connection.save_as_data_link cloud_location
|
||||
# The written data link location is returned
|
||||
r1.should_equal cloud_location
|
||||
|
||||
# A secret for the plain-text password is created:
|
||||
secrets = Enso_Secret.list test_root.get
|
||||
secrets.length . should_equal 1
|
||||
secrets.first.name . should_contain "my-postgres-db"
|
||||
secrets.first.name . should_contain "password"
|
||||
|
||||
written_data_link_connection = cloud_location.read
|
||||
Panic.with_finalizer written_data_link_connection.close <|
|
||||
written_data_link_connection.tables.column_names . should_contain "Name"
|
||||
q = written_data_link_connection.query 'SELECT 1 AS "A"'
|
||||
q.column_names . should_equal ["A"]
|
||||
q.at "A" . to_vector . should_equal [1]
|
||||
|
||||
# Also test existing file behaviour
|
||||
r2 = raw_connection.save_as_data_link cloud_location
|
||||
r2.should_fail_with File_Error
|
||||
r2.catch.should_be_a File_Error.Already_Exists
|
||||
|
||||
r3 = raw_connection.save_as_data_link cloud_location on_existing_file=Existing_File_Behavior.Overwrite
|
||||
r3.should_succeed
|
||||
|
||||
group_builder.specify "does not allow Backup or Append modes in save_as_data_link" <|
|
||||
cloud_location = test_root.get / "my-postgres-db-2.datalink"
|
||||
raw_connection = Database.connect connection_details
|
||||
Panic.with_finalizer raw_connection.close <|
|
||||
raw_connection.save_as_data_link cloud_location on_existing_file=Existing_File_Behavior.Backup . should_fail_with Illegal_Argument
|
||||
raw_connection.save_as_data_link cloud_location on_existing_file=Existing_File_Behavior.Append . should_fail_with Illegal_Argument
|
||||
|
||||
group_builder.specify "but will not allow to save a connection from a Data Link as a new Data Link" <|
|
||||
## (Because the Data Link connection is hiding its connection details from the user.)
|
||||
cloud_location = test_root.get / "my-postgres-db-3.datalink"
|
||||
data_link_connection = Data.read data_link_file.get
|
||||
Panic.with_finalizer data_link_connection.close <|
|
||||
r = data_link_connection.save_as_data_link cloud_location
|
||||
r.should_fail_with Illegal_Argument
|
||||
|
||||
Audit_Spec.add_specs suite_builder "[PostgreSQL] " data_link_file.get database_pending=pending
|
||||
|
||||
|
||||
|